Example #1
0
    def _mockRpcUtils(self, to_return, control_file_substring=''):
        """Fake out the autotest rpc_utils module with a mockable class.

        Stubs out rpc_utils.create_job_common() via mox and records exactly
        one expected call: the job name argument must contain both
        self._NAME and self._BUILD, the control file must contain
        self._BOARD, self._BUILD and control_file_substring, and the
        keyvals dict must contain the download-started and payload-finished
        time keys.

        @param to_return: the value that rpc_utils.create_job_common() should
                          be mocked out to return.
        @param control_file_substring: A substring that is expected to appear
                                       in the control file output string that
                                       is passed to create_job_common.
                                       Default: ''
        """
        # Keys that the recorded call's keyvals argument must contain.
        download_started_time = constants.DOWNLOAD_STARTED_TIME
        payload_finished_time = constants.PAYLOAD_FINISHED_TIME
        self.mox.StubOutWithMock(rpc_utils, 'create_job_common')
        # Record the expectation; mox verifies these argument matchers when
        # the code under test actually invokes create_job_common().
        rpc_utils.create_job_common(
            mox.And(mox.StrContains(self._NAME), mox.StrContains(self._BUILD)),
            priority=self._PRIORITY,
            timeout_mins=self._TIMEOUT * 60,
            max_runtime_mins=self._TIMEOUT * 60,
            control_type='Server',
            control_file=mox.And(mox.StrContains(self._BOARD),
                                 mox.StrContains(self._BUILD),
                                 mox.StrContains(control_file_substring)),
            hostless=True,
            keyvals=mox.And(
                mox.In(download_started_time),
                mox.In(payload_finished_time))).AndReturn(to_return)
def powerwash_dut_to_test_repair(hostname, timeout):
    """Run the platform_Powerwash test to exercise the repair workflow.

    Schedules a server-side powerwash job on the DUT, polls TKO until the
    job produces test statuses (or the timeout elapses), verifies the
    results, then kicks off a verify that is expected to trigger repair.

    @param hostname: hostname of the dut.
    @param timeout: seconds of the powerwash test to hit timeout.
    @raise TestPushException: if DUT fail to run the test.
    """
    test = models.Test.objects.get(name='platform_Powerwash')
    control = utils.read_file(os.path.join(AUTOTEST_DIR, test.path))
    job_id = rpc_utils.create_job_common('powerwash',
                                         priority=priorities.Priority.SUPER,
                                         control_type='Server',
                                         control_file=control,
                                         hosts=[hostname])

    deadline = time.time() + timeout
    # Poll TKO every 10s until the job reports at least one test status.
    while True:
        if TKO.get_job_test_statuses_from_db(job_id):
            break
        if time.time() >= deadline:
            AFE.run('abort_host_queue_entries', job=job_id)
            raise TestPushException(
                'Powerwash test on %s timeout after %ds, abort it.' %
                (hostname, timeout))
        time.sleep(10)
    verify_test_results(job_id,
                        test_push_common.EXPECTED_TEST_RESULTS_POWERWASH)
    # Kick off verify, verify will fail and a repair should be triggered.
    AFE.reverify_hosts(hostnames=[hostname])
Example #3
0
def create_job(name,
               priority,
               control_file,
               control_type,
               hosts=(),
               meta_hosts=(),
               one_time_hosts=(),
               atomic_group_name=None,
               synch_count=None,
               is_template=False,
               timeout=None,
               max_runtime_hrs=None,
               run_verify=True,
               email_list='',
               dependencies=(),
               reboot_before=None,
               reboot_after=None,
               parse_failed_repair=None,
               hostless=False,
               keyvals=None,
               drone_set=None):
    """\
    Create and enqueue a job.

    @param name name of this job
    @param priority Low, Medium, High, Urgent
    @param control_file String contents of the control file.
    @param control_type Type of control file, Client or Server.
    @param synch_count How many machines the job uses per autoserv execution.
    synch_count == 1 means the job is asynchronous.  If an atomic group is
    given this value is treated as a minimum.
    @param is_template If true then create a template job.
    @param timeout Hours after this call returns until the job times out.
    @param max_runtime_hrs Hours from job starting time until job times out
    @param run_verify Should the host be verified before running the test?
    @param email_list String containing emails to mail when the job is done
    @param dependencies List of label names on which this job depends
    @param reboot_before Never, If dirty, or Always
    @param reboot_after Never, If all tests passed, or Always
    @param parse_failed_repair if true, results of failed repairs launched by
    this job will be parsed as part of the job.
    @param hostless if true, create a hostless job
    @param keyvals dict of keyvals to associate with the job

    @param hosts List of hosts to run job on.
    @param meta_hosts List where each entry is a label name, and for each entry
    one host will be chosen from that label to run the job on.
    @param one_time_hosts List of hosts not in the database to run the job on.
    @param atomic_group_name The name of an atomic group to schedule the job on.
    @param drone_set The name of the drone set to run this test on.


    @returns The created Job id number.
    """
    # Snapshot the caller-supplied arguments before binding any other local,
    # so that only the parameters above are handed to the filter below.
    job_args = locals()
    return rpc_utils.create_job_common(
        **rpc_utils.get_create_job_common_args(job_args))
Example #4
0
def create_job(name, priority, control_file, control_type,
               hosts=(), meta_hosts=(), one_time_hosts=(),
               atomic_group_name=None, synch_count=None, is_template=False,
               timeout=None, max_runtime_hrs=None, run_verify=True,
               email_list='', dependencies=(), reboot_before=None,
               reboot_after=None, parse_failed_repair=None, hostless=False,
               keyvals=None, drone_set=None):
    """\
    Create and enqueue a job.

    @param name name of this job
    @param priority Low, Medium, High, Urgent
    @param control_file String contents of the control file.
    @param control_type Type of control file, Client or Server.
    @param synch_count How many machines the job uses per autoserv execution.
    synch_count == 1 means the job is asynchronous.  If an atomic group is
    given this value is treated as a minimum.
    @param is_template If true then create a template job.
    @param timeout Hours after this call returns until the job times out.
    @param max_runtime_hrs Hours from job starting time until job times out
    @param run_verify Should the host be verified before running the test?
    @param email_list String containing emails to mail when the job is done
    @param dependencies List of label names on which this job depends
    @param reboot_before Never, If dirty, or Always
    @param reboot_after Never, If all tests passed, or Always
    @param parse_failed_repair if true, results of failed repairs launched by
    this job will be parsed as part of the job.
    @param hostless if true, create a hostless job
    @param keyvals dict of keyvals to associate with the job

    @param hosts List of hosts to run job on.
    @param meta_hosts List where each entry is a label name, and for each entry
    one host will be chosen from that label to run the job on.
    @param one_time_hosts List of hosts not in the database to run the job on.
    @param atomic_group_name The name of an atomic group to schedule the job on.
    @param drone_set The name of the drone set to run this test on.


    @returns The created Job id number.
    """
    # locals() is evaluated before common_args is bound, so the snapshot
    # contains exactly the parameters declared above.
    common_args = rpc_utils.get_create_job_common_args(locals())
    return rpc_utils.create_job_common(**common_args)
Example #5
0
def create_suite_job(name='',
                     board='',
                     build='',
                     pool='',
                     control_file='',
                     check_hosts=True,
                     num=None,
                     file_bugs=False,
                     timeout=24,
                     timeout_mins=None,
                     priority=priorities.Priority.DEFAULT,
                     suite_args=None,
                     wait_for_results=True,
                     job_retry=False,
                     max_retries=None,
                     max_runtime_mins=None,
                     suite_min_duts=0,
                     offload_failures_only=False,
                     builds=None,
                     test_source_build=None,
                     run_prod_code=False,
                     **kwargs):
    """
    Create a job to run a test suite on the given device with the given image.

    When the timeout specified in the control file is reached, the
    job is guaranteed to have completed and results will be available.

    @param name: The test name if control_file is supplied, otherwise the name
                 of the test suite to run, e.g. 'bvt'.
    @param board: the kind of device to run the tests on.
    @param build: unique name by which to refer to the image from now on.
    @param builds: the builds to install e.g.
                   {'cros-version:': 'x86-alex-release/R18-1655.0.0',
                    'fw-version:':  'x86-alex-firmware/R36-5771.50.0',
                    'fwro-version:':  'x86-alex-firmware/R36-5771.49.0'}
                   If builds is given a value, it overrides argument build.
                   Defaults to an empty dict when not supplied.
    @param test_source_build: Build that contains the server-side test code.
    @param pool: Specify the pool of machines to use for scheduling
            purposes.
    @param check_hosts: require appropriate live hosts to exist in the lab.
    @param num: Specify the number of machines to schedule across (integer).
                Leave unspecified or use None to use default sharding factor.
    @param file_bugs: File a bug on each test failure in this suite.
    @param timeout: The max lifetime of this suite, in hours.
    @param timeout_mins: The max lifetime of this suite, in minutes. Takes
                         priority over timeout.
    @param priority: Integer denoting priority. Higher is more important.
    @param suite_args: Optional arguments which will be parsed by the suite
                       control file. Used by control.test_that_wrapper to
                       determine which tests to run.
    @param wait_for_results: Set to False to run the suite job without waiting
                             for test jobs to finish. Default is True.
    @param job_retry: Set to True to enable job-level retry. Default is False.
    @param max_retries: Integer, maximum job retries allowed at suite level.
                        None for no max.
    @param max_runtime_mins: Maximum amount of time a job can be running in
                             minutes.
    @param suite_min_duts: Integer. Scheduler will prioritize getting the
                           minimum number of machines for the suite when it is
                           competing with another suite that has a higher
                           priority but already got minimum machines it needs.
    @param offload_failures_only: Only enable gs_offloading for failed jobs.
    @param run_prod_code: If True, the suite will run the test code that
                          lives in prod aka the test code currently on the
                          lab servers. If False, the control files and test
                          code for this suite run will be retrieved from the
                          build artifacts.
    @param kwargs: extra keyword args. NOT USED.

    @raises ControlFileNotFound: if a unique suite control file doesn't exist.
    @raises NoControlFileList: if we can't list the control files at all.
    @raises StageControlFileFailure: If the dev server throws 500 while
                                     staging test_suites.
    @raises ControlFileEmpty: if the control file exists on the server, but
                              can't be read.

    @return: the job ID of the suite; -1 on error.
    """
    # Avoid the shared-mutable-default-argument pitfall: a literal {} default
    # is a single dict shared by every call to this RPC.
    if builds is None:
        builds = {}

    if type(num) is not int and num is not None:
        raise error.SuiteArgumentException('Ill specified num argument %r. '
                                           'Must be an integer or None.' % num)
    if num == 0:
        logging.warning("Can't run on 0 hosts; using default.")
        num = None

    # TODO(dshi): crbug.com/496782 Remove argument build and its reference after
    # R45 falls out of stable channel.
    if build and not builds:
        builds = {provision.CROS_VERSION_PREFIX: build}
    # TODO(dshi): crbug.com/497236 Remove this check after firmware ro provision
    # is supported in Autotest.
    if provision.FW_RO_VERSION_PREFIX in builds:
        raise error.SuiteArgumentException(
            'Updating RO firmware is not supported yet.')
    # Default test source build to CrOS build if it's not specified.
    test_source_build = Suite.get_test_source_build(
        builds, test_source_build=test_source_build)

    suite_name = canonicalize_suite_name(name)
    if run_prod_code:
        # Read the suite control file from the lab server's own checkout
        # instead of staging it from build artifacts.
        ds = dev_server.ImageServer.resolve(build)
        keyvals = {}
        getter = control_file_getter.FileSystemGetter([
            _CONFIG.get_config_value('SCHEDULER',
                                     'drone_installation_directory')
        ])
        control_file = getter.get_control_file_contents_by_name(suite_name)
    else:
        (ds, keyvals) = _stage_build_artifacts(test_source_build)
    keyvals[constants.SUITE_MIN_DUTS_KEY] = suite_min_duts

    if not control_file:
        # No control file was supplied so look it up from the build artifacts.
        suite_name = canonicalize_suite_name(name)
        control_file = _get_control_file_contents_by_name(
            test_source_build, ds, suite_name)
        # Do not change this naming convention without updating
        # site_utils.parse_job_name.
        name = '%s-%s' % (test_source_build, suite_name)

    # Minutes take priority over the hour-based timeout.
    timeout_mins = timeout_mins or timeout * 60
    max_runtime_mins = max_runtime_mins or timeout * 60

    if not board:
        board = utils.ParseBuildName(builds[provision.CROS_VERSION_PREFIX])[0]

    # TODO(dshi): crbug.com/496782 Remove argument build and its reference after
    # R45 falls out of stable channel.
    # Prepend build and board to the control file.
    inject_dict = {
        'board': board,
        'build': builds.get(provision.CROS_VERSION_PREFIX),
        'builds': builds,
        'check_hosts': check_hosts,
        'pool': pool,
        'num': num,
        'file_bugs': file_bugs,
        'timeout': timeout,
        'timeout_mins': timeout_mins,
        'devserver_url': ds.url(),
        'priority': priority,
        'suite_args': suite_args,
        'wait_for_results': wait_for_results,
        'job_retry': job_retry,
        'max_retries': max_retries,
        'max_runtime_mins': max_runtime_mins,
        'offload_failures_only': offload_failures_only,
        'test_source_build': test_source_build,
        'run_prod_code': run_prod_code
    }

    control_file = tools.inject_vars(inject_dict, control_file)

    return rpc_utils.create_job_common(name,
                                       priority=priority,
                                       timeout_mins=timeout_mins,
                                       max_runtime_mins=max_runtime_mins,
                                       control_type='Server',
                                       control_file=control_file,
                                       hostless=True,
                                       keyvals=keyvals)
Example #6
0
def create_parameterized_job(name,
                             priority,
                             test,
                             parameters,
                             kernel=None,
                             label=None,
                             profilers=(),
                             profiler_parameters=None,
                             use_container=False,
                             profile_only=None,
                             upload_kernel_config=False,
                             hosts=(),
                             meta_hosts=(),
                             one_time_hosts=(),
                             atomic_group_name=None,
                             synch_count=None,
                             is_template=False,
                             timeout=None,
                             max_runtime_hrs=None,
                             run_verify=True,
                             email_list='',
                             dependencies=(),
                             reboot_before=None,
                             reboot_after=None,
                             parse_failed_repair=None,
                             hostless=False,
                             keyvals=None,
                             drone_set=None):
    """
    Creates and enqueues a parameterized job.

    Most parameters are a combination of the parameters for
    generate_control_file() and create_job(), with the exception of:

    @param test name or ID of the test to run
    @param parameters a map of parameter name ->
                          tuple of (param value, param type)
    @param profiler_parameters a dictionary of parameters for the profilers:
                                   key: profiler name
                                   value: dict of param name -> tuple of
                                                                (param value,
                                                                 param type)
    """
    # Save the values of the passed arguments here. What we're going to do with
    # them is pass them all to rpc_utils.get_create_job_common_args(), which
    # will extract the subset of these arguments that apply for
    # rpc_utils.create_job_common(), which we then pass in to that function.
    # NOTE: locals() must run before any other local is bound, so the
    # snapshot contains exactly the caller-supplied parameters.
    args = locals()

    # Set up the parameterized job configs
    test_obj = models.Test.smart_get(test)
    if test_obj.test_type == model_attributes.TestTypes.SERVER:
        control_type = models.Job.ControlType.SERVER
    else:
        control_type = models.Job.ControlType.CLIENT

    # A label that doesn't resolve is treated as "no label" rather than an
    # error.
    try:
        label = models.Label.smart_get(label)
    except models.Label.DoesNotExist:
        label = None

    kernel_objs = models.Kernel.create_kernels(kernel)
    profiler_objs = [
        models.Profiler.smart_get(profiler) for profiler in profilers
    ]

    parameterized_job = models.ParameterizedJob.objects.create(
        test=test_obj,
        label=label,
        use_container=use_container,
        profile_only=profile_only,
        upload_kernel_config=upload_kernel_config)
    parameterized_job.kernels.add(*kernel_objs)

    # Attach each requested profiler and its per-profiler parameters.
    for profiler in profiler_objs:
        parameterized_profiler = models.ParameterizedJobProfiler.objects.create(
            parameterized_job=parameterized_job, profiler=profiler)
        profiler_params = profiler_parameters.get(profiler.name, {})
        for name, (value, param_type) in profiler_params.iteritems():
            models.ParameterizedJobProfilerParameter.objects.create(
                parameterized_job_profiler=parameterized_profiler,
                parameter_name=name,
                parameter_value=value,
                parameter_type=param_type)

    # From here on, delete the partially-built parameterized job on any
    # failure so we don't leave orphaned rows behind.
    try:
        for parameter in test_obj.testparameter_set.all():
            if parameter.name in parameters:
                param_value, param_type = parameters.pop(parameter.name)
                parameterized_job.parameterizedjobparameter_set.create(
                    test_parameter=parameter,
                    parameter_value=param_value,
                    parameter_type=param_type)

        # Anything left in `parameters` was not declared by the test.
        if parameters:
            raise Exception('Extra parameters remain: %r' % parameters)

        return rpc_utils.create_job_common(
            parameterized_job=parameterized_job.id,
            control_type=control_type,
            **rpc_utils.get_create_job_common_args(args))
    except:
        parameterized_job.delete()
        raise
Example #7
0
def create_parameterized_job(name, priority, test, parameters, kernel=None,
                             label=None, profilers=(), profiler_parameters=None,
                             use_container=False, profile_only=None,
                             upload_kernel_config=False, hosts=(),
                             meta_hosts=(), one_time_hosts=(),
                             atomic_group_name=None, synch_count=None,
                             is_template=False, timeout=None,
                             max_runtime_hrs=None, run_verify=True,
                             email_list='', dependencies=(), reboot_before=None,
                             reboot_after=None, parse_failed_repair=None,
                             hostless=False, keyvals=None, drone_set=None):
    """
    Create and enqueue a parameterized job.

    Most parameters are a combination of the parameters for
    generate_control_file() and create_job(), with the exception of:

    @param test name or ID of the test to run
    @param parameters a map of parameter name ->
                          tuple of (param value, param type)
    @param profiler_parameters a dictionary of parameters for the profilers:
                                   key: profiler name
                                   value: dict of param name -> tuple of
                                                                (param value,
                                                                 param type)
    """
    # Snapshot every argument up front: rpc_utils.get_create_job_common_args()
    # later extracts the subset that rpc_utils.create_job_common() accepts.
    # locals() runs before any other local is bound, so the snapshot holds
    # exactly the caller-supplied values.
    args = locals()

    # Set up the parameterized job configs.
    test_obj = models.Test.smart_get(test)
    is_server_test = (test_obj.test_type == model_attributes.TestTypes.SERVER)
    control_type = (models.Job.ControlType.SERVER if is_server_test
                    else models.Job.ControlType.CLIENT)

    # An unresolvable label is treated as "no label" rather than an error.
    try:
        label = models.Label.smart_get(label)
    except models.Label.DoesNotExist:
        label = None

    kernel_objs = models.Kernel.create_kernels(kernel)
    profiler_objs = [models.Profiler.smart_get(p) for p in profilers]

    parameterized_job = models.ParameterizedJob.objects.create(
        test=test_obj,
        label=label,
        use_container=use_container,
        profile_only=profile_only,
        upload_kernel_config=upload_kernel_config)
    parameterized_job.kernels.add(*kernel_objs)

    # Attach each requested profiler along with its per-profiler parameters.
    for profiler_obj in profiler_objs:
        job_profiler = models.ParameterizedJobProfiler.objects.create(
            parameterized_job=parameterized_job,
            profiler=profiler_obj)
        per_profiler = profiler_parameters.get(profiler_obj.name, {})
        for param_name, (param_value, param_type) in per_profiler.iteritems():
            models.ParameterizedJobProfilerParameter.objects.create(
                parameterized_job_profiler=job_profiler,
                parameter_name=param_name,
                parameter_value=param_value,
                parameter_type=param_type)

    # Delete the partially-built parameterized job on any failure below so
    # no orphaned rows are left behind.
    try:
        for parameter in test_obj.testparameter_set.all():
            if parameter.name not in parameters:
                continue
            param_value, param_type = parameters.pop(parameter.name)
            parameterized_job.parameterizedjobparameter_set.create(
                test_parameter=parameter,
                parameter_value=param_value,
                parameter_type=param_type)

        # Anything still left in `parameters` was not declared by the test.
        if parameters:
            raise Exception('Extra parameters remain: %r' % parameters)

        return rpc_utils.create_job_common(
            parameterized_job=parameterized_job.id,
            control_type=control_type,
            **rpc_utils.get_create_job_common_args(args))
    except:
        parameterized_job.delete()
        raise