Example #1
    def PerformStage(self):
        """Do the work of generating our release payloads."""
        # Convert to release tools naming for boards.
        board = self._current_board.replace('_', '-')
        version = self._run.attrs.release_tag

        assert version, "We can't generate payloads without a release_tag."
        logging.info("Generating payloads for: %s, %s", board, version)

        # Test to see if the current board has a Paygen configuration. We do
        # this here, not in the sub-process so we don't have to pass back a
        # failure reason.
        try:
            paygen_build_lib.ValidateBoardConfig(board)
        except paygen_build_lib.BoardNotConfigured:
            raise PaygenNoPaygenConfigForBoard(
                'Golden Eye (%s) has no entry for board %s. Get a TPM to fix.'
                % (paygen_build_lib.PAYGEN_URI, board))

        # Default to False, set to True if it's a canary type build
        skip_duts_check = False
        if config_lib.IsCanaryType(self._run.config.build_type):
            skip_duts_check = True

        with parallel.BackgroundTaskRunner(
                self._RunPaygenInProcess) as per_channel:
            logging.info("Using channels: %s", self.channels)

            # If we have an explicit list of channels, use it.
            for channel in self.channels:
                per_channel.put((channel, board, version, self._run.debug,
                                 self._run.config.paygen_skip_testing,
                                 self._run.config.paygen_skip_delta_payloads,
                                 skip_duts_check))
Example #2
  def PerformStage(self):
    if not config_lib.IsCanaryType(self._run.config.build_type):
      logging.info('This stage runs only in release builders.')
      return

    # Get the Android versions set by AndroidMetadataStage.
    version_dict = self._run.attrs.metadata.GetDict().get('version', {})
    android_build_branch = version_dict.get('android-branch')
    android_version = version_dict.get('android')

    # On boards not supporting Android, versions will be None.
    if not (android_build_branch and android_version):
      logging.info('Android is not enabled on this board. Skipping.')
      return

    logging.info(
        'Downloading symbols of Android %s (%s)...',
        android_version, android_build_branch)

    arch = self._run.DetermineAndroidABI(self._current_board)

    symbols_file_url = constants.ANDROID_SYMBOLS_URL_TEMPLATE % {
        'branch': android_build_branch,
        'arch': arch,
        'version': android_version}
    symbols_file = os.path.join(self.archive_path,
                                constants.ANDROID_SYMBOLS_FILE)
    gs_context = gs.GSContext()
    gs_context.Copy(symbols_file_url, symbols_file)
Example #3
    def GetSyncInstance(self):
        """Syncs the tree using one of the distributed sync logic paths.

    Returns:
      The instance of the sync stage to run.
    """
        # Determine sync class to use.  CQ overrides PFQ bits so should check it
        # first.
        if config_lib.IsCanaryType(self._run.config.build_type):
            sync_stage = self._GetStageInstance(
                sync_stages.ManifestVersionedSyncStage)
            self.completion_stage_class = (
                completion_stages.CanaryCompletionStage)
        elif (config_lib.IsPFQType(self._run.config.build_type)
              or self._run.config.build_type
              in (constants.TOOLCHAIN_TYPE, constants.FULL_TYPE,
                  constants.INCREMENTAL_TYPE)):
            sync_stage = self._GetStageInstance(
                sync_stages.MasterSlaveLKGMSyncStage)
            self.completion_stage_class = (
                completion_stages.MasterSlaveSyncCompletionStage)
        else:
            sync_stage = self._GetStageInstance(
                sync_stages.ManifestVersionedSyncStage)
            self.completion_stage_class = (
                completion_stages.ManifestVersionedSyncCompletionStage)

        self.sync_stage = sync_stage
        return self.sync_stage
Example #4
  def PerformStage(self):
    if self.suite_config.suite == constants.HWTEST_AFDO_SUITE:
      arch = self._GetPortageEnvVar('ARCH', self._current_board)
      cpv = portage_util.PortageqBestVisible(
          constants.CHROME_CP, cwd=self._build_root)
      # For async AFDO builders, need to skip this check because it's checking
      # a different bucket for PFQ AFDO. Also for async AFDO builders, no need
      # to check here because there's an earlier check to avoid generating
      # AFDO for the same version.
      if not self._run.config.afdo_generate_async and \
         afdo.CheckAFDOPerfData(cpv, arch, gs.GSContext()):
        logging.info(
            'AFDO profile already generated for arch %s '
            'and Chrome %s. Not generating it again', arch,
            cpv.version_no_rev.split('_')[0])
        return

    build = '/'.join([self._bot_id, self.version])

    skip_duts_check = False
    if config_lib.IsCanaryType(self._run.config.build_type):
      skip_duts_check = True

    cmd_result = commands.RunHWTestSuite(
        build,
        self.suite_config.suite,
        self._board_name,
        model=self._model,
        pool=self.suite_config.pool,
        file_bugs=self.suite_config.file_bugs,
        wait_for_results=self.wait_for_results,
        priority=self.suite_config.priority,
        timeout_mins=self.suite_config.timeout_mins,
        retry=self.suite_config.retry,
        max_retries=self.suite_config.max_retries,
        minimum_duts=self.suite_config.minimum_duts,
        suite_min_duts=self.suite_config.suite_min_duts,
        suite_args=self.suite_config.suite_args,
        offload_failures_only=self.suite_config.offload_failures_only,
        debug=not self.TestsEnabled(self._run),
        skip_duts_check=skip_duts_check,
        job_keyvals=self.GetJobKeyvals(),
        test_args=None)

    if cmd_result.to_raise:
      raise cmd_result.to_raise
Example #5
    def testNoDuplicateCanaryBuildersOnWaterfall(self):
        seen = {}
        for config in self.site_config.itervalues():
            waterfall = config['active_waterfall']
            btype = config['build_type']
            if not (waterfall and config_lib.IsCanaryType(btype)):
                continue

            waterfall_seen = seen.setdefault(waterfall, set())
            stack = [config]
            while stack:
                current_config = stack.pop()
                self.assertNotIn(
                    current_config['name'], waterfall_seen,
                    "Multiple builders for '%s' on '%s' waterfall" %
                    (current_config['name'], waterfall))
                waterfall_seen.add(current_config['name'])
                stack += current_config['child_configs']
Example #6
    def GetSyncInstance(self):
        """Syncs the tree using one of the distributed sync logic paths.

    Returns:
      The instance of the sync stage to run.
    """
        # Determine sync class to use.  CQ overrides PFQ bits so should check it
        # first.
        if self._run.config.pre_cq:
            sync_stage = self._GetStageInstance(sync_stages.PreCQSyncStage,
                                                self.patch_pool.gerrit_patches)
            self.completion_stage_class = completion_stages.PreCQCompletionStage
            self.patch_pool.gerrit_patches = []
        elif config_lib.IsCQType(self._run.config.build_type):
            if self._run.config.do_not_apply_cq_patches:
                sync_stage = self._GetStageInstance(
                    sync_stages.MasterSlaveLKGMSyncStage)
            else:
                sync_stage = self._GetStageInstance(
                    sync_stages.CommitQueueSyncStage)
            self.completion_stage_class = completion_stages.CommitQueueCompletionStage
        elif config_lib.IsPFQType(self._run.config.build_type):
            sync_stage = self._GetStageInstance(
                sync_stages.MasterSlaveLKGMSyncStage)
            self.completion_stage_class = (
                completion_stages.MasterSlaveSyncCompletionStage)
        elif config_lib.IsCanaryType(self._run.config.build_type):
            sync_stage = self._GetStageInstance(
                sync_stages.ManifestVersionedSyncStage)
            self.completion_stage_class = (
                completion_stages.CanaryCompletionStage)
        elif self._run.config.build_type == constants.TOOLCHAIN_TYPE:
            sync_stage = self._GetStageInstance(
                sync_stages.MasterSlaveLKGMSyncStage)
            self.completion_stage_class = (
                completion_stages.MasterSlaveSyncCompletionStage)
        else:
            sync_stage = self._GetStageInstance(
                sync_stages.ManifestVersionedSyncStage)
            self.completion_stage_class = (
                completion_stages.ManifestVersionedSyncCompletionStage)

        self.sync_stage = sync_stage
        return self.sync_stage
Example #7
    def GetSyncInstance(self):
        """Syncs the tree using one of the distributed sync logic paths.

    Returns:
      The instance of the sync stage to run.
    """
        # Determine sync class to use.  CQ overrides PFQ bits so should check it
        # first.
        if self._run.config.pre_cq:
            assert False, 'Pre-CQ no longer supported'
        elif config_lib.IsCQType(self._run.config.build_type):
            assert False, 'Legacy CQ no longer supported'
            if self._run.config.do_not_apply_cq_patches:
                sync_stage = self._GetStageInstance(
                    sync_stages.MasterSlaveLKGMSyncStage)
        elif config_lib.IsCanaryType(self._run.config.build_type):
            sync_stage = self._GetStageInstance(
                sync_stages.ManifestVersionedSyncStage)
            self.completion_stage_class = (
                completion_stages.CanaryCompletionStage)
        elif self._run.config.build_type == constants.CHROME_PFQ_TYPE:
            assert False, 'Chrome PFQ no longer supported'
        elif (config_lib.IsPFQType(self._run.config.build_type)
              or self._run.config.build_type
              in (constants.TOOLCHAIN_TYPE, constants.FULL_TYPE,
                  constants.INCREMENTAL_TYPE, constants.POSTSUBMIT_TYPE)):
            sync_stage = self._GetStageInstance(
                sync_stages.MasterSlaveLKGMSyncStage)
            self.completion_stage_class = (
                completion_stages.MasterSlaveSyncCompletionStage)
        else:
            sync_stage = self._GetStageInstance(
                sync_stages.ManifestVersionedSyncStage)
            self.completion_stage_class = (
                completion_stages.ManifestVersionedSyncCompletionStage)

        self.sync_stage = sync_stage
        return self.sync_stage
Example #8
    def PerformStage(self):
        if self.suite_config.suite == constants.HWTEST_AFDO_SUITE:
            arch = self._GetPortageEnvVar('ARCH', self._current_board)
            cpv = portage_util.PortageqBestVisible(constants.CHROME_CP,
                                                   cwd=self._build_root)
            # For async AFDO builders, need to skip this check because it's checking
            # a different bucket for PFQ AFDO. Also for async AFDO builders, no need
            # to check here because there's an earlier check to avoid generating
            # AFDO for the same version.
            if not self._run.config.afdo_generate_async and \
               afdo.CheckAFDOPerfData(cpv, arch, gs.GSContext()):
                logging.info(
                    'AFDO profile already generated for arch %s '
                    'and Chrome %s. Not generating it again', arch,
                    cpv.version_no_rev.split('_')[0])
                return

        if self.suite_config.suite in [
                constants.HWTEST_CTS_QUAL_SUITE,
                constants.HWTEST_GTS_QUAL_SUITE
        ]:
            # Increase the priority for CTS/GTS qualification suite as we want stable
            # build to have higher priority than beta build (again higher than dev).
            try:
                cros_vers = self._run.GetVersionInfo().VersionString().split(
                    '.')
                # Convert priority to corresponding integer value.
                self.suite_config.priority = constants.HWTEST_PRIORITIES_MAP[
                    self.suite_config.priority]
                # We add 1/10 of the branch version to the priority. This results in a
                # modest priority bump the older the branch is. Typically beta priority
                # would be dev + [1..4] and stable priority dev + [5..9].
                self.suite_config.priority += int(
                    math.ceil(float(cros_vers[1]) / 10.0))
            except cbuildbot_run.VersionNotSetError:
                logging.debug(
                    'Could not obtain version info. %s will use initial '
                    'priority value: %s', self.suite_config.suite,
                    self.suite_config.priority)

        build = '/'.join([self._bot_id, self.version])

        skip_duts_check = False
        if config_lib.IsCanaryType(self._run.config.build_type):
            skip_duts_check = True

        cmd_result = commands.RunHWTestSuite(
            build,
            self.suite_config.suite,
            self._board_name,
            model=self._model,
            pool=self.suite_config.pool,
            file_bugs=self.suite_config.file_bugs,
            wait_for_results=self.wait_for_results,
            priority=self.suite_config.priority,
            timeout_mins=self.suite_config.timeout_mins,
            retry=self.suite_config.retry,
            max_retries=self.suite_config.max_retries,
            minimum_duts=self.suite_config.minimum_duts,
            suite_min_duts=self.suite_config.suite_min_duts,
            suite_args=self.suite_config.suite_args,
            offload_failures_only=self.suite_config.offload_failures_only,
            debug=not self.TestsEnabled(self._run),
            skip_duts_check=skip_duts_check,
            job_keyvals=self.GetJobKeyvals(),
            test_args=None)

        if cmd_result.to_raise:
            raise cmd_result.to_raise
Example #9
  def PerformStage(self):
    if self.suite_config.suite == constants.HWTEST_AFDO_SUITE:
      arch = self._GetPortageEnvVar('ARCH', self._current_board)
      cpv = portage_util.BestVisible(constants.CHROME_CP,
                                     buildroot=self._build_root)
      if afdo.CheckAFDOPerfData(cpv, arch, gs.GSContext()):
        logging.info('AFDO profile already generated for arch %s '
                     'and Chrome %s. Not generating it again',
                     arch, cpv.version_no_rev.split('_')[0])
        return

    build = '/'.join([self._bot_id, self.version])
    if (self._run.options.remote_trybot and (self._run.options.hwtest or
                                             self._run.config.pre_cq)):
      debug = self._run.options.debug_forced
    else:
      debug = self._run.options.debug

    # Get the subsystems set for the board to test
    per_board_dict = self._run.attrs.metadata.GetDict()['board-metadata']
    current_board_dict = per_board_dict.get(self._current_board)
    if current_board_dict:
      subsystems = set(current_board_dict.get('subsystems_to_test', []))
      # 'subsystem:all' indicates to skip the subsystem logic
      if 'all' in subsystems:
        subsystems = None
    else:
      subsystems = None

    skip_duts_check = False
    if config_lib.IsCanaryType(self._run.config.build_type):
      skip_duts_check = True

    build_id, db = self._run.GetCIDBHandle()
    cmd_result = commands.RunHWTestSuite(
        build, self.suite_config.suite, self._current_board,
        pool=self.suite_config.pool, num=self.suite_config.num,
        file_bugs=self.suite_config.file_bugs,
        wait_for_results=self.wait_for_results,
        priority=self.suite_config.priority,
        timeout_mins=self.suite_config.timeout_mins,
        retry=self.suite_config.retry,
        max_retries=self.suite_config.max_retries,
        minimum_duts=self.suite_config.minimum_duts,
        suite_min_duts=self.suite_config.suite_min_duts,
        offload_failures_only=self.suite_config.offload_failures_only,
        debug=debug, subsystems=subsystems, skip_duts_check=skip_duts_check)
    subsys_tuple = self.GenerateSubsysResult(cmd_result.json_dump_result,
                                             subsystems)
    if db:
      if not subsys_tuple:
        db.InsertBuildMessage(build_id, message_type=constants.SUBSYSTEMS,
                              message_subtype=constants.SUBSYSTEM_UNUSED,
                              board=self._current_board)
      else:
        logging.info('pass_subsystems: %s, fail_subsystems: %s',
                     subsys_tuple[0], subsys_tuple[1])
        for s in subsys_tuple[0]:
          db.InsertBuildMessage(build_id, message_type=constants.SUBSYSTEMS,
                                message_subtype=constants.SUBSYSTEM_PASS,
                                message_value=str(s), board=self._current_board)
        for s in subsys_tuple[1]:
          db.InsertBuildMessage(build_id, message_type=constants.SUBSYSTEMS,
                                message_subtype=constants.SUBSYSTEM_FAIL,
                                message_value=str(s), board=self._current_board)
    if cmd_result.to_raise:
      raise cmd_result.to_raise
Example #10
  def PerformStage(self):
    if self.suite_config.suite == constants.HWTEST_AFDO_SUITE:
      arch = self._GetPortageEnvVar('ARCH', self._current_board)
      cpv = portage_util.BestVisible(constants.CHROME_CP,
                                     buildroot=self._build_root)
      if afdo.CheckAFDOPerfData(cpv, arch, gs.GSContext()):
        logging.info('AFDO profile already generated for arch %s '
                     'and Chrome %s. Not generating it again',
                     arch, cpv.version_no_rev.split('_')[0])
        return

    if self.suite_config.suite in [constants.HWTEST_CTS_FOLLOWER_SUITE,
                                   constants.HWTEST_CTS_QUAL_SUITE,
                                   constants.HWTEST_GTS_QUAL_SUITE]:
      # Increase the priority for CTS/GTS qualification suite as we want stable
      # build to have higher priority than beta build (again higher than dev).
      try:
        cros_vers = self._run.GetVersionInfo().VersionString().split('.')
        if not isinstance(self.suite_config.priority, (int, long)):
          # Convert CTS/GTS priority to corresponding integer value.
          self.suite_config.priority = constants.HWTEST_PRIORITIES_MAP[
              self.suite_config.priority]
        # We add 1/10 of the branch version to the priority. This results in a
        # modest priority bump the older the branch is. Typically beta priority
        # would be dev + [1..4] and stable priority dev + [5..9].
        self.suite_config.priority += int(math.ceil(float(cros_vers[1]) / 10.0))
      except cbuildbot_run.VersionNotSetError:
        logging.debug('Could not obtain version info. %s will use initial '
                      'priority value: %s', self.suite_config.suite,
                      self.suite_config.priority)

    build = '/'.join([self._bot_id, self.version])

    # Get the subsystems set for the board to test
    if self.suite_config.suite == constants.HWTEST_PROVISION_SUITE:
      subsystems = set()
    else:
      subsystems = self._GetSubsystems()

    skip_duts_check = False
    if config_lib.IsCanaryType(self._run.config.build_type):
      skip_duts_check = True

    build_id, db = self._run.GetCIDBHandle()

    test_args = None
    if config_lib.IsCQType(self._run.config.build_type):
      test_args = {'fast': 'True'}

    cmd_result = commands.RunHWTestSuite(
        build, self.suite_config.suite, self._board_name,
        model=self._model,
        pool=self.suite_config.pool,
        file_bugs=self.suite_config.file_bugs,
        wait_for_results=self.wait_for_results,
        priority=self.suite_config.priority,
        timeout_mins=self.suite_config.timeout_mins,
        retry=self.suite_config.retry,
        max_retries=self.suite_config.max_retries,
        minimum_duts=self.suite_config.minimum_duts,
        suite_min_duts=self.suite_config.suite_min_duts,
        suite_args=self.suite_config.suite_args,
        offload_failures_only=self.suite_config.offload_failures_only,
        debug=not self.TestsEnabled(self._run),
        subsystems=subsystems,
        skip_duts_check=skip_duts_check,
        job_keyvals=self.GetJobKeyvals(),
        test_args=test_args)

    if config_lib.IsCQType(self._run.config.build_type):
      self.ReportHWTestResults(cmd_result.json_dump_result, build_id, db)

    subsys_tuple = self.GenerateSubsysResult(cmd_result.json_dump_result,
                                             subsystems)
    if db:
      if not subsys_tuple:
        db.InsertBuildMessage(build_id, message_type=constants.SUBSYSTEMS,
                              message_subtype=constants.SUBSYSTEM_UNUSED,
                              board=self._current_board)
      else:
        logging.info('pass_subsystems: %s, fail_subsystems: %s',
                     subsys_tuple[0], subsys_tuple[1])
        for s in subsys_tuple[0]:
          db.InsertBuildMessage(build_id, message_type=constants.SUBSYSTEMS,
                                message_subtype=constants.SUBSYSTEM_PASS,
                                message_value=str(s), board=self._current_board)
        for s in subsys_tuple[1]:
          db.InsertBuildMessage(build_id, message_type=constants.SUBSYSTEMS,
                                message_subtype=constants.SUBSYSTEM_FAIL,
                                message_value=str(s), board=self._current_board)
    if cmd_result.to_raise:
      raise cmd_result.to_raise
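A recurring pattern in these examples initializes `skip_duts_check` to False and then flips it in an if-statement. Since `config_lib.IsCanaryType` already returns a boolean, the same logic can be expressed as a single assignment; this sketch reuses the names that appear in the examples above.

# Equivalent to the recurring pattern:
#   skip_duts_check = False
#   if config_lib.IsCanaryType(self._run.config.build_type):
#     skip_duts_check = True
skip_duts_check = config_lib.IsCanaryType(self._run.config.build_type)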