def testAnnotateFailingBuilders(self):
    """Tests that _AnnotateFailingBuilders is free of syntax errors."""
    stage = self.ConstructStage()

    annotate_mock = self.PatchObject(
        completion_stages.MasterSlaveSyncCompletionStage,
        '_AnnotateNoStatBuilders')

    failing = {'failing_build'}
    inflight = {'inflight_build'}
    no_stat = {'no_stat_build'}
    failure_message = build_failure_message.BuildFailureMessage(
        'message', [], True, 'reason', 'bot')
    statuses = {
        'failing_build': builder_status_lib.BuilderStatus(
            'failed', failure_message, 'url'),
        'inflight_build': builder_status_lib.BuilderStatus(
            'inflight', None, 'url'),
    }

    # Each invocation must delegate to _AnnotateNoStatBuilders exactly once.
    scenarios = (
        (set(), False),
        (no_stat, False),
        (no_stat, True),
    )
    for expected_calls, (no_stat_arg, self_destructed) in enumerate(
        scenarios, start=1):
        stage._AnnotateFailingBuilders(
            failing, inflight, no_stat_arg, statuses, {}, self_destructed)
        self.assertEqual(annotate_mock.call_count, expected_calls)
  def VerifyStage(self, failing=(), inflight=(), no_stat=(), alert=False,
                  stage=None, build_passed=False):
    """Runs PerformStage and verifies the expected mock interactions.

    Args:
      failing: The names of the builders that failed.
      inflight: The names of the builders that timed out.
      no_stat: The names of the builders that had status None.
      alert: If True, expects an alert email for infra failures.
      stage: If set, use this constructed stage, otherwise create own.
      build_passed: Whether the build passed or failed.
    """
    stage = stage or self.ConstructStage()

    stage._run.attrs.manifest_manager = mock.Mock()

    # Point the stage at the configs under test.
    all_slaves = set(failing) | set(inflight) | set(no_stat)
    self.PatchObject(
        stage, '_GetSlaveConfigs',
        return_value=[config_lib.BuildConfig(name=name) for name in all_slaves])

    # Build the status map the fetcher should report for each builder group.
    status_groups = (
        (failing, constants.BUILDER_STATUS_FAILED),
        (inflight, constants.BUILDER_STATUS_INFLIGHT),
        (no_stat, constants.BUILDER_STATUS_MISSING),
    )
    statuses = {}
    for names, status_const in status_groups:
      for name in names:
        statuses[name] = builder_status_lib.BuilderStatus(
            status_const, message=None)

    self.PatchObject(builder_status_lib.BuilderStatusesFetcher,
                     'GetBuilderStatuses', return_value=(statuses, {}))

    # Track whether 'HandleSuccess' is called.
    success_mock = self.PatchObject(stage, 'HandleSuccess')

    # Run the stage; a failed build must raise the fatal exception.
    if build_passed:
      stage.PerformStage()
    else:
      with self.assertRaises(completion_stages.ImportantBuilderFailedException):
        stage.PerformStage()

    # Verify the calls.
    self.assertEqual(success_mock.called, build_passed)

    if not build_passed and self._run.config.master:
      if alert:
        self.alert_email_mock.assert_called_once_with(
            mock.ANY, mock.ANY, server=mock.ANY, message=mock.ANY,
            extra_fields=mock.ANY)
 def _CreateBuilderStatusDict(self):
     """Returns representative BuilderStatus objects keyed by state name."""
     # (status constant, message) per logical builder outcome.
     spec = {
         'passed': (constants.BUILDER_STATUS_PASSED, None),
         'failed': (constants.BUILDER_STATUS_FAILED, mock.Mock()),
         'inflight': (constants.BUILDER_STATUS_INFLIGHT, mock.Mock()),
         'missing': (constants.BUILDER_STATUS_MISSING, None),
     }
     return {name: builder_status_lib.BuilderStatus(status, message)
             for name, (status, message) in spec.items()}
  def testStageRunWithImportantBuilderFailedException(self):
    """Test stage.Run on master-paladin with ImportantBuilderFailedException."""
    stage = self.ConstructStage()
    stage._run.attrs.manifest_manager = mock.MagicMock()
    # One slave still inflight and one with no status => the run is fatal.
    slave_statuses = {
        'build_1': builder_status_lib.BuilderStatus(
            constants.BUILDER_STATUS_INFLIGHT, None),
        'build_2': builder_status_lib.BuilderStatus(
            constants.BUILDER_STATUS_MISSING, None),
    }
    self.PatchObject(builder_status_lib.BuilderStatusesFetcher,
                     'GetBuilderStatuses',
                     return_value=(slave_statuses, {}))

    self.assertRaises(completion_stages.ImportantBuilderFailedException,
                      stage.Run)
 def _PatchesForGetSlaveBuilderStatus(self, status_dict):
     """Patches SlaveBuilderStatus to serve statuses derived from status_dict."""
     # Skip the real constructor; the patched lookup never reads instance state.
     self.PatchObject(builder_status_lib.SlaveBuilderStatus, '__init__',
                      return_value=None)
     message_mock = mock.Mock()
     message_mock.BuildFailureMessageToStr.return_value = 'failure_message_str'
     build_statuses = {}
     for config_name, status in status_dict.items():
         build_statuses[config_name] = builder_status_lib.BuilderStatus(
             status.status, message_mock)
     self.PatchObject(builder_status_lib.SlaveBuilderStatus,
                      'GetBuilderStatusForBuild',
                      side_effect=lambda name: build_statuses[name])
  def testAnnotateFailingExperimentalBuilders(self):
    """Tests _AnnotateFailingBuilders with experimental builders."""
    stage = self.ConstructStage()

    print_build_message_mock = self.PatchObject(
        completion_stages.MasterSlaveSyncCompletionStage,
        '_PrintBuildMessage')

    failed_msg = build_failure_message.BuildFailureMessage(
        'message', [], True, 'reason', 'bot')
    # (name, status constant, failure message) per experimental builder.
    experimental = (
        ('passed_experimental', constants.BUILDER_STATUS_PASSED, None),
        ('failing_experimental', constants.BUILDER_STATUS_FAILED, failed_msg),
        ('inflight_experimental', constants.BUILDER_STATUS_INFLIGHT, None),
    )
    experimental_statuses = {
        name: builder_status_lib.BuilderStatus(status, message, 'url')
        for name, status, message in experimental
    }

    stage._AnnotateFailingBuilders(set(), set(), set(), {},
                                   experimental_statuses, False)
    # Build message should not be printed for the passed builder.
    self.assertEqual(print_build_message_mock.call_count, 2)
  def testPerformStageWithFailedExperimentalBuilder(self):
    """Test PerformStage with a failed experimental builder."""
    stage = self.ConstructStage()
    stage._run.attrs.manifest_manager = mock.MagicMock()
    # 'build_2' is experimental, so its failure must not make the run fatal.
    stage._run.attrs.metadata.UpdateWithDict(
        {constants.METADATA_EXPERIMENTAL_BUILDERS: ['build_2']})

    status = {
        'build_1': builder_status_lib.BuilderStatus(
            constants.BUILDER_STATUS_PASSED, None)
    }
    experimental_status = {
        'build_2': builder_status_lib.BuilderStatus(
            constants.BUILDER_STATUS_FAILED, None)
    }
    self.PatchObject(builder_status_lib.BuilderStatusesFetcher,
                     'GetBuilderStatuses',
                     return_value=(status, experimental_status))
    annotate_mock = self.PatchObject(
        completion_stages.MasterSlaveSyncCompletionStage,
        '_AnnotateFailingBuilders')

    stage.PerformStage()
    annotate_mock.assert_called_once_with(
        set(), set(), set(), status, experimental_status, False)
  def testPerformStageWithFatalFailure(self):
    """Test PerformStage on master-paladin."""
    stage = self.ConstructStage()

    stage._run.attrs.manifest_manager = mock.MagicMock()

    statuses = {
        'build_1': builder_status_lib.BuilderStatus(
            constants.BUILDER_STATUS_INFLIGHT, None),
        'build_2': builder_status_lib.BuilderStatus(
            constants.BUILDER_STATUS_MISSING, None)
    }

    self.PatchObject(builder_status_lib.BuilderStatusesFetcher,
                     'GetBuilderStatuses', return_value=(statuses, {}))
    annotate_mock = self.PatchObject(
        completion_stages.MasterSlaveSyncCompletionStage,
        '_AnnotateFailingBuilders')

    def _RunAndVerify(self_destructed):
      # PerformStage must raise, then annotate and handle the failure once
      # with the expected self_destructed flag.
      with self.assertRaises(
          completion_stages.ImportantBuilderFailedException):
        stage.PerformStage()
      annotate_mock.assert_called_once_with(
          set(), {'build_1'}, {'build_2'}, statuses, {}, self_destructed)
      self.mock_handle_failure.assert_called_once_with(
          set(), {'build_1'}, {'build_2'}, self_destructed)

    _RunAndVerify(False)

    # Second pass: same failure, but the build self-destructed.
    annotate_mock.reset_mock()
    self.mock_handle_failure.reset_mock()
    stage._run.attrs.metadata.UpdateWithDict(
        {constants.SELF_DESTRUCTED_BUILD: True})
    _RunAndVerify(True)
# Example #9 (code-sharing-site scraping artifact: "예제 #9" marker and vote
# count "0"; kept as a comment so the file stays syntactically valid)
    def VerifyStage(self,
                    failing,
                    inflight,
                    no_stat,
                    handle_failure=False,
                    handle_timeout=False,
                    sane_tot=True,
                    stage=None,
                    all_slaves=None,
                    slave_stages=None,
                    fatal=True,
                    self_destructed=False):
        """Runs and Verifies PerformStage.

        Args:
          failing: The names of the builders that failed.
          inflight: The names of the builders that timed out.
          no_stat: The names of the builders that had no status.
          handle_failure: If True, calls HandleValidationFailure.
          handle_timeout: If True, calls HandleValidationTimeout.
          sane_tot: If not true, assumes TOT is not sane.
          stage: If set, use this constructed stage, otherwise create own.
          all_slaves: Optional set of all slave configs.
          slave_stages: Optional list of slave stages.
          fatal: Optional boolean indicating whether the completion_stage
            failed with fatal. Default to True.
          self_destructed: Optional boolean indicating whether the
            completion_stage self_destructed. Default to False.
        """
        if not stage:
            stage = self.ConstructStage()

        stage._run.attrs.metadata.UpdateWithDict(
            {constants.SELF_DESTRUCTED_BUILD: self_destructed})

        # Setup the stage to look at the specified configs.
        # BUGFIX: capture the caller-supplied argument before rebinding
        # `all_slaves` below.  The previous code tested the rebound local
        # (always a non-empty, truthy list), so `all_started_slaves` silently
        # equaled `all_slaves` and wrongly included no_stat builders that
        # never started.
        given_slaves = all_slaves
        all_slaves = list(given_slaves or set(failing + inflight + no_stat))
        all_started_slaves = list(given_slaves or set(failing + inflight))
        configs = [config_lib.BuildConfig(name=x) for x in all_slaves]
        self.PatchObject(stage, '_GetSlaveConfigs', return_value=configs)

        statuses = {}
        for x in failing:
            statuses[x] = builder_status_lib.BuilderStatus(
                constants.BUILDER_STATUS_FAILED, message=None)
        for x in inflight:
            statuses[x] = builder_status_lib.BuilderStatus(
                constants.BUILDER_STATUS_INFLIGHT, message=None)
        for x in no_stat:
            statuses[x] = builder_status_lib.BuilderStatus(
                constants.BUILDER_STATUS_MISSING, message=None)
        self.completion_stage.GetSlaveStatuses.return_value = statuses
        self.completion_stage.GetFatal.return_value = fatal

        # Setup DB and provide list of slave stages.
        mock_cidb = mock.MagicMock()
        cidb.CIDBConnectionFactory.SetupMockCidb(mock_cidb)
        if slave_stages is None:
            # Default: mark every critical stage as passed for each slave
            # that actually started.
            slave_stages = []
            critical_stages = (
                relevant_changes.TriageRelevantChanges.STAGE_SYNC)
            for stage_name, slave in itertools.product(critical_stages,
                                                       all_started_slaves):
                slave_stages.append({
                    'name': stage_name,
                    'build_config': slave,
                    'status': constants.BUILDER_STATUS_PASSED
                })
        self.PatchObject(mock_cidb,
                         'GetSlaveStages',
                         return_value=slave_stages)

        # Set up SubmitPartialPool to provide a list of changes to look at.
        self.PatchObject(stage.sync_stage.pool,
                         'SubmitPartialPool',
                         return_value=self.other_changes)

        # Actually run the stage.
        stage.PerformStage()

        # A fatal run must not submit anything; a non-fatal run submits the
        # remaining pool and records success metrics.
        if fatal:
            stage.sync_stage.pool.submit_pool_mock.assert_not_called()
            self.mock_record_metrics.assert_called_once_with(False)
        else:
            stage.sync_stage.pool.submit_pool_mock.assert_called_once_with(
                reason=constants.STRATEGY_CQ_SUCCESS)
            self.mock_record_metrics.assert_called_once_with(True)

        if handle_failure:
            stage.sync_stage.pool.handle_failure_mock.assert_called_once_with(
                mock.ANY,
                no_stat=set(no_stat),
                sanity=sane_tot,
                changes=self.other_changes,
                failed_hwtests=mock.ANY)

        if handle_timeout:
            stage.sync_stage.pool.handle_timeout_mock.assert_called_once_with(
                sanity=mock.ANY, changes=self.other_changes)
# Example #10 (code-sharing-site scraping artifact: "예제 #10" marker and vote
# count "0"; kept as a comment so the file stays syntactically valid)
def main(argv):
    """Cbuildbot entry point.

    Parses the command line, validates the host environment, sets up logging,
    cleanup handlers and timeouts, then runs the requested build stages.

    Args:
      argv: Command-line arguments (excluding the program name).
    """
    # We get false positives with the options object.
    # pylint: disable=attribute-defined-outside-init

    # Turn on strict sudo checks.
    cros_build_lib.STRICT_SUDO = True

    # Set umask to 022 so files created by buildbot are readable.
    os.umask(0o22)

    parser = _CreateParser()
    options = ParseCommandLine(parser, argv)

    # Fetch our site_config now, because we need it to do anything else.
    site_config = config_lib.GetConfig()

    _PostParseCheck(parser, options, site_config)

    cros_build_lib.AssertOutsideChroot()

    if options.enable_buildbot_tags:
        logging.EnableBuildbotMarkers()

    if (options.buildbot and not options.debug
            and not options.build_config_name == constants.BRANCH_UTIL_CONFIG
            and not cros_build_lib.HostIsCIBuilder()):
        # --buildbot can only be used on a real builder, unless it's debug, or
        # 'branch-util'.
        cros_build_lib.Die('This host is not a supported build machine.')

    # Only one config arg is allowed in this mode, which was confirmed earlier.
    build_config = site_config[options.build_config_name]

    # TODO: Re-enable this block when reference_repo support handles this
    #       properly. (see chromium:330775)
    # if options.reference_repo is None:
    #   repo_path = os.path.join(options.sourceroot, '.repo')
    #   # If we're being run from a repo checkout, reuse the repo's git pool to
    #   # cut down on sync time.
    #   if os.path.exists(repo_path):
    #     options.reference_repo = options.sourceroot

    # Validate the user-supplied reference repo before any real work starts.
    if options.reference_repo:
        if not os.path.exists(options.reference_repo):
            parser.error('Reference path %s does not exist' %
                         (options.reference_repo, ))
        elif not os.path.exists(os.path.join(options.reference_repo, '.repo')):
            parser.error('Reference path %s does not look to be the base of a '
                         'repo checkout; no .repo exists in the root.' %
                         (options.reference_repo, ))

    # Buildbot/remote-trybot runs require cgroups and a few host binaries.
    if (options.buildbot or options.remote_trybot) and not options.resume:
        if not options.cgroups:
            parser.error(
                'Options --buildbot/--remote-trybot and --nocgroups cannot '
                'be used together.  Cgroup support is required for '
                'buildbot/remote-trybot mode.')
        if not cgroups.Cgroup.IsSupported():
            parser.error(
                'Option --buildbot/--remote-trybot was given, but this '
                'system does not support cgroups.  Failing.')

        missing = osutils.FindMissingBinaries(_BUILDBOT_REQUIRED_BINARIES)
        if missing:
            parser.error(
                'Option --buildbot/--remote-trybot requires the following '
                "binaries which couldn't be found in $PATH: %s" %
                (', '.join(missing)))

    if options.reference_repo:
        options.reference_repo = os.path.abspath(options.reference_repo)

    # Sanity check of buildroot- specifically that it's not pointing into the
    # midst of an existing repo since git-repo doesn't support nesting.
    if (not repository.IsARepoRoot(options.buildroot)
            and git.FindRepoDir(options.buildroot)):
        cros_build_lib.Die(
            'Configured buildroot %s is a subdir of an existing repo checkout.'
            % options.buildroot)

    if not options.log_dir:
        options.log_dir = os.path.join(options.buildroot, _DEFAULT_LOG_DIR)

    log_file = None
    if options.tee:
        log_file = os.path.join(options.log_dir, _BUILDBOT_LOG_FILE)
        osutils.SafeMakedirs(options.log_dir)
        _BackupPreviousLog(log_file)

    with cros_build_lib.ContextManagerStack() as stack:
        options.preserve_paths = set()
        if log_file is not None:
            # We don't want the critical section to try to clean up the tee process,
            # so we run Tee (forked off) outside of it. This prevents a deadlock
            # because the Tee process only exits when its pipe is closed, and the
            # critical section accidentally holds on to that file handle.
            stack.Add(tee.Tee, log_file)
            options.preserve_paths.add(_DEFAULT_LOG_DIR)

        critical_section = stack.Add(cleanup.EnforcedCleanupSection)
        stack.Add(sudo.SudoKeepAlive)

        if not options.resume:
            # If we're in resume mode, use our parents tempdir rather than
            # nesting another layer.
            stack.Add(osutils.TempDir, prefix='cbuildbot-tmp', set_global=True)
            logging.debug('Cbuildbot tempdir is %r.', os.environ.get('TMP'))

        if options.cgroups:
            stack.Add(cgroups.SimpleContainChildren, 'cbuildbot')

        # Mark everything between EnforcedCleanupSection and here as having to
        # be rolled back via the contextmanager cleanup handlers.  This
        # ensures that sudo bits cannot outlive cbuildbot, that anything
        # cgroups would kill gets killed, etc.
        stack.Add(critical_section.ForkWatchdog)

        if options.mock_tree_status is not None:
            stack.Add(_ObjectMethodPatcher,
                      tree_status,
                      '_GetStatus',
                      return_value=options.mock_tree_status)

        if options.mock_slave_status is not None:
            # NOTE(review): pickle.load is unsafe on untrusted input; this is
            # tolerable only because the mock-status file is operator-supplied.
            # NOTE(review): dict.iteritems() is Python 2-only; switch to
            # items() if this file ever moves to Python 3.
            with open(options.mock_slave_status, 'r') as f:
                mock_statuses = pickle.load(f)
                for key, value in mock_statuses.iteritems():
                    mock_statuses[key] = builder_status_lib.BuilderStatus(
                        **value)
            stack.Add(_ObjectMethodPatcher,
                      completion_stages.MasterSlaveSyncCompletionStage,
                      '_FetchSlaveStatuses',
                      return_value=mock_statuses)

        stack.Add(_SetupConnections, options, build_config)
        retry_stats.SetupStats()

        timeout_display_message = None
        # For master-slave builds: Update slave's timeout using master's published
        # deadline.
        if options.buildbot and options.master_build_id is not None:
            slave_timeout = None
            if cidb.CIDBConnectionFactory.IsCIDBSetup():
                cidb_handle = cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder(
                )
                if cidb_handle:
                    slave_timeout = cidb_handle.GetTimeToDeadline(
                        options.master_build_id)

            if slave_timeout is not None:
                # We artificially set a minimum slave_timeout because '0' is handled
                # specially, and because we don't want to timeout while trying to set
                # things up.
                slave_timeout = max(slave_timeout, 20)
                if options.timeout == 0 or slave_timeout < options.timeout:
                    logging.info(
                        'Updating slave build timeout to %d seconds enforced '
                        'by the master', slave_timeout)
                    options.timeout = slave_timeout
                    timeout_display_message = (
                        'This build has reached the timeout deadline set by the master. '
                        'Either this stage or a previous one took too long (see stage '
                        'timing historical summary in ReportStage) or the build failed '
                        'to start on time.')
            else:
                logging.warning(
                    'Could not get master deadline for master-slave build. '
                    'Can not set slave timeout.')

        if options.timeout > 0:
            stack.Add(timeout_util.FatalTimeout, options.timeout,
                      timeout_display_message)
        try:
            _RunBuildStagesWrapper(options, site_config, build_config)
        except failures_lib.ExitEarlyException as ex:
            # This build finished successfully. Do not re-raise ExitEarlyException.
            logging.info('One stage exited early: %s', ex)