def main(argv):
  """Sync a repo checkout per the command line, then apply gerrit patches.

  Args:
    argv: Command line arguments (excluding the program name).

  Returns:
    1 if any gerrit patch failed to apply; otherwise None.
  """
  parser = GetParser()
  options = parser.parse_args(argv)
  options.Freeze()

  local_manifest = ResolveLocalManifestPath(options)
  if local_manifest:
    logging.info('Using local_manifest: %s', local_manifest)

  # An explicit --manifest-url wins; otherwise pick the external or internal
  # site default.
  if options.manifest_url:
    manifest_url = options.manifest_url
  else:
    site_params = config_lib.GetSiteParams()
    manifest_url = (site_params.MANIFEST_URL if options.external
                    else site_params.MANIFEST_INT_URL)

  osutils.SafeMakedirs(options.repo_root)
  repo = repository.RepoRepository(
      manifest_repo_url=manifest_url,
      directory=options.repo_root,
      branch=options.branch,
      git_cache_dir=options.git_cache_dir,
      repo_url=options.repo_url,
      groups=options.groups)

  # Seed from an existing checkout, if one was provided.
  if options.copy_repo:
    repo.PreLoad(options.copy_repo)

  # Clean up a pre-existing checkout before syncing onto it.
  if repository.IsARepoRoot(options.repo_root):
    repo.BuildRootGitCleanup(prune_all=True)

  repo.Sync(local_manifest=local_manifest, detach=True)

  if not options.gerrit_patches:
    return

  patches = gerrit.GetGerritPatchInfo(options.gerrit_patches)
  # TODO: Extract patches from manifest synced.

  helper_pool = patch_series.HelperPool.SimpleCreate(
      cros_internal=not options.external, cros=True)

  series = patch_series.PatchSeries(
      path=options.repo_root, helper_pool=helper_pool, forced_manifest=None)

  _, failed_tot, failed_inflight = series.Apply(patches)

  failed = failed_tot + failed_inflight
  if failed:
    logging.error('Failed to apply: %s', ', '.join(str(p) for p in failed))
    return 1
# --- Example #2 (replaces stray scraper separator "Beispiel #2" / "0") ---
    def __init__(self, options, bots, local_patches):
        """Initialize a remote tryjob request.

        Args:
          options: The parsed options passed into cbuildbot.
          bots: A list of configs to run tryjobs for.
          local_patches: A list of LocalPatch objects.
        """
        self.options = options
        self.use_buildbucket = options.use_buildbucket
        self.user = getpass.getuser()
        self.repo_cache = cache.DiskCache(self.options.cache_dir)
        # Attribute the job to the email configured for this script's checkout.
        script_dir = os.path.dirname(os.path.realpath(__file__))
        self.user_email = git.GetProjectUserEmail(script_dir)
        logging.info('Using email:%s', self.user_email)

        # Name of the job that appears on the waterfall.
        all_patches = options.gerrit_patches + options.local_patches
        self.name = options.remote_description
        if self.name is None:
            # Build a default description: optional branch tag, a capped list
            # of patches, and an overflow note when the cap was exceeded.
            prefix = '' if options.branch == 'master' else '[%s] ' % options.branch
            shown = all_patches[:self.MAX_PATCHES_IN_DESCRIPTION]
            self.name = prefix + ','.join(shown)
            overflow = len(all_patches) - self.MAX_PATCHES_IN_DESCRIPTION
            if overflow > 0:
                self.name += '... (%d more CLs)' % (overflow, )

        self.bots = list(bots)
        self.slaves_request = options.slaves
        self.description = ('name: %s\n patches: %s\nbots: %s' %
                            (self.name, all_patches, self.bots))
        self.extra_args = options.pass_through_args
        if '--buildbot' not in self.extra_args:
            self.extra_args.append('--remote-trybot')
        self.extra_args.append('--remote-version=%s' %
                               (self.TRYJOB_FORMAT_VERSION, ))

        self.local_patches = local_patches
        # Assume an external checkout; switch below if it turns out internal.
        self.repo_url = self.EXTERNAL_URL
        self.cache_key = ('trybot', )
        self.manifest = None
        if repository.IsARepoRoot(options.sourceroot):
            self.manifest = git.ManifestCheckout.Cached(options.sourceroot)
            if repository.IsInternalRepoCheckout(options.sourceroot):
                self.repo_url = self.INTERNAL_URL
                self.cache_key = ('trybot-internal', )
# --- Example #3 (replaces stray scraper separator "Beispiel #3" / "0") ---
def main(argv):
    """cbuildbot entry point: parse args, validate the host, run build stages.

    Fixes vs. the previous revision: the mock-slave-status file is now opened
    in binary mode (pickle streams are bytes; 'rb' is required on Python 3 and
    harmless on Python 2), and dict.iteritems() — removed in Python 3 — is
    replaced with list(dict.items()), which behaves identically here.

    Args:
      argv: Command line arguments (excluding the program name).
    """
    # We get false positives with the options object.
    # pylint: disable=attribute-defined-outside-init

    # Turn on strict sudo checks.
    cros_build_lib.STRICT_SUDO = True

    # Set umask to 022 so files created by buildbot are readable.
    os.umask(0o22)

    parser = _CreateParser()
    options = ParseCommandLine(parser, argv)

    # Fetch our site_config now, because we need it to do anything else.
    site_config = config_lib.GetConfig()

    _PostParseCheck(parser, options, site_config)

    cros_build_lib.AssertOutsideChroot()

    if options.enable_buildbot_tags:
        logging.EnableBuildbotMarkers()

    if (options.buildbot and not options.debug
            and not options.build_config_name == constants.BRANCH_UTIL_CONFIG
            and not cros_build_lib.HostIsCIBuilder()):
        # --buildbot can only be used on a real builder, unless it's debug, or
        # 'branch-util'.
        cros_build_lib.Die('This host is not a supported build machine.')

    # Only one config arg is allowed in this mode, which was confirmed earlier.
    build_config = site_config[options.build_config_name]

    # TODO: Re-enable this block when reference_repo support handles this
    #       properly. (see chromium:330775)
    # if options.reference_repo is None:
    #   repo_path = os.path.join(options.sourceroot, '.repo')
    #   # If we're being run from a repo checkout, reuse the repo's git pool to
    #   # cut down on sync time.
    #   if os.path.exists(repo_path):
    #     options.reference_repo = options.sourceroot

    if options.reference_repo:
        if not os.path.exists(options.reference_repo):
            parser.error('Reference path %s does not exist' %
                         (options.reference_repo, ))
        elif not os.path.exists(os.path.join(options.reference_repo, '.repo')):
            parser.error('Reference path %s does not look to be the base of a '
                         'repo checkout; no .repo exists in the root.' %
                         (options.reference_repo, ))

    if (options.buildbot or options.remote_trybot) and not options.resume:
        if not options.cgroups:
            parser.error(
                'Options --buildbot/--remote-trybot and --nocgroups cannot '
                'be used together.  Cgroup support is required for '
                'buildbot/remote-trybot mode.')
        if not cgroups.Cgroup.IsSupported():
            parser.error(
                'Option --buildbot/--remote-trybot was given, but this '
                'system does not support cgroups.  Failing.')

        missing = osutils.FindMissingBinaries(_BUILDBOT_REQUIRED_BINARIES)
        if missing:
            parser.error(
                'Option --buildbot/--remote-trybot requires the following '
                "binaries which couldn't be found in $PATH: %s" %
                (', '.join(missing)))

    if options.reference_repo:
        options.reference_repo = os.path.abspath(options.reference_repo)

    # Sanity check of buildroot- specifically that it's not pointing into the
    # midst of an existing repo since git-repo doesn't support nesting.
    if (not repository.IsARepoRoot(options.buildroot)
            and git.FindRepoDir(options.buildroot)):
        cros_build_lib.Die(
            'Configured buildroot %s is a subdir of an existing repo checkout.'
            % options.buildroot)

    if not options.log_dir:
        options.log_dir = os.path.join(options.buildroot, _DEFAULT_LOG_DIR)

    log_file = None
    if options.tee:
        log_file = os.path.join(options.log_dir, _BUILDBOT_LOG_FILE)
        osutils.SafeMakedirs(options.log_dir)
        _BackupPreviousLog(log_file)

    with cros_build_lib.ContextManagerStack() as stack:
        options.preserve_paths = set()
        if log_file is not None:
            # We don't want the critical section to try to clean up the tee process,
            # so we run Tee (forked off) outside of it. This prevents a deadlock
            # because the Tee process only exits when its pipe is closed, and the
            # critical section accidentally holds on to that file handle.
            stack.Add(tee.Tee, log_file)
            options.preserve_paths.add(_DEFAULT_LOG_DIR)

        critical_section = stack.Add(cleanup.EnforcedCleanupSection)
        stack.Add(sudo.SudoKeepAlive)

        if not options.resume:
            # If we're in resume mode, use our parents tempdir rather than
            # nesting another layer.
            stack.Add(osutils.TempDir, prefix='cbuildbot-tmp', set_global=True)
            logging.debug('Cbuildbot tempdir is %r.', os.environ.get('TMP'))

        if options.cgroups:
            stack.Add(cgroups.SimpleContainChildren, 'cbuildbot')

        # Mark everything between EnforcedCleanupSection and here as having to
        # be rolled back via the contextmanager cleanup handlers.  This
        # ensures that sudo bits cannot outlive cbuildbot, that anything
        # cgroups would kill gets killed, etc.
        stack.Add(critical_section.ForkWatchdog)

        if options.mock_tree_status is not None:
            stack.Add(_ObjectMethodPatcher,
                      tree_status,
                      '_GetStatus',
                      return_value=options.mock_tree_status)

        if options.mock_slave_status is not None:
            # Pickle data is binary: open 'rb' (required on Python 3).
            with open(options.mock_slave_status, 'rb') as f:
                mock_statuses = pickle.load(f)
                # list() snapshots the items so values can be reassigned while
                # iterating; .iteritems() was removed in Python 3.
                for key, value in list(mock_statuses.items()):
                    mock_statuses[key] = builder_status_lib.BuilderStatus(
                        **value)
            stack.Add(_ObjectMethodPatcher,
                      completion_stages.MasterSlaveSyncCompletionStage,
                      '_FetchSlaveStatuses',
                      return_value=mock_statuses)

        stack.Add(_SetupConnections, options, build_config)
        retry_stats.SetupStats()

        timeout_display_message = None
        # For master-slave builds: Update slave's timeout using master's published
        # deadline.
        if options.buildbot and options.master_build_id is not None:
            slave_timeout = None
            if cidb.CIDBConnectionFactory.IsCIDBSetup():
                cidb_handle = cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder(
                )
                if cidb_handle:
                    slave_timeout = cidb_handle.GetTimeToDeadline(
                        options.master_build_id)

            if slave_timeout is not None:
                # We artificially set a minimum slave_timeout because '0' is handled
                # specially, and because we don't want to timeout while trying to set
                # things up.
                slave_timeout = max(slave_timeout, 20)
                if options.timeout == 0 or slave_timeout < options.timeout:
                    logging.info(
                        'Updating slave build timeout to %d seconds enforced '
                        'by the master', slave_timeout)
                    options.timeout = slave_timeout
                    timeout_display_message = (
                        'This build has reached the timeout deadline set by the master. '
                        'Either this stage or a previous one took too long (see stage '
                        'timing historical summary in ReportStage) or the build failed '
                        'to start on time.')
            else:
                logging.warning(
                    'Could not get master deadline for master-slave build. '
                    'Can not set slave timeout.')

        if options.timeout > 0:
            stack.Add(timeout_util.FatalTimeout, options.timeout,
                      timeout_display_message)
        try:
            _RunBuildStagesWrapper(options, site_config, build_config)
        except failures_lib.ExitEarlyException as ex:
            # This build finished successfully. Do not re-raise ExitEarlyException.
            logging.info('One stage exited early: %s', ex)