def __init__(self, repo_url, directory, branch=None, referenced_repo=None,
             manifest=constants.DEFAULT_MANIFEST, depth=None):
    """Set up a repo checkout description.

    Args:
      repo_url: URL to fetch the repo manifest from.
      directory: Local path of the checkout root.
      branch: Optional branch to check out the manifest at.
      referenced_repo: Optional repository used as a git object reference.
      manifest: Which manifest.xml within the branch to use.
      depth: Optional max commit history depth; mutually exclusive with
        referenced_repo.

    Raises:
      ValueError: If both referenced_repo and depth are given, or if
        directory exists inside a repo checkout without being its root.
    """
    self.repo_url = repo_url
    self.directory = directory
    self.branch = branch

    # A reference path that isn't actually a repo root is tolerated: drop
    # it quietly so a bad reference can't disable any depth settings.
    if referenced_repo is not None:
        if depth is not None:
            raise ValueError("referenced_repo and depth are mutually exclusive "
                             "options; please pick one or the other.")
        if not IsARepoRoot(referenced_repo):
            referenced_repo = None
    self._referenced_repo = referenced_repo
    self._manifest = manifest

    # An already-initialized repo root means the first step is a selfupdate.
    self._repo_update_needed = IsARepoRoot(self.directory)
    if not self._repo_update_needed and git.FindRepoDir(self.directory):
        raise ValueError('Given directory %s is not the root of a repository.'
                         % self.directory)

    self._depth = None if depth is None else int(depth)
def __init__(self, manifest_repo_url, directory, branch=None,
             referenced_repo=None, manifest=constants.DEFAULT_MANIFEST,
             depth=None, repo_url=site_config.params.REPO_URL,
             repo_branch=None, groups=None, repo_cmd='repo',
             preserve_paths=()):
    """Set up a repo checkout description.

    Args:
      manifest_repo_url: URL to fetch repo manifest from.
      directory: Local path where to checkout the repository.
      branch: Branch to check out the manifest at.
      referenced_repo: Repository to reference for git objects, if possible.
      manifest: Which manifest.xml within the branch to use; default.xml
        when not given.
      depth: Max commit history depth; mutually exclusive with
        referenced_repo.
      repo_url: URL to fetch the repo tool from.
      repo_branch: Branch to check out the repo tool at.
      groups: Only sync projects that match this filter.
      repo_cmd: Name of the repo command to invoke.
      preserve_paths: Paths to preserve in repo clean when we clean and
        retry repo sync.

    Raises:
      ValueError: If both referenced_repo and depth are given, or if
        directory exists inside a repo checkout without being its root.
    """
    self.manifest_repo_url = manifest_repo_url
    self.directory = directory
    self.branch = branch
    self.repo_url = repo_url
    self.repo_branch = repo_branch
    self.groups = groups
    self.repo_cmd = repo_cmd
    self.preserve_paths = preserve_paths

    # referenced_repo and depth cannot be combined; reject that up front.
    if referenced_repo is not None and depth is not None:
        raise ValueError("referenced_repo and depth are mutually exclusive "
                         "options; please pick one or the other.")
    # A reference path that isn't actually a repo root is tolerated: drop
    # it quietly so a bad reference can't disable any depth settings.
    if referenced_repo is not None and not IsARepoRoot(referenced_repo):
        referenced_repo = None
    self._referenced_repo = referenced_repo
    self._manifest = manifest

    # An already-initialized repo root means the first step is a selfupdate.
    self._repo_update_needed = IsARepoRoot(self.directory)
    if not self._repo_update_needed and git.FindRepoDir(self.directory):
        raise ValueError(
            'Given directory %s is not the root of a repository.'
            % self.directory)

    self._depth = None if depth is None else int(depth)
def _IsGitStoreInRepo(path):
    """Checks if the git repo rooted at a directory is in repo's storage.

    Note that just because a pathway is in .repo, does *not* mean that
    repo can actually use it (the git repo must be in the manifest for
    that to be true).

    Args:
      path: Root of the git checkout to inspect.

    Returns:
      True if path's .git/objects lives under the repo storage directory.
    """
    repo_dir = os.path.realpath(git.FindRepoDir(path))
    git_objects_dir = os.path.realpath(os.path.join(path, '.git/objects'))
    # Compare whole path components, not raw string prefixes: a bare
    # startswith() would wrongly treat e.g. '/src/.repository/objects'
    # as being inside '/src/.repo'.
    return (git_objects_dir == repo_dir or
            git_objects_dir.startswith(repo_dir + os.sep))
def AssertNotNested(self):
    """Raise if this checkout directory sits inside another repo checkout.

    repo finds its root by searching upward for a .repo directory, so it
    cannot support one repo checkout nested inside another.
    """
    if IsARepoRoot(self.directory):
        return
    enclosing_repo = git.FindRepoDir(self.directory)
    if enclosing_repo:
        raise ValueError('%s is nested inside a repo at %s.' %
                         (self.directory, enclosing_repo))
def _GetChrootPath(self, path):
    """Translates a fully-expanded host |path| into a chroot equivalent.

    This checks path prefixes in order from the most to least "contained":
    the chroot itself, then the cache directory, and finally the source
    tree.  The idea is to return the shortest possible chroot equivalent.

    Args:
      path: A host path to translate.

    Returns:
      An equivalent chroot path.

    Raises:
      ValueError: If |path| is not reachable from the chroot.
    """
    # Start from the precomputed roots.  When the source root is inferred
    # from |path| itself, recompute the chroot location (and its symlink)
    # from whatever repo checkout contains |path|.
    source_path = self._source_path
    chroot_path = self._chroot_path
    chroot_link = self._chroot_link
    if self._source_from_path_repo:
        path_repo_dir = git.FindRepoDir(path)
        if path_repo_dir is not None:
            source_path = os.path.abspath(os.path.join(path_repo_dir, '..'))
        chroot_path = self._GetSourcePathChroot(source_path)
        chroot_link = self._ReadChrootLink(chroot_path)

    translated = None

    # Most contained first: the chroot directory itself...
    if chroot_path is not None:
        translated = self._TranslatePath(path, chroot_path, '/')

    # ...or the directory the chroot symlink points at.
    if translated is None and chroot_link is not None:
        translated = self._TranslatePath(path, chroot_link, '/')

    # Next candidate: the cache directory.
    if translated is None:
        translated = self._TranslatePath(path, self._GetCachePath(),
                                         constants.CHROOT_CACHE_ROOT)

    # Last resort: the SDK source checkout.
    if translated is None and source_path is not None:
        translated = self._TranslatePath(path, source_path,
                                         constants.CHROOT_SOURCE_ROOT)

    if translated is None:
        raise ValueError('Path is not reachable from the chroot')

    return translated
def setUp(self):
    """Unpack the canned repo_root tarball into tempdir and chdir into it."""
    data_root = os.path.dirname(os.path.abspath(__file__))
    self.test_base = os.path.join(data_root, 'testdata/chrome_set_ver_unittest')
    self.repo_root = os.path.join(self.tempdir, 'repo_root')
    self.repo_root_zip = os.path.join(self.test_base, 'repo_root.tar.gz')

    # The tarball must not have been extracted yet.
    assert not os.path.exists(self.repo_root)
    print('Unzipping test repo_root')
    cros_build_lib.RunCommand(['tar', '-xvf', self.repo_root_zip],
                              cwd=self.tempdir, redirect_stdout=True)
    assert os.path.exists(self.repo_root)

    # Sanity-check that the extracted tree is a repo checkout rooted where
    # we expect it.
    os.chdir(self.repo_root)
    found_repo = git.FindRepoDir(os.getcwd())
    assert os.path.realpath(os.path.dirname(found_repo)) == self.repo_root
def __init__(self, output):
    """Set up the tools system.

    Args:
      output: cros_output object to use for output.

    Raises:
      IOError: Unable to find .repo directory
    """
    # Whether we run inside the chroot decides how the source root is found.
    self.in_chroot = cros_build_lib.IsInsideChroot()
    self._out = output
    self._root = None

    if self.in_chroot:
        root_dir = os.getenv('CROS_WORKON_SRCROOT')
    else:
        repo = git.FindRepoDir('.')
        if not repo:
            raise IOError('Cannot find .repo directory (must be below cwd level)')
        root_dir = os.path.dirname(repo)
    self._SetRoot(root_dir)

    self._out.Info("Chroot is at '%s'" % self.chroot_path)

    # Known tool locations.  NOTE(review): the '##' prefix is presumably
    # expanded against the root set above — confirm against _SetRoot.
    self._tools = {
        'make_bmp_image': '##/usr/share/vboot/bitmaps/make_bmp_images.sh',
        'bct_dump': '##/usr/bin/bct_dump',
        'tegrarcm': '##/usr/bin/tegrarcm',
        'gbb_utility': '##/usr/bin/gbb_utility',
        'cbfstool': '##/usr/bin/cbfstool',
        'fdisk': '##/sbin/fdisk',
    }
    self.outdir = None            # No output directory assigned yet.
    self._delete_tempdir = None   # No temporary directory to delete yet.
    self.search_paths = []
def main(argv):
    """Add a project to local_manifest.xml in the enclosing repo checkout.

    Must be run from inside a repo checkout; resolves the active manifest,
    validates the requested project against it, updates manifest groups
    and/or local_manifest.xml, then writes local_manifest.xml back out.

    Args:
      argv: Command-line arguments (without the program name).

    Returns:
      0 on success; parser.error() exits on invalid usage.
    """
    parser = GetParser()
    options = parser.parse_args(argv)

    # All paths below are relative to the .repo directory of the checkout
    # we were invoked from.
    repo_dir = git.FindRepoDir(os.getcwd())
    if not repo_dir:
        parser.error("This script must be invoked from within a repository "
                     "checkout.")

    options.git_config = os.path.join(repo_dir, 'manifests.git', 'config')
    options.local_manifest_path = os.path.join(repo_dir, 'local_manifest.xml')

    # manifest.xml is a symlink to the active manifest; a minilayout
    # checkout is not supported by this flow.
    manifest_sym_path = os.path.join(repo_dir, 'manifest.xml')
    if os.path.basename(os.readlink(manifest_sym_path)) == 'minilayout.xml':
        _AssertNotMiniLayout()

    # For now, we only support the add command.
    assert options.command == 'add'
    if options.workon:
        if options.path is not None:
            parser.error('Adding workon projects do not set project.')
    else:
        if options.remote is None:
            parser.error('Adding non-workon projects requires a remote.')
        if options.path is None:
            parser.error('Adding non-workon projects requires a path.')
    name = options.project
    path = options.path
    revision = options.revision
    # Normalize a branch-style revision (refs/heads/...) to its short form;
    # tags and raw SHA1s are passed through untouched.
    if revision is not None:
        if (not git.IsRefsTags(revision) and
            not git.IsSHA1(revision)):
            revision = git.StripRefsHeads(revision, False)

    main_manifest = git.ManifestCheckout(os.getcwd())
    # Collect any checkouts matching the name, plus (when a path was given)
    # the checkout already occupying that path.
    main_element = main_manifest.FindCheckouts(name)
    if path is not None:
        main_element_from_path = main_manifest.FindCheckoutFromPath(
            path, strict=False)
        if main_element_from_path is not None:
            main_element.append(main_element_from_path)

    local_manifest = LocalManifest.FromPath(options.local_manifest_path)

    if options.workon:
        # Workon mode: the project must already exist in the main manifest.
        if not main_element:
            parser.error('No project named %r in the default manifest.' % name)
        _AddProjectsToManifestGroups(
            options, [checkout['name'] for checkout in main_element])
    elif main_element:
        if options.remote is not None:
            # Likely this project wasn't meant to be remote, so workon main element
            print(
                "Project already exists in manifest. Using that as workon project."
            )
            _AddProjectsToManifestGroups(
                options, [checkout['name'] for checkout in main_element])
        else:
            # Conflict will occur; complain.
            parser.error("Requested project name=%r path=%r will conflict with "
                         "your current manifest %s" %
                         (name, path, main_manifest.manifest_path))
    elif local_manifest.GetProject(name, path=path) is not None:
        parser.error("Requested project name=%r path=%r conflicts with "
                     "your local_manifest.xml" % (name, path))
    else:
        # Genuinely new project: record it in local_manifest.xml.
        element = local_manifest.AddNonWorkonProject(name=name, path=path,
                                                     remote=options.remote,
                                                     revision=revision)
        _AddProjectsToManifestGroups(options, [element.attrib['name']])

    with open(options.local_manifest_path, 'w') as f:
        f.write(local_manifest.ToString())
    return 0
def main(argv):
    """cbuildbot entry point: validate options, set up the build environment,
    and run the build stages.

    Args:
      argv: Command-line arguments (without the program name).
    """
    # We get false positives with the options object.
    # pylint: disable=attribute-defined-outside-init

    # Turn on strict sudo checks.
    cros_build_lib.STRICT_SUDO = True

    # Set umask to 022 so files created by buildbot are readable.
    os.umask(0o22)

    parser = _CreateParser()
    options = ParseCommandLine(parser, argv)

    # Fetch our site_config now, because we need it to do anything else.
    site_config = config_lib.GetConfig()

    _PostParseCheck(parser, options, site_config)

    cros_build_lib.AssertOutsideChroot()

    if options.enable_buildbot_tags:
        logging.EnableBuildbotMarkers()

    if (options.buildbot and not options.debug and
        not options.build_config_name == constants.BRANCH_UTIL_CONFIG and
        not cros_build_lib.HostIsCIBuilder()):
        # --buildbot can only be used on a real builder, unless it's debug, or
        # 'branch-util'.
        cros_build_lib.Die('This host is not a supported build machine.')

    # Only one config arg is allowed in this mode, which was confirmed earlier.
    build_config = site_config[options.build_config_name]

    # TODO: Re-enable this block when reference_repo support handles this
    # properly. (see chromium:330775)
    # if options.reference_repo is None:
    #   repo_path = os.path.join(options.sourceroot, '.repo')
    #   # If we're being run from a repo checkout, reuse the repo's git pool to
    #   # cut down on sync time.
    #   if os.path.exists(repo_path):
    #     options.reference_repo = options.sourceroot

    # The reference path, when given, must exist and itself be a repo
    # checkout root (contain .repo).
    if options.reference_repo:
        if not os.path.exists(options.reference_repo):
            parser.error('Reference path %s does not exist'
                         % (options.reference_repo,))
        elif not os.path.exists(os.path.join(options.reference_repo, '.repo')):
            parser.error('Reference path %s does not look to be the base of a '
                         'repo checkout; no .repo exists in the root.'
                         % (options.reference_repo,))

    # Builder modes require cgroup support and a set of host binaries.
    if (options.buildbot or options.remote_trybot) and not options.resume:
        if not options.cgroups:
            parser.error('Options --buildbot/--remote-trybot and --nocgroups cannot '
                         'be used together. Cgroup support is required for '
                         'buildbot/remote-trybot mode.')
        if not cgroups.Cgroup.IsSupported():
            parser.error('Option --buildbot/--remote-trybot was given, but this '
                         'system does not support cgroups. Failing.')

        missing = osutils.FindMissingBinaries(_BUILDBOT_REQUIRED_BINARIES)
        if missing:
            parser.error('Option --buildbot/--remote-trybot requires the following '
                         "binaries which couldn't be found in $PATH: %s"
                         % (', '.join(missing)))

    if options.reference_repo:
        options.reference_repo = os.path.abspath(options.reference_repo)

    # Sanity check of buildroot- specifically that it's not pointing into the
    # midst of an existing repo since git-repo doesn't support nesting.
    if (not repository.IsARepoRoot(options.buildroot) and
        git.FindRepoDir(options.buildroot)):
        cros_build_lib.Die(
            'Configured buildroot %s is a subdir of an existing repo checkout.'
            % options.buildroot)

    if not options.log_dir:
        options.log_dir = os.path.join(options.buildroot, _DEFAULT_LOG_DIR)

    log_file = None
    if options.tee:
        log_file = os.path.join(options.log_dir, _BUILDBOT_LOG_FILE)
        osutils.SafeMakedirs(options.log_dir)
        _BackupPreviousLog(log_file)

    with cros_build_lib.ContextManagerStack() as stack:
        options.preserve_paths = set()
        if log_file is not None:
            # We don't want the critical section to try to clean up the tee process,
            # so we run Tee (forked off) outside of it. This prevents a deadlock
            # because the Tee process only exits when its pipe is closed, and the
            # critical section accidentally holds on to that file handle.
            stack.Add(tee.Tee, log_file)
            options.preserve_paths.add(_DEFAULT_LOG_DIR)

        critical_section = stack.Add(cleanup.EnforcedCleanupSection)
        stack.Add(sudo.SudoKeepAlive)

        if not options.resume:
            # If we're in resume mode, use our parents tempdir rather than
            # nesting another layer.
            stack.Add(osutils.TempDir, prefix='cbuildbot-tmp', set_global=True)
            logging.debug('Cbuildbot tempdir is %r.', os.environ.get('TMP'))

        if options.cgroups:
            stack.Add(cgroups.SimpleContainChildren, 'cbuildbot')

        # Mark everything between EnforcedCleanupSection and here as having to
        # be rolled back via the contextmanager cleanup handlers. This
        # ensures that sudo bits cannot outlive cbuildbot, that anything
        # cgroups would kill gets killed, etc.
        stack.Add(critical_section.ForkWatchdog)

        if options.mock_tree_status is not None:
            stack.Add(_ObjectMethodPatcher, tree_status, '_GetStatus',
                      return_value=options.mock_tree_status)

        # Replay canned slave statuses instead of fetching real ones.
        if options.mock_slave_status is not None:
            with open(options.mock_slave_status, 'r') as f:
                mock_statuses = pickle.load(f)
                for key, value in mock_statuses.iteritems():
                    mock_statuses[key] = builder_status_lib.BuilderStatus(
                        **value)
            stack.Add(_ObjectMethodPatcher,
                      completion_stages.MasterSlaveSyncCompletionStage,
                      '_FetchSlaveStatuses',
                      return_value=mock_statuses)

        stack.Add(_SetupConnections, options, build_config)
        retry_stats.SetupStats()

        timeout_display_message = None
        # For master-slave builds: Update slave's timeout using master's published
        # deadline.
        if options.buildbot and options.master_build_id is not None:
            slave_timeout = None
            if cidb.CIDBConnectionFactory.IsCIDBSetup():
                cidb_handle = cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder(
                )
                if cidb_handle:
                    slave_timeout = cidb_handle.GetTimeToDeadline(
                        options.master_build_id)

            if slave_timeout is not None:
                # We artificially set a minimum slave_timeout because '0' is handled
                # specially, and because we don't want to timeout while trying to set
                # things up.
                slave_timeout = max(slave_timeout, 20)
                if options.timeout == 0 or slave_timeout < options.timeout:
                    logging.info(
                        'Updating slave build timeout to %d seconds enforced '
                        'by the master', slave_timeout)
                    options.timeout = slave_timeout
                    timeout_display_message = (
                        'This build has reached the timeout deadline set by the master. '
                        'Either this stage or a previous one took too long (see stage '
                        'timing historical summary in ReportStage) or the build failed '
                        'to start on time.')
            else:
                logging.warning(
                    'Could not get master deadline for master-slave build. '
                    'Can not set slave timeout.')

        if options.timeout > 0:
            stack.Add(timeout_util.FatalTimeout, options.timeout,
                      timeout_display_message)
        try:
            _RunBuildStagesWrapper(options, site_config, build_config)
        except failures_lib.ExitEarlyException as ex:
            # This build finished successfully. Do not re-raise ExitEarlyException.
            logging.info('One stage exited early: %s', ex)
def main(argv):
    """Add a project to local_manifest.xml (legacy optparse, Python 2 version).

    Handles upgrading a minilayout.xml checkout to full.xml, then supports a
    single 'add' subcommand that registers workon or non-workon projects in
    manifest groups and/or local_manifest.xml.

    Args:
      argv: Command-line arguments (without the program name).

    Returns:
      0 on success; parser.error() exits on invalid usage.
    """
    parser = optparse.OptionParser(usage='usage: %prog add [options] <name> '
                                   '<--workon | <path> --remote <remote> >')
    parser.add_option('-w', '--workon', action='store_true',
                      dest='workon', default=False,
                      help='Is this a workon package?')
    parser.add_option('-r', '--remote', dest='remote',
                      default=None)
    parser.add_option('-v', '--revision', dest='revision',
                      default=None,
                      help="Use to override the manifest defined default "
                           "revision used for a given project.")
    parser.add_option('--upgrade-minilayout', default=False, action='store_true',
                      help="Upgrade a minilayout checkout into a full.xml "
                           "checkout utilizing manifest groups.")
    (options, args) = parser.parse_args(argv)

    # All paths below are relative to the .repo directory of the checkout
    # we were invoked from.
    repo_dir = git.FindRepoDir(os.getcwd())
    if not repo_dir:
        parser.error("This script must be invoked from within a repository "
                     "checkout.")

    options.git_config = os.path.join(repo_dir, 'manifests.git', 'config')
    options.repo_dir = repo_dir
    options.local_manifest_path = os.path.join(repo_dir, 'local_manifest.xml')
    # This constant is used only when we're doing an upgrade away from
    # minilayout.xml to default.xml.
    options.default_manifest_path = os.path.join(repo_dir, 'manifests',
                                                 'default.xml')
    options.manifest_sym_path = os.path.join(repo_dir, 'manifest.xml')

    # manifest.xml is a symlink to the active manifest; minilayout checkouts
    # are auto-upgraded before any other work.
    active_manifest = os.path.basename(os.readlink(options.manifest_sym_path))
    upgrade_required = active_manifest == 'minilayout.xml'

    if options.upgrade_minilayout:
        if args:
            parser.error("--upgrade-minilayout takes no arguments.")
        if not upgrade_required:
            print "This repository checkout isn't using minilayout.xml; nothing to do"
        else:
            _UpgradeMinilayout(options)
        return 0
    elif upgrade_required:
        logging.warn(
            "Your repository checkout is using the old minilayout.xml workflow; "
            "auto-upgrading it.")
        # Re-invoke ourselves as a subprocess to perform the upgrade first.
        cros_build_lib.RunCommand(
            [sys.argv[0], '--upgrade-minilayout'], cwd=os.getcwd(),
            print_cmd=False)

    # Positional argument validation: only 'add' is supported, with either
    # 'add <project>' (--workon) or 'add <project> <path> --remote <remote>'.
    if not args:
        parser.error("No command specified.")
    elif args[0] != 'add':
        parser.error("Only supported subcommand is add right now.")
    elif options.workon:
        if len(args) != 2:
            parser.error(
                "Argument count is wrong for --workon; must be add <project>")
        name, path = args[1], None
    else:
        if options.remote is None:
            parser.error('Adding non-workon projects requires a remote.')
        elif len(args) != 3:
            parser.error("Argument count is wrong for non-workon mode; "
                         "must be add <project> <path> --remote <remote-arg>")
        name, path = args[1:]

    # Normalize a branch-style revision (refs/heads/...) to its short form;
    # tags and raw SHA1s are passed through untouched.
    revision = options.revision
    if revision is not None:
        if (not git.IsRefsTags(revision) and
            not git.IsSHA1(revision)):
            revision = git.StripRefsHeads(revision, False)

    main_manifest = Manifest.FromPath(options.manifest_sym_path,
                                      empty_if_missing=False)
    local_manifest = Manifest.FromPath(options.local_manifest_path)

    main_element = main_manifest.GetProject(name, path=path)

    if options.workon:
        # Workon mode: the project must already exist in the main manifest.
        if main_element is None:
            parser.error('No project named %r in the default manifest.'
                         % name)
        _AddProjectsToManifestGroups(options, main_element.attrib['name'])

    elif main_element is not None:
        if options.remote is not None:
            # Likely this project wasn't meant to be remote, so workon main element
            print "Project already exists in manifest. Using that as workon project."
            _AddProjectsToManifestGroups(options, main_element.attrib['name'])
        else:
            # Conflict will occur; complain.
            parser.error("Requested project name=%r path=%r will conflict with "
                         "your current manifest %s" % (name, path,
                                                       active_manifest))

    elif local_manifest.GetProject(name, path=path) is not None:
        parser.error("Requested project name=%r path=%r conflicts with "
                     "your local_manifest.xml" % (name, path))

    else:
        # Genuinely new project: record it in local_manifest.xml.
        element = local_manifest.AddNonWorkonProject(name=name, path=path,
                                                     remote=options.remote,
                                                     revision=revision)
        _AddProjectsToManifestGroups(options, element.attrib['name'])

    with open(options.local_manifest_path, 'w') as f:
        f.write(local_manifest.ToString())

    return 0
def main(argv):
    """Add a project to local_manifest.xml (argparse version).

    Handles upgrading a minilayout.xml checkout to full.xml, then supports a
    single 'add' subcommand that registers workon or non-workon projects in
    manifest groups and/or local_manifest.xml.

    Args:
      argv: Command-line arguments (without the program name).

    Returns:
      0 on success; parser.error() exits on invalid usage.
    """
    parser = GetParser()
    options = parser.parse_args(argv)

    # All paths below are relative to the .repo directory of the checkout
    # we were invoked from.
    repo_dir = git.FindRepoDir(os.getcwd())
    if not repo_dir:
        parser.error("This script must be invoked from within a repository "
                     "checkout.")

    options.git_config = os.path.join(repo_dir, 'manifests.git', 'config')
    options.repo_dir = repo_dir
    options.local_manifest_path = os.path.join(repo_dir, 'local_manifest.xml')
    # This constant is used only when we're doing an upgrade away from
    # minilayout.xml to default.xml.
    options.default_manifest_path = os.path.join(repo_dir, 'manifests',
                                                 'default.xml')
    options.manifest_sym_path = os.path.join(repo_dir, 'manifest.xml')

    # manifest.xml is a symlink to the active manifest; minilayout checkouts
    # are auto-upgraded before any other work.
    active_manifest = os.path.basename(os.readlink(options.manifest_sym_path))
    upgrade_required = active_manifest == 'minilayout.xml'

    if options.command == 'upgrade-minilayout':
        if not upgrade_required:
            print("This repository checkout isn't using minilayout.xml; "
                  "nothing to do")
        else:
            _UpgradeMinilayout(options)
        return 0
    elif upgrade_required:
        logging.warning(
            "Your repository checkout is using the old minilayout.xml workflow; "
            "auto-upgrading it.")
        # NOTE(review): recursive call performs the upgrade, then execution
        # falls through to the requested command below — confirm the
        # re-entrant call can't loop (upgrade_required becomes False).
        main(['upgrade-minilayout'])

    # For now, we only support the add command.
    assert options.command == 'add'
    if options.workon:
        if options.path is not None:
            parser.error('Adding workon projects do not set project.')
    else:
        if options.remote is None:
            parser.error('Adding non-workon projects requires a remote.')
        if options.path is None:
            parser.error('Adding non-workon projects requires a path.')
    name = options.project
    path = options.path
    revision = options.revision
    # Normalize a branch-style revision (refs/heads/...) to its short form;
    # tags and raw SHA1s are passed through untouched.
    if revision is not None:
        if (not git.IsRefsTags(revision) and
            not git.IsSHA1(revision)):
            revision = git.StripRefsHeads(revision, False)

    main_manifest = Manifest.FromPath(options.manifest_sym_path,
                                      empty_if_missing=False)
    local_manifest = Manifest.FromPath(options.local_manifest_path)

    main_element = main_manifest.GetProject(name, path=path)

    if options.workon:
        # Workon mode: the project must already exist in the main manifest.
        if main_element is None:
            parser.error('No project named %r in the default manifest.'
                         % name)
        _AddProjectsToManifestGroups(options, main_element.attrib['name'])

    elif main_element is not None:
        if options.remote is not None:
            # Likely this project wasn't meant to be remote, so workon main element
            print(
                "Project already exists in manifest. Using that as workon project."
            )
            _AddProjectsToManifestGroups(options, main_element.attrib['name'])
        else:
            # Conflict will occur; complain.
            parser.error("Requested project name=%r path=%r will conflict with "
                         "your current manifest %s" % (name, path,
                                                       active_manifest))

    elif local_manifest.GetProject(name, path=path) is not None:
        parser.error("Requested project name=%r path=%r conflicts with "
                     "your local_manifest.xml" % (name, path))

    else:
        # Genuinely new project: record it in local_manifest.xml.
        element = local_manifest.AddNonWorkonProject(name=name, path=path,
                                                     remote=options.remote,
                                                     revision=revision)
        _AddProjectsToManifestGroups(options, element.attrib['name'])

    with open(options.local_manifest_path, 'w') as f:
        f.write(local_manifest.ToString())
    return 0