def _ExtractOverlays(self):
  """Extracts list of overlays into class.

  Returns:
    A (rev_overlays, push_overlays) tuple of overlay path lists.
  """
  config = self._run.config
  rev_overlays = portage_util.FindOverlays(
      config.overlays, buildroot=self._build_root)
  push_overlays = portage_util.FindOverlays(
      config.push_overlays, buildroot=self._build_root)

  # Sanity check: we cannot push to overlays that we don't rev, so every
  # push overlay must also appear in the rev set.
  assert not set(push_overlays) - set(rev_overlays)
  # Either has to be a master or not have any push overlays.
  assert config.master or not push_overlays

  return rev_overlays, push_overlays
def uprev_build_targets(build_targets, overlay_type, chroot=None,
                        output_dir=None):
  """Uprev the set provided build targets, or all if not specified.

  Args:
    build_targets (list[build_target_lib.BuildTarget]|None): The build
      targets whose overlays should be uprevved, empty or None for all.
    overlay_type (str): One of the valid overlay types except None (see
      constants.VALID_OVERLAYS).
    chroot (chroot_lib.Chroot|None): The chroot to clean, if desired.
    output_dir (str|None): The path to optionally dump result files.
  """
  # Need a valid overlay, but exclude None.
  assert overlay_type and overlay_type in constants.VALID_OVERLAYS

  if not build_targets:
    # No targets given: operate on every overlay of the requested type.
    overlays = portage_util.FindOverlays(overlay_type)
  else:
    board_names = [target.name for target in build_targets]
    overlays = portage_util.FindOverlaysForBoards(
        overlay_type, boards=board_names)

  return uprev_overlays(overlays, build_targets=build_targets, chroot=chroot,
                        output_dir=output_dir)
def enumerate_package_ebuilds():
  """Determines package -> ebuild mappings for all packages.

  Yields a series of (package_path, package_name, [path_to_ebuilds]). This
  may yield the same package name multiple times if it's available in
  multiple overlays.
  """
  for overlay in portage_util.FindOverlays(overlay_type='both'):
    logging.debug('Found overlay %s', overlay)

    # Note that portage_util.GetOverlayEBuilds can't be used here, since that
    # specifically only searches for cros_workon candidates. We care about
    # everything we can possibly build.
    for dir_path, dir_names, file_names in os.walk(overlay):
      ebuilds = [name for name in file_names if name.endswith('.ebuild')]
      if not ebuilds:
        continue

      # os.walk directly uses `dir_names` to figure out what to walk next.
      # If there are ebuilds here, walking any lower is a waste, so don't.
      del dir_names[:]

      # Package name is <category>/<package>, i.e. the last two path parts.
      category = os.path.basename(os.path.dirname(dir_path))
      package = os.path.basename(dir_path)
      yield dir_path, '%s/%s' % (category, package), ebuilds
def main(argv):
  """Prints the overlays for a board, requiring a primary overlay to exist."""
  opts = _ParseArguments(argv)
  args = (constants.BOTH_OVERLAYS, opts.board)

  # Verify that a primary overlay exists.
  try:
    primary_overlay = portage_util.FindPrimaryOverlay(*args)
  except portage_util.MissingOverlayException as ex:
    cros_build_lib.Die(str(ex))

  # Get the overlays to print.
  overlays = ([primary_overlay] if opts.primary_only
              else portage_util.FindOverlays(*args))

  # Exclude any overlays in src/third_party, for backwards compatibility with
  # scripts that expected these to not be listed.
  if not opts.all:
    ignore_prefix = os.path.join(constants.SOURCE_ROOT, 'src', 'third_party')
    overlays = [path for path in overlays
                if not path.startswith(ignore_prefix)]

  if opts.board_overlay and os.path.isdir(opts.board_overlay):
    overlays.append(os.path.abspath(opts.board_overlay))

  print('\n'.join(overlays))
def main(argv):
  """Entry point: run the workon commit flow over the selected overlays."""
  options = _ParseArguments(argv)
  if not options.boards:
    overlays = portage_util.FindOverlays(options.overlay_type)
  else:
    # Collect the union of every requested board's overlays, deduplicated.
    # Use in-place set.update instead of rebinding via union(), which would
    # allocate a fresh set on every iteration.
    overlays = set()
    for board in options.boards:
      overlays.update(
          portage_util.FindOverlays(options.overlay_type, board=board))
    overlays = list(overlays)
  manifest = git.ManifestCheckout.Cached(constants.SOURCE_ROOT)
  _WorkOnCommit(options, overlays, manifest, options.packages or None)
def ListWorkonPackagesInfo(sysroot):
  """Find the specified workon packages for the specified board.

  Args:
    sysroot: sysroot_lib.Sysroot object.

  Returns:
    A list of WorkonPackageInfo objects for unique packages being worked on.
  """
  # Import portage late so that this script can be imported outside the chroot.
  # pylint: disable=F0401
  import portage.const
  packages = ListWorkonPackages(sysroot)
  if not packages:
    return []
  results = {}
  if sysroot.path == '/':
    overlays = portage_util.FindOverlays(constants.BOTH_OVERLAYS, None)
  else:
    overlays = sysroot.GetStandardField('PORTDIR_OVERLAY').splitlines()
  vdb_path = os.path.join(sysroot.path, portage.const.VDB_PATH)

  for overlay in overlays:
    for filename, projects, srcpaths in portage_util.GetWorkonProjectMap(
        overlay, packages):
      # chromeos-base/power_manager/power_manager-9999
      # cp = chromeos-base/power_manager
      # cpv = chromeos-base/power_manager-9999
      category, pn, p = portage_util.SplitEbuildPath(filename)
      cp = '%s/%s' % (category, pn)
      cpv = '%s/%s' % (category, p)

      # Get the time the package finished building. TODO(build): Teach Portage
      # to store the time the package started building and use that here.
      pkg_mtime_file = os.path.join(vdb_path, cpv, 'BUILD_TIME')
      try:
        pkg_mtime = int(osutils.ReadFile(pkg_mtime_file))
      except EnvironmentError as ex:
        if ex.errno != errno.ENOENT:
          raise
        pkg_mtime = 0

      # Get the modification time of the ebuild in the overlay.
      src_ebuild_mtime = os.lstat(os.path.join(overlay, filename)).st_mtime

      # Write info into the results dictionary, overwriting any previous
      # values. This ensures that overlays override appropriately.
      results[cp] = WorkonPackageInfo(cp, pkg_mtime, projects, srcpaths,
                                      src_ebuild_mtime)

  # Return a real list (not a Python 3 dict view) to match the documented
  # contract; a view would also break callers that index the result.
  return list(results.values())
def PerformStage(self):
  """Commits and pushes uprevs, portage cache, and binhost updates."""
  # Either has to be a master or not have any push overlays.
  assert self._run.config.master
  assert self._run.config.push_overlays

  # If we're a commit queue, we should clean out our local changes, resync,
  # and reapply our uprevs. This is necessary so that 1) we are sure to point
  # at the remote SHA1s, not our local SHA1s; 2) we can avoid doing a
  # rebase; 3) in the case of failure and staging_branch is None, we don't
  # submit the changes that were committed locally.
  #
  # If we're not a commit queue and the build succeeded, we can skip the
  # cleanup here. This is a cheap trick so that the Chrome PFQ pushes its
  # earlier uprev from the SyncChrome stage (it would be a bit tricky to
  # replicate the uprev here, so we'll leave it alone).

  # If we're not a commit queue and staging_branch is not None, we can skip
  # the cleanup here. When staging_branch is not None, we're going to push
  # the local commits generated in AFDOUpdateEbuild stage to the
  # staging_branch, cleaning up repository here will wipe out the local
  # commits.
  if not self.success:
    repo = self.GetRepoRepository()

    # Clean up our root and sync down the latest changes that were
    # submitted.
    repo.BuildRootGitCleanup(self._build_root)

    # Sync down the latest changes we have submitted.
    if self._run.options.sync:
      next_manifest = self._run.config.manifest
      repo.Sync(next_manifest)

    # Commit uprev and portage cache regeneration locally.
    if self._run.options.uprev and self._run.config.uprev:
      commands.UprevPackages(self._build_root, self._boards,
                             overlay_type=self._run.config.overlays)
      push_overlays = portage_util.FindOverlays(
          self._run.config.push_overlays, buildroot=self._build_root)
      commands.RegenPortageCache(push_overlays)

  # When prebuilts is True, if it's a successful run, update binhost conf.
  if self._run.config.prebuilts and self.success:
    confwriter = prebuilts.BinhostConfWriter(self._run)
    confwriter.Perform()

  # Push the uprev, portage cache, and binhost commits.
  commands.UprevPush(self._build_root,
                     overlay_type=self._run.config.push_overlays,
                     dryrun=self._run.options.debug)
  if config_lib.IsMasterAndroidPFQ(self._run.config) and self.success:
    self._run.attrs.metadata.UpdateWithDict({'UprevvedAndroid': True})
def main(argv):
  """Prints all overlays for a board, one path per line."""
  opts = _ParseArguments(argv)
  overlays = portage_util.FindOverlays(constants.BOTH_OVERLAYS, opts.board)

  # Exclude any overlays in src/third_party, for backwards compatibility with
  # scripts that expected these to not be listed.
  if not opts.all:
    third_party = os.path.join(constants.SOURCE_ROOT, 'src', 'third_party')
    overlays = [path for path in overlays
                if not path.startswith(third_party)]

  print('\n'.join(overlays))
def testFindWorkonProjects(self):
  """Test if we can find the list of workon projects."""
  ply_image = 'media-gfx/ply-image'
  ply_image_project = 'chromiumos/third_party/ply-image'
  this = 'chromeos-base/chromite'
  this_project = 'chromiumos/chromite'
  matches = [([ply_image], set([ply_image_project])),
             ([this], set([this_project])),
             ([ply_image, this], set([ply_image_project, this_project]))]
  if portage_util.FindOverlays(constants.BOTH_OVERLAYS):
    for packages, projects in matches:
      # assertEquals is a deprecated alias; assertEqual is the supported name.
      self.assertEqual(projects, portage_util.FindWorkonProjects(packages))
def ClassifyOverlayChanges(cls, changes, config, build_root, manifest,
                           packages_under_test):
  """Classifies overlay changes in |changes|.

  Args:
    changes: The list or set of GerritPatch instances.
    config: The cbuildbot config.
    build_root: Path to the build root.
    manifest: A ManifestCheckout instance representing our build directory.
    packages_under_test: A list of packages names included in the build
      without version/revision (e.g. ['chromeos-base/chromite']). If None,
      don't try to map overlay changes to packages.

  Returns:
    A (overlay_changes, irrelevant_overlay_changes) tuple; overlay_changes
    is a subset of |changes| that have modified one or more overlays, and
    irrelevant_overlay_changes is a subset of overlay_changes which are
    irrelevant to |config|.
  """
  # Overlays visible to this config, regardless of relevance.
  visible_overlays = set(
      portage_util.FindOverlays(config.overlays, None, build_root))
  # The overlays relevant to this build.
  relevant_overlays = GetRelevantOverlaysForConfig(config, build_root)

  overlay_changes = set()
  irrelevant_overlay_changes = set()
  for change in changes:
    affected_overlays = GetAffectedOverlays(change, manifest,
                                            visible_overlays)
    # None means the change does not touch any overlay at all; such changes
    # are left out of both result sets.
    if affected_overlays is not None:
      # The change modifies an overlay.
      overlay_changes.add(change)
      if not any(x in relevant_overlays for x in affected_overlays):
        # The change touched an irrelevant overlay.
        irrelevant_overlay_changes.add(change)
        continue

      if packages_under_test:
        # If the change modifies packages that are not part of this build,
        # they are considered irrelevant too.
        packages = GetAffectedPackagesForOverlayChange(
            change, manifest, visible_overlays)
        if packages:
          logging.info('%s affects packages %s',
                       cros_patch.GetChangesAsString([change]),
                       ', '.join(packages))
          if not any(x in packages_under_test for x in packages):
            irrelevant_overlay_changes.add(change)

  return overlay_changes, irrelevant_overlay_changes
def GetBoardOverlay(build_path, target):
  """Get the path to the board variant.

  Args:
    build_path: The path to the root of the build directory
    target: The target board as a BuildTarget object.

  Returns:
    The last overlay configured for the given board as a string.
  """
  overlay_list = portage_util.FindOverlays(
      constants.BOTH_OVERLAYS, target.board_variant, buildroot=build_path)
  # We only care about the last entry.
  return overlay_list[-1]
def GenerateBoardConfig(self, board):
  """Generates the configuration for a given board.

  Args:
    board: board name to use to generate the configuration.
  """
  toolchains = toolchain.GetToolchainsForBoard(board)

  # Compute the overlay list; board overlays exclude src/third_party.
  portdir_overlays = portage_util.FindOverlays(constants.BOTH_OVERLAYS, board)
  third_party = os.path.join(constants.SOURCE_ROOT, 'src', 'third_party')
  board_overlays = [overlay for overlay in portdir_overlays
                    if not overlay.startswith(third_party)]

  header = "# Created by cros_sysroot_utils from --board=%s." % board
  return self._GenerateConfig(toolchains, board_overlays, portdir_overlays,
                              header, BOARD_USE=board)
def GetRelevantOverlaysForConfig(config, build_root):
  """Returns a list of overlays relevant to |config|.

  Args:
    config: A cbuildbot config name.
    build_root: Path to the build root.

  Returns:
    A set of overlays.
  """
  relevant_overlays = set()
  # Accumulate the overlays of every board the config builds.
  for board in config.boards:
    relevant_overlays.update(
        portage_util.FindOverlays(constants.BOTH_OVERLAYS, board, build_root))
  return relevant_overlays
def GetToolchainsForBoard(board, buildroot=constants.SOURCE_ROOT):
  """Get a dictionary mapping toolchain targets to their options for a board.

  Args:
    board: board name in question (e.g. 'daisy').
    buildroot: path to buildroot.

  Returns:
    The list of toolchain tuples for the given board
  """
  # 'all' and 'sdk' are pseudo-boards; use the board-agnostic overlay set.
  overlay_board = None if board in ('all', 'sdk') else board
  overlays = portage_util.FindOverlays(
      constants.BOTH_OVERLAYS, overlay_board, buildroot=buildroot)
  toolchains = toolchain_list.ToolchainList(overlays=overlays)
  targets = toolchains.GetMergedToolchainSettings()
  if board == 'sdk':
    targets = FilterToolchains(targets, 'sdk', True)
  return targets
def RegenBuildCache(chroot, overlay_type):
  """Regenerate the Build Cache for the given target.

  Args:
    chroot (chroot_lib): The chroot where the regen command will be run.
    overlay_type: one of "private", "public", or "both".

  Returns:
    list[str]: The overlays with updated caches.
  """
  regen = functools.partial(
      portage_util.RegenCache, commit_changes=False, chroot=chroot)
  overlays = portage_util.FindOverlays(overlay_type)
  # Each task gets a single-overlay argument list; skip missing directories.
  task_args = [[overlay] for overlay in overlays if os.path.isdir(overlay)]
  results = parallel.RunTasksInProcessPool(regen, task_args)

  # Filter out all of the unchanged-overlay results.
  return [changed for changed in results if changed]
def _ParseArguments(argv):
  """Parse and validate arguments."""
  parser = GetParser()
  options = parser.parse_args(argv)

  # Parse, cleanup, and populate options: colon-separated lists become lists.
  for attr in ('packages', 'boards'):
    value = getattr(options, attr)
    if value:
      setattr(options, attr, value.split(':'))
  options.overlays = portage_util.FindOverlays(options.overlay_type)

  # Verify options.
  if not options.packages and not options.all:
    parser.error('Please specify at least one package (--packages)')
  if options.force and options.all:
    parser.error('Cannot use --force with --all. You must specify a list of '
                 'packages you want to force uprev.')

  options.Freeze()
  return options
def main(argv):
  """Entry point: validates options, then runs the push or commit workflow."""
  parser = GetParser()
  options = parser.parse_args(argv)

  # TODO: Remove this code in favor of a simple default on buildroot when
  # srcroot is removed.
  if options.srcroot and not options.buildroot:
    # Convert /<repo>/src -> <repo>
    options.buildroot = os.path.dirname(options.srcroot)
  if not options.buildroot:
    options.buildroot = constants.SOURCE_ROOT
  options.srcroot = None
  options.Freeze()

  if options.command == 'commit':
    if not options.packages and not options.all:
      parser.error('Please specify at least one package (--packages)')
    if options.force and options.all:
      parser.error(
          'Cannot use --force with --all. You must specify a list of '
          'packages you want to force uprev.')
  if not os.path.isdir(options.buildroot):
    parser.error('buildroot is not a valid path: %s' % options.buildroot)
  if options.overlay_type and options.overlays:
    parser.error('Cannot use --overlay-type with --overlays.')

  portage_util.EBuild.VERBOSE = options.verbose

  package_list = None
  if options.packages:
    package_list = options.packages.split(':')

  # Resolve the overlays to operate on: explicit --overlays wins, then
  # --overlay-type, otherwise fall back to a hard-coded default pair.
  overlays = []
  if options.overlays:
    for path in options.overlays.split(':'):
      if not os.path.isdir(path):
        cros_build_lib.Die('Cannot find overlay: %s' % path)
      overlays.append(os.path.realpath(path))
  elif options.overlay_type:
    overlays = portage_util.FindOverlays(
        options.overlay_type, buildroot=options.buildroot)
  else:
    logging.warning('Missing --overlays argument')
    overlays.extend([
        '%s/src/private-overlays/chromeos-overlay' % options.buildroot,
        '%s/src/third_party/chromiumos-overlay' % options.buildroot
    ])

  manifest = git.ManifestCheckout.Cached(options.buildroot)

  # Dict mapping from each overlay to its tracking branch.
  overlay_tracking_branch = {}
  # Dict mapping from each git repository (project) to a list of its overlays.
  git_project_overlays = {}
  for overlay in overlays:
    remote_ref = git.GetTrackingBranchViaManifest(overlay, manifest=manifest)
    overlay_tracking_branch[overlay] = remote_ref.ref
    git_project_overlays.setdefault(remote_ref.project_name, []).append(overlay)

  if options.command == 'push':
    _WorkOnPush(options, overlay_tracking_branch, git_project_overlays)
  elif options.command == 'commit':
    _WorkOnCommit(options, overlays, overlay_tracking_branch,
                  git_project_overlays, manifest, package_list)
def GenerateSourcePathMapping(packages, sysroot_path, board):
  """Returns a map from each package to the source paths it depends on.

  A source path is considered dependency of a package if modifying files in
  that path might change the content of the resulting package.

  Notes:
    1) This method errs on the side of returning unneeded dependent paths.
       i.e: for a given package X, some of its dependency source paths may
       contain files which doesn't affect the content of X.

       On the other hands, any missing dependency source paths for package X
       is considered a bug.
    2) This only outputs the direct dependency source paths for a given
       package and does not include the dependency source paths of dependency
       packages.

       e.g: if package A depends on B (DEPEND=B), then results of computing
       dependency source paths of A doesn't include dependency source paths
       of B.

  Args:
    packages: The list of packages CPV names (str)
    sysroot_path (str): The path to the sysroot.  If the packages are board
      agnostic, then this should be '/'.
    board (str): The name of the board if packages are dependency of board. If
      the packages are board agnostic, then this should be None.

  Returns:
    Map from each package to the source path (relative to the repo checkout
      root, i.e: ~/trunk/ in your cros_sdk) it depends on.
    For each source path which is a directory, the string is ended with a
      trailing '/'.
  """
  results = {}

  packages_to_ebuild_paths = portage_util.FindEbuildsForPackages(
      packages, sysroot=sysroot_path, check=True)

  # Source paths which are the directory of ebuild files.
  for package, ebuild_path in packages_to_ebuild_paths.items():
    # Include the entire directory that contains the ebuild as the package's
    # FILESDIR probably lives there too.
    results[package] = [os.path.dirname(ebuild_path)]

  # Source paths which are cros workon source paths.
  buildroot = os.path.join(constants.CHROOT_SOURCE_ROOT, 'src')
  manifest = git.ManifestCheckout.Cached(buildroot)
  for package, ebuild_path in packages_to_ebuild_paths.items():
    attrs = portage_util.EBuild.Classify(ebuild_path)
    if (not attrs.is_workon or
        # Blacklisted ebuild is pinned to a specific git sha1, so change in
        # that repo matter to the ebuild.
        attrs.is_blacklisted):
      continue
    ebuild = portage_util.EBuild(ebuild_path)
    workon_subtrees = ebuild.GetSourceInfo(buildroot, manifest).subtrees
    for path in workon_subtrees:
      results[package].append(path)

  if board:
    overlay_directories = portage_util.FindOverlays(
        overlay_type='both', board=board)
  else:
    # If a board is not specified we assume the package is intended for the
    # SDK and so we use the overlays for the SDK builder.
    overlay_directories = portage_util.FindOverlays(
        overlay_type='both', board=constants.CHROOT_BUILDER_BOARD)

  eclass_path_cache = {}

  for package, ebuild_path in packages_to_ebuild_paths.items():
    eclass_paths = GetRelevantEclassesForEbuild(ebuild_path,
                                                eclass_path_cache,
                                                overlay_directories)
    results[package].extend(eclass_paths)

  # Source paths which are the overlay directories for the given board
  # (packages are board specific).

  # The only parts of the overlay that affect every package are the current
  # profile (which lives somewhere in the profiles/ subdir) and a top-level
  # make.conf (if it exists).
  profile_directories = [
      os.path.join(x, 'profiles') for x in overlay_directories
  ]
  make_conf_paths = [os.path.join(x, 'make.conf') for x in overlay_directories]

  # These directories *might* affect a build, so we include them for now to
  # be safe.
  metadata_directories = [
      os.path.join(x, 'metadata') for x in overlay_directories
  ]
  scripts_directories = [
      os.path.join(x, 'scripts') for x in overlay_directories
  ]

  for package in results:
    results[package].extend(profile_directories)
    results[package].extend(make_conf_paths)
    results[package].extend(metadata_directories)
    results[package].extend(scripts_directories)
    # The 'crosutils' repo potentially affects the build of every package.
    results[package].append(constants.CROSUTILS_DIR)

  # chromiumos-overlay specifies default settings for every target in
  # chromeos/config and so can potentially affect every board.
  for package in results:
    results[package].append(
        os.path.join(constants.CHROOT_SOURCE_ROOT,
                     constants.CHROMIUMOS_OVERLAY_DIR, 'chromeos', 'config'))

  for p in results:
    results[p] = NormalizeSourcePaths(results[p])

  return results
def PerformStage(self):
  """Publishes uprev commits, optionally to a PFQ staging branch."""
  if (config_lib.IsMasterCQ(self._run.config) and
      not self.sync_stage.pool.HasPickedUpCLs()):
    logging.info('No CLs have been picked up and no slaves have been '
                 'scheduled in this run. Will not publish uprevs.')
    return

  # Either has to be a master or not have any push overlays.
  assert self._run.config.master
  assert self._run.config.push_overlays

  staging_branch = None
  if self.stage_push:
    if not config_lib.IsMasterChromePFQ(self._run.config):
      raise ValueError('This build must be a master chrome PFQ build '
                       'when stage_push is True.')
    build_identifier, _ = self._run.GetCIDBHandle()
    buildbucket_id = build_identifier.buildbucket_id

    # If the master passed BinHostTest and all the important slaves passed
    # UploadPrebuiltsTest, push uprev commits to a staging_branch.
    if (self.CheckMasterBinhostTest(buildbucket_id) and
        self.CheckSlaveUploadPrebuiltsTest()):
      staging_branch = ('refs/' + constants.PFQ_REF + '/' +
                        constants.STAGING_PFQ_BRANCH_PREFIX +
                        str(buildbucket_id))

  # If we're a commit queue, we should clean out our local changes, resync,
  # and reapply our uprevs. This is necessary so that 1) we are sure to point
  # at the remote SHA1s, not our local SHA1s; 2) we can avoid doing a
  # rebase; 3) in the case of failure and staging_branch is None, we don't
  # submit the changes that were committed locally.
  #
  # If we're not a commit queue and the build succeeded, we can skip the
  # cleanup here. This is a cheap trick so that the Chrome PFQ pushes its
  # earlier uprev from the SyncChrome stage (it would be a bit tricky to
  # replicate the uprev here, so we'll leave it alone).

  # If we're not a commit queue and staging_branch is not None, we can skip
  # the cleanup here. When staging_branch is not None, we're going to push
  # the local commits generated in AFDOUpdateEbuild stage to the
  # staging_branch, cleaning up repository here will wipe out the local
  # commits.
  if (config_lib.IsCQType(self._run.config.build_type) or
      not (self.success or staging_branch is not None)):
    repo = self.GetRepoRepository()

    # Clean up our root and sync down the latest changes that were
    # submitted.
    repo.BuildRootGitCleanup(self._build_root)

    # Sync down the latest changes we have submitted.
    if self._run.options.sync:
      next_manifest = self._run.config.manifest
      repo.Sync(next_manifest)

    # Commit uprev and portage cache regeneration locally.
    if self._run.options.uprev and self._run.config.uprev:
      commands.UprevPackages(self._build_root, self._boards,
                             overlay_type=self._run.config.overlays)
      push_overlays = portage_util.FindOverlays(
          self._run.config.push_overlays, buildroot=self._build_root)
      commands.RegenPortageCache(push_overlays)

  # When prebuilts is True, if it's a successful run or staging_branch is
  # not None for a master-chrome-pfq run, update binhost conf
  if (self._run.config.prebuilts and
      (self.success or staging_branch is not None)):
    confwriter = prebuilts.BinhostConfWriter(self._run)
    confwriter.Perform()

  # Push the uprev, portage cache, and binhost commits.
  commands.UprevPush(self._build_root,
                     overlay_type=self._run.config.push_overlays,
                     dryrun=self._run.options.debug,
                     staging_branch=staging_branch)
  if config_lib.IsMasterAndroidPFQ(self._run.config) and self.success:
    self._run.attrs.metadata.UpdateWithDict({'UprevvedAndroid': True})
def setUp(self):
  """Builds an on-disk overlay tree fixture and caches FindOverlays results."""
  # Create an overlay tree to run tests against and isolate ourselves from
  # changes in the main tree.
  D = cros_test_lib.Directory
  overlay_files = (D('metadata', ('layout.conf',)),)
  board_overlay_files = overlay_files + (
      'make.conf',
      'toolchain.conf',
  )
  file_layout = (
      D('src', (
          D('overlays', (
              D('overlay-%s' % self.PUB_ONLY, board_overlay_files),
              D('overlay-%s' % self.PUB2_ONLY, board_overlay_files),
              D('overlay-%s' % self.PUB_PRIV, board_overlay_files),
              D('overlay-%s' % self.PUB_PRIV_VARIANT, board_overlay_files),
          )),
          D('private-overlays', (
              D('overlay-%s' % self.PUB_PRIV, board_overlay_files),
              D('overlay-%s' % self.PUB_PRIV_VARIANT, board_overlay_files),
              D('overlay-%s' % self.PRIV_ONLY, board_overlay_files),
          )),
          D('third_party', (
              D('chromiumos-overlay', overlay_files),
              D('portage-stable', overlay_files),
          )),
      )),
      D('projects', (
          D(self.BRICK, (
              D('packages', overlay_files),
              'config.json',
          )),
      )),
  )
  cros_test_lib.CreateOnDiskHierarchy(self.tempdir, file_layout)

  # Seed the board overlays.
  conf_data = 'repo-name = %(repo-name)s\nmasters = %(masters)s'
  conf_path = os.path.join(self.tempdir, 'src', '%(private)soverlays',
                           'overlay-%(board)s', 'metadata', 'layout.conf')

  # Public board overlays; a '_' in the board name marks a variant whose
  # base board overlay is added to its masters.
  for board in (self.PUB_PRIV, self.PUB_PRIV_VARIANT, self.PUB_ONLY,
                self.PUB2_ONLY):
    settings = {
        'board': board,
        'masters': 'portage-stable ',
        'private': '',
        'repo-name': board,
    }
    if '_' in board:
      settings['masters'] += board.split('_')[0]
    osutils.WriteFile(conf_path % settings, conf_data % settings)

  # Seed the brick, with PUB_ONLY overlay as its primary overlay.
  osutils.WriteFile(os.path.join(self.tempdir, 'projects', self.BRICK,
                                 'packages', 'metadata', 'layout.conf'),
                    'repo-name = %s\nmasters = %s' % (self.BRICK,
                                                      self.PUB_ONLY))

  # Private board overlays use a '-private' repo-name suffix.
  for board in (self.PUB_PRIV, self.PUB_PRIV_VARIANT, self.PRIV_ONLY):
    settings = {
        'board': board,
        'masters': 'portage-stable ',
        'private': 'private-',
        'repo-name': '%s-private' % board,
    }
    if '_' in board:
      settings['masters'] += board.split('_')[0]
    osutils.WriteFile(conf_path % settings, conf_data % settings)

  # Seed the common overlays.
  conf_path = os.path.join(self.tempdir, 'src', 'third_party', '%(overlay)s',
                           'metadata', 'layout.conf')
  osutils.WriteFile(conf_path % {'overlay': 'chromiumos-overlay'},
                    conf_data % {'repo-name': 'chromiumos', 'masters': ''})
  osutils.WriteFile(conf_path % {'overlay': 'portage-stable'},
                    conf_data % {'repo-name': 'portage-stable',
                                 'masters': ''})

  # Now build up the list of overlays that we'll use in tests below.
  self.overlays = {}
  for b in (None, self.FAKE, self.PUB_PRIV, self.PUB_PRIV_VARIANT,
            self.PUB_ONLY, self.PUB2_ONLY, self.PRIV_ONLY, self.BRICK):
    self.overlays[b] = d = {}
    for o in (self.PRIVATE, self.PUBLIC, self.BOTH, None):
      try:
        d[o] = portage_util.FindOverlays(o, b, self.tempdir)
      except portage_util.MissingOverlayException:
        d[o] = []
  # NOTE(review): checks only the last board's dict (d from the final loop
  # iteration) — presumably intentional, but verify against the tests.
  self._no_overlays = not bool(any(d.values()))
def PerformStage(self):
  """Regenerates the portage cache for all configured push overlays."""
  push_overlays = portage_util.FindOverlays(
      self._run.config.push_overlays, buildroot=self._build_root)
  commands.RegenPortageCache(push_overlays)