def _UploadPrebuilt(self, package_path, url_suffix):
  """Upload host or board prebuilt files to Google Storage space.

  Args:
    package_path: The path to the packages dir.
    url_suffix: The remote subdirectory where we should upload the packages.
  """
  # Load the local Packages index, drop filtered entries, and reuse any
  # uploads already present in the reference indexes.
  index = binpkg.GrabLocalPackageIndex(package_path)
  index.SetUploadLocation(self._binhost_base_url, url_suffix)
  index.RemoveFilteredPackages(self._ShouldFilterPackage)
  dedup_uploads = index.ResolveDuplicateUploads(self._pkg_indexes)

  missing = self._packages - self._found_packages
  if missing:
    cros_build_lib.Warning('unable to match packages: %r' % missing)

  # Serialize the rewritten Packages index so it can be uploaded alongside
  # the binaries.
  packages_tempfile = index.WriteToNamedTemporaryFile()

  gs_dest = '%s/%s' % (self._upload_location.rstrip('/'), url_suffix)
  assert gs_dest.startswith('gs://')

  # Map each local file to its remote destination, add the Packages file
  # itself, then push everything in one batch.
  file_map = GenerateUploadDict(package_path, gs_dest, dedup_uploads)
  packages_dest = '%s/Packages' % gs_dest.rstrip('/')
  file_map[packages_tempfile.name] = packages_dest

  RemoteUpload(self._acl, file_map)
def main(argv):
  """Install debug symbols for board packages, then refresh the Packages index.

  Re-execs itself under sudo when not already root; must run inside the
  chroot.
  """
  options = ParseArgs(argv)

  if not cros_build_lib.IsInsideChroot():
    raise commandline.ChrootRequiredError()

  if os.geteuid() != 0:
    # Re-exec the same command line under sudo; the parent just waits.
    cros_build_lib.SudoRunCommand(sys.argv)
    return

  # sysroot must have a trailing / as the tree dictionary produced by
  # create_trees in indexed with a trailing /.
  sysroot = cros_build_lib.GetSysroot(options.board) + '/'
  trees = create_trees(target_root=sysroot, config_root=sysroot)
  vartree = trees[sysroot]['vartree']

  # Cache directory is versioned so stale cache layouts can be abandoned by
  # bumping CACHE_VERSION.
  cache_dir = os.path.join(path_util.FindCacheDir(),
                           'cros_install_debug_syms-v' + CACHE_VERSION)

  if options.clearcache:
    osutils.RmDir(cache_dir, ignore_missing=True)

  binhost_cache = None
  if options.cachebinhost:
    binhost_cache = cache.DiskCache(cache_dir)

  # Propagate portage's BOTO_CONFIG so gsutil/GS access is authenticated the
  # same way the build was.
  boto_file = vartree.settings['BOTO_CONFIG']
  if boto_file:
    os.environ['BOTO_CONFIG'] = boto_file

  gs_context = gs.GSContext()
  symbols_mapping = RemoteSymbols(vartree, binhost_cache)

  if options.all:
    to_install = vartree.dbapi.cpv_all()
  else:
    to_install = [GetMatchingCPV(p, vartree.dbapi)
                  for p in options.packages]

  # Skip packages that already have symbols or have none available remotely.
  to_install = [p for p in to_install
                if ShouldGetSymbols(p, vartree.dbapi, symbols_mapping)]

  if not to_install:
    logging.info('nothing to do, exit')
    return

  with DebugSymbolsInstaller(vartree, gs_context, sysroot,
                             not options.debug) as installer:
    args = [(p, symbols_mapping[p]) for p in to_install]
    parallel.RunTasksInProcessPool(installer.Install, args,
                                   processes=options.jobs)

  logging.debug('installation done, updating packages index file')
  packages_dir = os.path.join(sysroot, 'packages')
  packages_file = os.path.join(packages_dir, 'Packages')
  # binpkg will set DEBUG_SYMBOLS automatically if it detects the debug symbols
  # in the packages dir.
  pkgindex = binpkg.GrabLocalPackageIndex(packages_dir)
  with open(packages_file, 'w') as p:
    pkgindex.Write(p)
def GetPrebuiltsFiles(prebuilts_root):
  """Find paths to prebuilts at the given root directory.

  Assumes the root contains a Portage package index named Packages.

  The package index paths are used to de-duplicate prebuilts uploaded. The
  immediate consequence of this is reduced storage usage. The non-obvious
  consequence is the shared packages generally end up with public
  permissions, while the board-specific packages end up with private
  permissions. This is what is meant to happen, but a further consequence of
  that is that when something happens that causes the toolchains to be
  uploaded as a private board's package, the board will not be able to build
  properly because it won't be able to fetch the toolchain packages, because
  they're expected to be public.

  Args:
    prebuilts_root: Absolute path to root directory containing a package
      index.

  Returns:
    List of paths to all prebuilt archives, relative to the root.
  """
  index = binpkg.GrabLocalPackageIndex(prebuilts_root)

  prebuilt_paths = []
  for entry in index.packages:
    cpv = entry['CPV']
    prebuilt_paths.append(cpv + '.tbz2')

    # A truthy DEBUG_SYMBOLS flag means a companion .debug archive exists.
    if cros_build_lib.BooleanShellValue(entry.get('DEBUG_SYMBOLS'),
                                        default=False):
      prebuilt_paths.append(cpv + '.debug.tbz2')

  _ValidatePrebuiltsFiles(prebuilts_root, prebuilt_paths)
  return prebuilt_paths
def testDebugSymbolsDetected(self):
  """When generating the Packages file, DEBUG_SYMBOLS is updated."""
  # Drop a debug-symbols tarball next to the package so binpkg detects it.
  osutils.WriteFile(
      os.path.join(self.tempdir,
                   'chromeos-base/shill-0.0.1-r1.debug.tbz2'),
      'hello', makedirs=True)
  osutils.WriteFile(os.path.join(self.tempdir, 'Packages'),
                    PACKAGES_CONTENT)
  index = binpkg.GrabLocalPackageIndex(self.tempdir)
  # assertEquals is a deprecated alias (removed in Python 3.12); use
  # assertEqual, and assertNotIn instead of assertFalse(x in y) for better
  # failure messages.
  self.assertEqual(index.packages[0]['CPV'],
                   'chromeos-base/shill-0.0.1-r1')
  self.assertEqual(index.packages[0].get('DEBUG_SYMBOLS'), 'yes')
  self.assertNotIn('DEBUG_SYMBOLS', index.packages[1])
def setUp(self):
  """Stub out package-index plumbing shared by the upload tests."""
  class MockTemporaryFile(object):
    # Minimal stand-in for the object WriteToNamedTemporaryFile returns;
    # only the .name attribute is consumed by the code under test.
    def __init__(self, name):
      self.name = name

  self.pkgindex = SimplePackageIndex()
  # Any GrabLocalPackageIndex('/packages') call returns our canned index.
  self.mox.StubOutWithMock(binpkg, 'GrabLocalPackageIndex')
  binpkg.GrabLocalPackageIndex('/packages').AndReturn(self.pkgindex)
  # Stub the actual upload so no network/GS traffic happens.
  self.mox.StubOutWithMock(prebuilt, 'RemoteUpload')
  self.mox.StubOutWithMock(self.pkgindex, 'ResolveDuplicateUploads')
  self.pkgindex.ResolveDuplicateUploads([]).AndReturn(PRIVATE_PACKAGES)
  # Replace the temp-file write with a fake object named 'fake'.
  self.mox.StubOutWithMock(self.pkgindex, 'WriteToNamedTemporaryFile')
  fake_pkgs_file = MockTemporaryFile('fake')
  self.pkgindex.WriteToNamedTemporaryFile().AndReturn(fake_pkgs_file)
def _UploadPrebuilt(self, package_path, url_suffix):
  """Upload host or board prebuilt files to Google Storage space.

  Args:
    package_path: The path to the packages dir.
    url_suffix: The remote subdirectory where we should upload the packages.
  """
  # Process Packages file, removing duplicates and filtered packages.
  pkg_index = binpkg.GrabLocalPackageIndex(package_path)
  pkg_index.SetUploadLocation(self._binhost_base_url, url_suffix)
  pkg_index.RemoveFilteredPackages(self._ShouldFilterPackage)
  uploads = pkg_index.ResolveDuplicateUploads(self._pkg_indexes)
  unmatched_pkgs = self._packages - self._found_packages
  if unmatched_pkgs:
    # Lazy %-args (logging convention): the message is only formatted when
    # the record is actually emitted.
    logging.warning('unable to match packages: %r', unmatched_pkgs)

  # Write Packages file.
  pkg_index.header['TTL'] = _BINPKG_TTL
  tmp_packages_file = pkg_index.WriteToNamedTemporaryFile()

  remote_location = '%s/%s' % (self._upload_location.rstrip('/'),
                               url_suffix)
  assert remote_location.startswith('gs://')

  # Build list of files to upload. Manually include the dev-only files but
  # skip them if not present.
  # TODO(deymo): Upload dev-only-extras.tbz2 as dev-only-extras.tar.bz2
  # outside packages/ directory. See crbug.com/448178 for details.
  if os.path.exists(os.path.join(package_path, 'dev-only-extras.tbz2')):
    uploads.append({'CPV': 'dev-only-extras'})

  upload_files = GenerateUploadDict(package_path, remote_location, uploads)
  remote_file = '%s/Packages' % remote_location.rstrip('/')
  upload_files[tmp_packages_file.name] = remote_file

  RemoteUpload(self._gs_context, self._acl, upload_files)

  # Publish a browsable HTML index next to the uploaded packages.
  with tempfile.NamedTemporaryFile(
      prefix='chromite.upload_prebuilts.index.') as index:
    GenerateHtmlIndex(
        [x[len(remote_location) + 1:] for x in upload_files.values()],
        index.name, self._target, self._version)
    self._Upload(index.name,
                 '%s/index.html' % remote_location.rstrip('/'))

  # Surface a clickable link to the index on the buildbot page.
  link_name = 'Prebuilts[%s]: %s' % (self._target, self._version)
  url = '%s%s/index.html' % (gs.PUBLIC_BASE_HTTPS_URL,
                             remote_location[len(gs.BASE_GS_URL):])
  logging.PrintBuildbotLink(link_name, url)
def testCreateFilteredPackageIndex(self):
  """CreateDevInstallPackageFile writes updated file to disk."""
  uri = 'gs://chromeos-prebuilt'
  binhost.CreateFilteredPackageIndex(self.root,
                                     self.devinstall_package_list,
                                     self.upload_packages_file,
                                     uri,
                                     'target/')

  # Reading the index back from disk verifies the file was actually
  # created at self.devinstall_package_list.
  result = binpkg.GrabLocalPackageIndex(self.upload_dir)
  self.assertEqual(result.header['URI'], uri)
  self.assertEqual(int(result.header['TTL']), 60 * 60 * 24 * 365)
  expected_packages = [{
      'CPV': 'virtual/python-enum34-1',
      'PATH': 'target/virtual/python-enum34-1.tbz2'
  }]
  self.assertEqual(result.packages, expected_packages)
def CreateFilteredPackageIndex(package_path, devinstall_package_list,
                               package_index_path, upload_uri, upload_path,
                               sudo=False):
  """Create Package file for dev-install process.

  The created package file (package_index_path) contains only the packages
  from the system packages file (in package_path) that are in the
  devinstall_package_list. The new package file will use the provided values
  for upload_uri and upload_path.

  Args:
    package_path (str): Absolute path to the standard Packages file.
    devinstall_package_list (list[str]): Packages from packages.installable
    package_index_path (str): Absolute path for new Packages file.
    upload_uri (str): The URI where prebuilts will be uploaded.
    upload_path (str): The path at the URI for the prebuilts.
    sudo (bool): Whether to write the file as the root user.
  """
  # Hoist to a set: O(1) membership per package instead of scanning the
  # list for every entry in the index.
  installable = set(devinstall_package_list)

  def ShouldFilterPackage(package):
    """Local func to filter packages not in the devinstall_package_list.

    Args:
      package (dict): Dictionary with key 'CPV' and package name as value

    Returns:
      True (filter) if not in the devinstall_package_list,
      else False (don't filter) if in the devinstall_package_list
    """
    # Direct boolean expression replaces the verbose if/else that returned
    # literal True/False.
    return package['CPV'] not in installable

  package_index = binpkg.GrabLocalPackageIndex(package_path)
  package_index.RemoveFilteredPackages(ShouldFilterPackage)
  package_index.SetUploadLocation(upload_uri, upload_path)
  # One year, in seconds.
  package_index.header['TTL'] = 60 * 60 * 24 * 365
  package_index.WriteFile(package_index_path, sudo=sudo)
def main(argv):
  """Install debug symbols for board packages, then refresh the Packages index.

  Re-execs the cros_install_debug_syms wrapper under sudo when not already
  root; must run inside the chroot.
  """
  if not cros_build_lib.IsInsideChroot():
    raise commandline.ChrootRequiredError(argv)

  cmd = [
      os.path.join(constants.CHROMITE_BIN_DIR, 'cros_install_debug_syms')
  ] + argv
  if os.geteuid() != 0:
    cros_build_lib.sudo_run(cmd)
    return

  options = ParseArgs(argv)

  # sysroot must have a trailing / as the tree dictionary produced by
  # create_trees in indexed with a trailing /.
  sysroot = cros_build_lib.GetSysroot(options.board) + '/'

  if options.list:
    ListInstallArgs(options, sysroot)
    return

  args = GetInstallArgsList(cmd)

  if not args:
    logging.info('No packages found needing debug symbols.')
    return

  # Partial to simplify the arguments to parallel since the first two are the
  # same for every call.
  partial_install = functools.partial(_InstallOne, sysroot, options.debug)
  # Use the pool as a context manager so the worker processes are reliably
  # torn down; the original never called close()/join()/terminate(), leaking
  # the pool until interpreter exit.
  with multiprocessing.Pool(processes=options.jobs) as pool:
    pool.map(partial_install, args)

  logging.debug('installation done, updating packages index file')
  packages_dir = os.path.join(sysroot, 'packages')
  packages_file = os.path.join(packages_dir, 'Packages')
  # binpkg will set DEBUG_SYMBOLS automatically if it detects the debug
  # symbols in the packages dir.
  pkgindex = binpkg.GrabLocalPackageIndex(packages_dir)
  with open(packages_file, 'w') as p:
    pkgindex.Write(p)
def UpdatePackageIndex(prebuilts_root, upload_uri, upload_path, sudo=False):
  """Update package index with information about where it will be uploaded.

  This causes the existing Packages file to be overwritten.

  Args:
    prebuilts_root: Absolute path to root directory containing binary
      prebuilts.
    upload_uri: The URI (typically GS bucket) where prebuilts will be
      uploaded.
    upload_path: The path at the URI for the prebuilts.
    sudo (bool): Whether to write the file as the root user.

  Returns:
    Path to the new Package index.
  """
  assert not upload_path.startswith('/')

  index = binpkg.GrabLocalPackageIndex(prebuilts_root)
  index.SetUploadLocation(upload_uri, upload_path)
  # One year, in seconds.
  index.header['TTL'] = 60 * 60 * 24 * 365

  destination = os.path.join(prebuilts_root, 'Packages')
  index.WriteFile(destination, sudo=sudo)
  return destination
def GetPackageIndex(binhost, binhost_cache=None):
  """Get the packages index for |binhost|.

  If a cache is provided, use it to a cache remote packages index.

  Args:
    binhost: a portage binhost, local, google storage or http.
    binhost_cache: a cache for the remote packages index.

  Returns:
    A PackageIndex object.
  """
  # Cache key: the binhost URL minus the scheme, split into path segments.
  key = binhost.split('://')[-1]
  key = key.rstrip('/').split('/')

  if binhost_cache and binhost_cache.Lookup(key).Exists():
    with open(binhost_cache.Lookup(key).path) as f:
      return pickle.load(f)

  pkgindex = binpkg.GrabRemotePackageIndex(binhost, quiet=True)
  if pkgindex and binhost_cache:
    # Only cache remote binhosts as local binhosts can change.
    with tempfile.NamedTemporaryFile(delete=False) as temp_file:
      pickle.dump(pkgindex, temp_file)
      temp_file.file.close()
      binhost_cache.Lookup(key).Assign(temp_file.name)
  elif pkgindex is None:
    urlparts = urllib.parse.urlsplit(binhost)
    if urlparts.scheme not in ('file', ''):
      # Don't fail the build on network errors. Print a warning message and
      # continue.
      logging.warning('Could not get package index %s', binhost)
      return None

    binhost = urlparts.path
    if not os.path.isdir(binhost):
      # Bug fix: the original message contained a bare '%s' placeholder that
      # was never interpolated with the offending binhost.
      raise ValueError('unrecognized binhost format for %s.' % binhost)
    pkgindex = binpkg.GrabLocalPackageIndex(binhost)

  return pkgindex
def testUpdatePackageIndex(self):
  """UpdatePackageIndex writes updated file to disk."""
  # Minimal Packages file: header (ARCH/TTL), then one package entry.
  # NOTE(review): exact blank-line layout reconstructed from the Portage
  # Packages format (blank lines separate header and entries) — confirm
  # against the original file.
  packages_content = """\
ARCH: amd64
TTL: 0

CPV: package/prebuilt

"""
  osutils.WriteFile(os.path.join(self.root, 'Packages'), packages_content)

  binhost.UpdatePackageIndex(self.root, 'gs://chromeos-prebuilt', 'target/')

  # Re-read from disk: the header must be rewritten and each package given
  # a PATH under the upload prefix.
  actual = binpkg.GrabLocalPackageIndex(self.root)
  self.assertEqual(actual.header['URI'], 'gs://chromeos-prebuilt')
  self.assertEqual(int(actual.header['TTL']), 60 * 60 * 24 * 365)
  self.assertEqual(actual.packages, [{
      'CPV': 'package/prebuilt',
      'PATH': 'target/package/prebuilt.tbz2'
  }])