def _DeployApp(basedir, options):
  """Deploy the prepared app from basedir.

  Args:
    basedir: The base directory where the app has already been prepped.
    options: The command-line options passed in.
  """
  cros_build_lib.RunCommand(
      ['./ae_shell', 'cq_stats', '--', 'python', 'cq_stats/manage.py',
       'collectstatic', '--noinput'],
      cwd=basedir)

  # Remove sensitive files that are needed to run tools locally to prepare the
  # deploy directory, but that we don't want to push to AE.
  creds_basename = 'annotator_cidb_creds'
  if options.instance == APP_INSTANCE_DEBUG:
    creds_basename += '.debug'
  creds_dir = os.path.join(basedir, 'cq_stats', creds_basename)
  for pem_name in ('client-cert.pem', 'client-key.pem', 'server-ca.pem'):
    osutils.SafeUnlink(os.path.join(creds_dir, pem_name))

  cros_build_lib.RunCommand(
      ['./ae_shell', 'cq_stats', '--', 'appcfg.py', '--oauth2',
       '--noauth_local_webserver', 'update', 'cq_stats'],
      cwd=basedir)
def RunBuild(options, base, target, queue):
  """Run the U-Boot build.

  NOTE(review): this relies on names from the enclosing module scope
  (|kwargs|, |uboard|, |outdir|, CompilerTool, Log) -- confirm the caller
  initializes them before invoking this.

  Args:
    options: Command line options.
    base: Base U-Boot flags.
    target: Target to build.
    queue: A parallel queue to add jobs to.
  """
  Log('U-Boot build flags: %s' % ' '.join(base))

  # Reconfigure U-Boot.
  if not options.incremental:
    # Ignore any error from this, some older U-Boots fail on this.
    cros_build_lib.RunCommand(base + ['distclean'], **kwargs)
    # Newer U-Boot trees (those shipping tools/genboardscfg.py) use the
    # '<board>_defconfig' target; older trees use '<board>_config'.
    if os.path.exists('tools/genboardscfg.py'):
      mtarget = 'defconfig'
    else:
      mtarget = 'config'
    cmd = base + ['%s_%s' % (uboard, mtarget)]
    result = cros_build_lib.RunCommand(cmd, capture_output=True,
                                       combine_stdout_stderr=True, **kwargs)
    if result.returncode:
      print("cmd: '%s', output: '%s'" % (result.cmdstr, result.output))
      sys.exit(result.returncode)

  # Do the actual build.
  if options.build:
    result = cros_build_lib.RunCommand(base + [target], capture_output=True,
                                       combine_stdout_stderr=True, **kwargs)
    if result.returncode:
      # The build failed, so output the results to stderr.
      print("cmd: '%s', output: '%s'" % (result.cmdstr, result.output),
            file=sys.stderr)
      sys.exit(result.returncode)

  files = ['%s/u-boot' % outdir]
  spl = glob.glob('%s/spl/u-boot-spl' % outdir)
  if spl:
    files += spl
  if options.size:
    result = cros_build_lib.RunCommand([CompilerTool('size')] + files,
                                       **kwargs)
    if result.returncode:
      sys.exit()

  # Create disassembly files .dis and .Dis (full dump)
  for f in files:
    # NOTE: rebinds |base| (the flags parameter) to the file stem; the
    # parameter is no longer needed at this point.
    base = os.path.splitext(f)[0]
    if options.objdump:
      queue.put(('-d', f, base + '.dis'))
      queue.put(('-D', f, base + '.Dis'))
    else:
      # Remove old files which otherwise might be confusing
      osutils.SafeUnlink(base + '.dis')
      osutils.SafeUnlink(base + '.Dis')

  Log('Output directory %s' % outdir)
def Copy(ctx, uri, filename):
  """Run the copy using a temp file.

  Downloads |uri| into a sibling '<filename>.tmp' file first, then moves it
  into place, so |filename| never holds a partial download.

  Args:
    ctx: Context object providing a Copy(uri, path, ...) method.
    uri: Source URI to copy from.
    filename: Final destination path.
  """
  scratch = filename + '.tmp'
  # Drop any stale leftover from a previously interrupted run.
  osutils.SafeUnlink(scratch)
  try:
    ctx.Copy(uri, scratch, log_output=True)
    shutil.move(scratch, filename)
  finally:
    # On success the move already consumed the temp file; on failure this
    # removes the partial download.
    osutils.SafeUnlink(scratch)
def f(dirname, sudo=False):
  """Create a file under a fresh dir and verify SafeUnlink removes it.

  NOTE(review): nested test helper -- |self| is captured from the enclosing
  test method and |self.tempdir| must already exist.

  Args:
    dirname: Directory name (joined under self.tempdir) to create.
    sudo: If True, chown the tree to root so unlinking requires sudo.
  """
  dirname = os.path.join(self.tempdir, dirname)
  path = os.path.join(dirname, 'foon')
  os.makedirs(dirname)
  open(path, 'w').close()
  self.assertTrue(os.path.exists(path))
  if sudo:
    cros_build_lib.SudoRunCommand(
        ['chown', 'root:root', '-R', '--', dirname], print_cmd=False)
    # A root-owned file cannot be unlinked by an unprivileged caller.
    self.assertRaises(EnvironmentError, os.unlink, path)
  # First call deletes the file and reports truthy...
  self.assertTrue(osutils.SafeUnlink(path, sudo=sudo))
  self.assertFalse(os.path.exists(path))
  # ...and a second call on the now-missing file reports falsy.
  self.assertFalse(osutils.SafeUnlink(path))
  self.assertFalse(os.path.exists(path))
def _CreateVMImage(src_dir, dest_dir): """Creates a VM image from a given chromiumos image. Args: src_dir: Path to the directory containing (non-VM) image. Defaults to None to use the latest image for the board. dest_dir: Path to the directory where the VM image should be written. Returns: The path of the created VM image. """ # image_to_vm.sh only runs in chroot, but src_dir / dest_dir may not be # reachable from chroot. Also, image_to_vm.sh needs all the contents of # src_dir to work correctly (it silently does the wrong thing if some files # are missing). # So, create a tempdir reachable from chroot, copy everything to that path, # create vm image there and finally move it all to dest_dir. with chroot_util.TempDirInChroot() as tempdir: logging.debug('Copying images from %s to %s.', src_dir, tempdir) osutils.CopyDirContents(src_dir, tempdir) # image_to_vm.sh doesn't let us provide arbitrary names for the input image. # Instead, it picks the name based on whether we pass in --test_image or not # (and doesn't use that flag for anything else). cmd = [ path_util.ToChrootPath( os.path.join(constants.CROSUTILS_DIR, 'image_to_vm.sh')), '--from=%s' % path_util.ToChrootPath(tempdir), '--disk_layout=16gb-rootfs', '--test_image', ] try: cros_build_lib.run(cmd, enter_chroot=True, cwd=constants.SOURCE_ROOT) except cros_build_lib.RunCommandError as e: raise SetupError('Failed to create VM image for %s: %s' % (src_dir, e)) # Preserve most content, although we should need only the generated VM # image. Other files like boot.desc might be needed elsewhere, but the # source images should no longer be needed. osutils.SafeUnlink(os.path.join(tempdir, constants.BASE_IMAGE_BIN), sudo=True) osutils.SafeUnlink(os.path.join(tempdir, constants.TEST_IMAGE_BIN), sudo=True) osutils.CopyDirContents(tempdir, dest_dir) # The exact name of the output image is hard-coded in image_to_vm.sh return os.path.join(dest_dir, constants.VM_IMAGE_BIN)
def RunBuild(options, base, target, queue):
  """Run the U-Boot build.

  NOTE(review): this relies on names from the enclosing module scope
  (|kwargs|, |uboard|, |outdir|, CompilerTool, Log) -- confirm the caller
  initializes them before invoking this.

  Args:
    options: Command line options.
    base: Base U-Boot flags.
    target: Target to build.
    queue: A parallel queue to add jobs to.
  """
  Log('U-Boot build flags: %s' % ' '.join(base))

  # Reconfigure U-Boot.
  if not options.incremental:
    # Ignore any error from this, some older U-Boots fail on this.
    cros_build_lib.RunCommand(base + ['distclean'], **kwargs)
    result = cros_build_lib.RunCommand(base + ['%s_config' % uboard],
                                       **kwargs)
    if result.returncode:
      sys.exit()

  # Do the actual build.
  if options.build:
    result = cros_build_lib.RunCommand(base + [target], **kwargs)
    if result.returncode:
      sys.exit()

  files = ['%s/u-boot' % outdir]
  spl = glob.glob('%s/spl/u-boot-spl' % outdir)
  if spl:
    files += spl
  if options.size:
    result = cros_build_lib.RunCommand([CompilerTool('size')] + files,
                                       **kwargs)
    if result.returncode:
      sys.exit()

  # Create disassembly files .dis and .Dis (full dump)
  for f in files:
    # NOTE: rebinds |base| (the flags parameter) to the file stem; the
    # parameter is no longer needed at this point.
    base = os.path.splitext(f)[0]
    if options.objdump:
      queue.put(('-d', f, base + '.dis'))
      queue.put(('-D', f, base + '.Dis'))
    else:
      # Remove old files which otherwise might be confusing
      osutils.SafeUnlink(base + '.dis')
      osutils.SafeUnlink(base + '.Dis')

  Log('Output directory %s' % outdir)
def close(self):
  """Tear down all mounts, symlinks and the loopback device itself.

  Safe to call when already closed: if self.dev is falsy this is a no-op.
  """
  if self.dev:
    # Unmount every mounted partition first; iterate over a copy because
    # _Unmount presumably mutates self._mounted -- confirm.
    for part in list(self._mounted):
      self._Unmount(part)

    # We still need to remove some directories, since _Unmount did not.
    for link in self._symlinks:
      osutils.SafeUnlink(link)
    self._symlinks = set()
    for path in self._to_be_rmdir:
      # Removal can race with the kernel releasing the mounts, so retry
      # (60 retries, sleeping 1s between attempts).
      retry_util.RetryException(cros_build_lib.RunCommandError, 60,
                                osutils.RmDir, path, sudo=True, sleep=1)
    self._to_be_rmdir = set()
    # Drop the kernel's partition mappings, then detach the loop device.
    cmd = ['partx', '-d', self.dev]
    # Best effort: partx failures are tolerated (error_code_ok=True).
    cros_build_lib.sudo_run(cmd, quiet=True, error_code_ok=True)
    cros_build_lib.sudo_run(['losetup', '--detach', self.dev])
    self.dev = None
    self.parts = {}
    self._gpt_table = None
    if self._destination_created:
      self.destination = None
      self._destination_created = False
def uprev(self, package: str) -> UprevResult:
  """Uprev a chrome package.

  Args:
    package: The package (directory name under the overlay) to uprev.

  Returns:
    The UprevResult of the operation; falsey when nothing changed, or a
    NEWER_VERSION_EXISTS result when the existing stable is already newer.
  """
  package_dir = os.path.join(self._overlay_dir, package)
  package_name = os.path.basename(package)

  # Find the unstable (9999) ebuild and any existing stable ebuilds.
  unstable_ebuild, stable_ebuilds = find_chrome_ebuilds(package_dir)
  # Find the best stable candidate to uprev -- the one that will be replaced.
  should_uprev, candidate = self._find_chrome_uprev_candidate(stable_ebuilds)

  if not should_uprev and candidate:
    return UprevResult(Outcome.NEWER_VERSION_EXISTS, candidate)

  result = self._mark_as_stable(candidate, unstable_ebuild, package_name,
                                package_dir)
  # If result is falsey then no files changed, and we don't need to do any
  # clean-up.
  if not result:
    return result

  self._new_ebuild_files.append(result.best_ebuild.ebuild_path)
  # Non-sticky candidates are superseded by the new stable ebuild, so
  # delete them and record the removal.
  if candidate and not candidate.IsSticky():
    osutils.SafeUnlink(candidate.ebuild_path)
    self._removed_ebuild_files.append(candidate.ebuild_path)

  if self._build_targets:
    self._clean_stale_package(result.best_ebuild.atom)

  return result
def Uninstall(self):
  """Remove the tools that Install() placed in the sysroot.

  Deletes the asan_symbolize.py copy and uninstalls each LLVM binary.
  """
  # The symbolizer lives in a root-owned location, so removal needs sudo.
  osutils.SafeUnlink(self.asan_symbolize_sysroot_path, sudo=True)
  # Undo the LLVM binary installs one by one.
  for binary in self._GetLLVMBinaries():
    binary.Uninstall()
def CopyTestcaseToSysroot(src_testcase_path):
  """Copies a testcase into the sysroot.

  Copies a testcase into the sysroot. Doesn't copy if testcase is already in
  sysroot.

  Args:
    src_testcase_path: A path (in the chroot) to a testcase that will be
        copied into sysroot.

  Returns:
    The path in the sysroot that the testcase was copied to.
  """
  if SysrootPath.IsPathInSysroot(src_testcase_path):
    # Already reachable from the sysroot; just wrap it in the type the
    # caller expects rather than copying.
    return SysrootPath(src_testcase_path)

  dest = GetPathForCopy(TESTCASE_DIRECTORY_NAME, src_testcase_path)
  osutils.SafeMakedirsNonRoot(os.path.dirname(dest.chroot))
  # Drop any previous copy so shutil.copy starts from a clean slate.
  osutils.SafeUnlink(dest.chroot)
  shutil.copy(src_testcase_path, dest.chroot)
  return dest
def Download(cls, branch, build_id, target, resource_id, dest_file):
  """Download an artifact for the given build id and target.

  Args:
    branch: branch of the desired build.
    build_id: Build id of the Android build, e.g., 2155602.
    target: Target of the Android build, e.g., shamu-userdebug.
    resource_id: Name of the artifact to download.
    dest_file: Path to the file to download to.
  """
  service_obj = cls._GetServiceObject()
  cls._VerifyBranch(service_obj, branch, build_id, target)

  # Delete partially downloaded file.
  osutils.SafeUnlink(dest_file)
  build_type = cls._GetBuildType(build_id)
  # TODO(dshi): Add retry logic here to avoid API flakes.
  download_req = service_obj.buildartifact().get_media(
      buildType=build_type,
      buildId=build_id,
      target=target,
      attemptId='latest',
      resourceId=resource_id)
  with io.FileIO(dest_file, mode='wb') as fh:
    downloader = googleapiclient.http.MediaIoBaseDownload(
        fh, download_req, chunksize=DEFAULT_CHUNKSIZE)
    done = None
    # Pull chunks until the downloader reports completion; each chunk is
    # retried up to MAX_ATTEMPTS times by the API client.
    while not done:
      _, done = downloader.next_chunk(num_retries=MAX_ATTEMPTS)
def _CleanTargetDirectory(directory):
  """Remove any existing generated files in the directory.

  This clean only removes the generated files to avoid accidentally
  destroying __init__.py customizations down the line. That will leave
  otherwise empty directories in place if things get moved. Neither case is
  relevant at the time of writing, but lingering empty directories seemed
  better than diagnosing accidental __init__.py changes.

  Args:
    directory (str): Path to be cleaned up.
  """
  logging.info('Cleaning old files.')
  for dirpath, _dirnames, filenames in os.walk(directory):
    stale = [os.path.join(dirpath, name)
             for name in filenames if name.endswith('_pb2.py')]
    # Remove empty init files to clean up otherwise empty directories.
    if '__init__.py' in filenames:
      init_path = os.path.join(dirpath, '__init__.py')
      if not osutils.ReadFile(init_path):
        stale.append(init_path)

    for stale_path in stale:
      osutils.SafeUnlink(stale_path)
def PerformStage(self):
  """Uprev Android; if it is pinned, record a failure, unpin and continue."""
  # This stage runs only in builders where |android_rev| config is set,
  # namely Android PFQ and pre-flight-branch builders.
  if not self._android_rev:
    logging.info('Not uprevving Android.')
    return

  android_package = self._run.config.android_package
  android_build_branch = self._run.config.android_import_branch
  android_version = _GetAndroidVersionFromMetadata(self._run.attrs.metadata)
  android_gts_build_branch = self._run.config.android_gts_build_branch

  assert android_package
  assert android_build_branch
  # |android_version| is usually set by MasterSlaveLKGMSyncStage, but we allow
  # it to be unset to indicate uprev'ing to the latest version. In fact, it is
  # not set in trybots.
  # |android_gts_build_branch| is not set if this builder is not supposed to
  # upload GTS bundles.
  logging.info('Android package: %s', android_package)
  logging.info('Android branch: %s', android_build_branch)
  logging.info('Android version: %s', android_version or 'LATEST')
  logging.info('Android GTS branch: %s', android_gts_build_branch or 'N/A')

  try:
    android_atom_to_build = commands.MarkAndroidAsStable(
        buildroot=self._build_root,
        tracking_branch=self._run.manifest_branch,
        android_package=android_package,
        android_build_branch=android_build_branch,
        boards=self._boards,
        android_version=android_version,
        android_gts_build_branch=android_gts_build_branch)
  except commands.AndroidIsPinnedUprevError as e:
    # If uprev failed due to a pin, record that failure (so that the
    # build ultimately fails) but try again without the pin, to allow the
    # slave to test the newer version anyway).
    android_atom_to_build = e.new_android_atom
    results_lib.Results.Record(self.name, e)
    logging.PrintBuildbotStepFailure()
    logging.error('Android is pinned. Unpinning Android and continuing '
                  'build for Android atom %s. This stage will be marked '
                  'as failed to prevent an uprev.',
                  android_atom_to_build)
    logging.info('Deleting pin file at %s and proceeding.',
                 ANDROIDPIN_MASK_PATH)
    osutils.SafeUnlink(ANDROIDPIN_MASK_PATH)

  logging.info('New Android package atom: %s', android_atom_to_build)

  # On a buildbot Android PFQ run, a missing atom means Android was already
  # uprevved, so the rest of the build is unnecessary.
  if (not android_atom_to_build and self._run.options.buildbot and
      self._run.config.build_type == constants.ANDROID_PFQ_TYPE):
    logging.info('Android already uprevved. Nothing else to do.')
    raise failures_lib.ExitEarlyException(
        'UprevAndroidStage finished and exited early.')
def RetryHandler(*args, **kwargs):
  """Retry func with given args/kwargs RETRY_ATTEMPTS times.

  NOTE(review): |func|, RETRY_ATTEMPTS and GSLibError come from the
  enclosing scope (this reads like a decorator wrapper body). Python 2
  code (uses xrange).
  """
  warning_msgs = []
  for i in xrange(0, RETRY_ATTEMPTS + 1):
    try:
      # Success path: return the result immediately.
      return func(*args, **kwargs)
    except GSLibError as ex:
      # On the last try just pass the exception on up.
      if i >= RETRY_ATTEMPTS:
        raise
      error_msg = str(ex)
      RESUMABLE_ERROR_MESSAGE = (
          gs.GSContext.RESUMABLE_DOWNLOAD_ERROR,
          gs.GSContext.RESUMABLE_UPLOAD_ERROR,
          'ResumableUploadException',
          'ResumableDownloadException',
          'ssl.SSLError: The read operation timed out',
      )
      # Resumable transfer failures leave gsutil tracker files behind;
      # delete them so the retry starts clean.
      if (func.__name__ == 'Copy' and
          any(x in error_msg for x in RESUMABLE_ERROR_MESSAGE)):
        logging.info('Resumable download/upload exception occured for %s',
                     args[1])
        # Pass the dest_path to get the tracker filename.
        tracker_filenames = gs.GSContext.GetTrackerFilenames(args[1])
        # This part of the code is copied from chromite.lib.gs with
        # slight modifications. This is a temporary solution until
        # we can deprecate crostools.lib.gslib (crbug.com/322740).
        logging.info('Potential list of tracker files: %s',
                     tracker_filenames)
        for tracker_filename in tracker_filenames:
          tracker_file_path = os.path.join(
              gs.GSContext.DEFAULT_GSUTIL_TRACKER_DIR, tracker_filename)
          if os.path.exists(tracker_file_path):
            logging.info('Deleting gsutil tracker file %s before retrying.',
                         tracker_file_path)
            logging.info('The content of the tracker file: %s',
                         osutils.ReadFile(tracker_file_path))
            osutils.SafeUnlink(tracker_file_path)
      else:
        # Non-retryable classes of errors propagate immediately.
        if 'AccessDeniedException' in str(ex) or 'NoSuchKey' in str(ex):
          raise
      # Record a warning message to be issued if a retry actually helps.
      warning_msgs.append('Try %d failed with error message:\n%s' %
                          (i + 1, ex))
    else:
      # If the func succeeded, then log any accumulated warning messages.
      # NOTE(review): the try block returns on success, so this else clause
      # appears unreachable and the accumulated warnings are never logged
      # -- confirm whether the return should instead bind a result first.
      if warning_msgs:
        logging.warning('Failed %s %d times before success:\n%s',
                        func.__name__, len(warning_msgs),
                        '\n'.join(warning_msgs))
def PerformStage(self):
  """Determine the chrome version, uprev chrome, then sync the checkout."""
  chrome_atom_to_build = None
  if self._chrome_rev:
    # Pick the chrome version: an explicit --chrome_version wins for
    # CHROME_REV_SPEC; otherwise fall back to the metadata dictionary.
    if (self._chrome_rev == constants.CHROME_REV_SPEC and
        self._run.options.chrome_version):
      self.chrome_version = self._run.options.chrome_version
      logging.info('Using chrome version from options.chrome_version: %s',
                   self.chrome_version)
    else:
      self.chrome_version = self._GetChromeVersionFromMetadata()
      if self.chrome_version:
        logging.info('Using chrome version from the metadata dictionary: %s',
                     self.chrome_version)

    # Perform chrome uprev.
    try:
      chrome_atom_to_build = commands.MarkChromeAsStable(
          self._build_root,
          self._run.manifest_branch,
          self._chrome_rev,
          self._boards,
          chrome_version=self.chrome_version)
    except commands.ChromeIsPinnedUprevError as e:
      # If uprev failed due to a chrome pin, record that failure (so that the
      # build ultimately fails) but try again without the pin, to allow the
      # slave to test the newer chrome anyway).
      chrome_atom_to_build = e.new_chrome_atom
      if chrome_atom_to_build:
        results_lib.Results.Record(self.name, e)
        logging.PrintBuildbotStepFailure()
        logging.error('Chrome is pinned. Attempting to continue build for '
                      'chrome atom %s anyway but build will ultimately fail.',
                      chrome_atom_to_build)
        logging.info('Deleting pin file at %s and proceeding.',
                     CHROMEPIN_MASK_PATH)
        osutils.SafeUnlink(CHROMEPIN_MASK_PATH)
      else:
        # No replacement atom available; surface the pin error.
        raise

  kwargs = {}
  if self._chrome_rev == constants.CHROME_REV_SPEC:
    kwargs['revision'] = self.chrome_version
    logging.PrintBuildbotStepText('revision %s' % kwargs['revision'])
  else:
    if not self.chrome_version:
      self.chrome_version = self._run.DetermineChromeVersion()
    kwargs['tag'] = self.chrome_version
    logging.PrintBuildbotStepText('tag %s' % kwargs['tag'])

  useflags = self._run.config.useflags
  commands.SyncChrome(self._build_root, self._run.options.chrome_root,
                      useflags, **kwargs)
  # On a buildbot Chrome PFQ run, no atom to build means chrome was already
  # uprevved, so the rest of the build is unnecessary.
  if (self._chrome_rev and not chrome_atom_to_build and
      self._run.options.buildbot and
      self._run.config.build_type == constants.CHROME_PFQ_TYPE):
    logging.info('Chrome already uprevved. Nothing else to do.')
    raise failures_lib.ExitEarlyException(
        'SyncChromeStage finished and exited early.')
def PerformStage(self):
  """Determine the chrome version, uprev chrome, then sync the checkout."""
  chrome_atom_to_build = None
  if self._chrome_rev:
    # Pick the chrome version: an explicit --chrome_version wins for
    # CHROME_REV_SPEC; otherwise fall back to the metadata dictionary.
    if (self._chrome_rev == constants.CHROME_REV_SPEC and
        self._run.options.chrome_version):
      self.chrome_version = self._run.options.chrome_version
      logging.info('Using chrome version from options.chrome_version: %s',
                   self.chrome_version)
    else:
      self.chrome_version = self._GetChromeVersionFromMetadata()
      if self.chrome_version:
        logging.info('Using chrome version from the metadata dictionary: %s',
                     self.chrome_version)

    # Perform chrome uprev.
    try:
      chrome_atom_to_build = commands.MarkChromeAsStable(
          self._build_root,
          self._run.manifest_branch,
          self._chrome_rev,
          self._boards,
          chrome_version=self.chrome_version)
    except commands.ChromeIsPinnedUprevError as e:
      # If uprev failed due to a chrome pin, record that failure (so that the
      # build ultimately fails) but try again without the pin, to allow the
      # slave to test the newer chrome anyway).
      chrome_atom_to_build = e.new_chrome_atom
      if chrome_atom_to_build:
        results_lib.Results.Record(self.name, e)
        logging.PrintBuildbotStepFailure()
        logging.error('Chrome is pinned. Unpinning chrome and continuing '
                      'build for chrome atom %s. This stage will be marked '
                      'as failed to prevent an uprev.',
                      chrome_atom_to_build)
        logging.info('Deleting pin file at %s and proceeding.',
                     CHROMEPIN_MASK_PATH)
        osutils.SafeUnlink(CHROMEPIN_MASK_PATH)
      else:
        # No replacement atom available; surface the pin error.
        raise

  kwargs = {}
  if self._chrome_rev == constants.CHROME_REV_SPEC:
    kwargs['revision'] = self.chrome_version
    logging.PrintBuildbotStepText('revision %s' % kwargs['revision'])
  else:
    if not self.chrome_version:
      self.chrome_version = self._run.DetermineChromeVersion()
    kwargs['tag'] = self.chrome_version
    logging.PrintBuildbotStepText('tag %s' % kwargs['tag'])

  useflags = self._run.config.useflags
  git_cache_dir = (
      self._run.options.chrome_preload_dir or self._run.options.git_cache_dir)
  commands.SyncChrome(self._build_root, self._run.options.chrome_root,
                      useflags, git_cache_dir=git_cache_dir, **kwargs)
def RemovePackageMask(target):
  """Removes a package.mask file for the given platform.

  The pre-existing package.mask files can mess with the keywords.

  Args:
    target: The target for which to remove the file.
  """
  maskfile = os.path.join('/etc/portage/package.mask', 'cross-' + target)
  # SafeUnlink tolerates a missing file, so this is a no-op when no mask
  # exists for the target.
  osutils.SafeUnlink(maskfile)
def RemoveBzipPackages(autotest_sysroot):
  """Remove all bzipped test/dep/profiler packages from sysroot autotest.

  Args:
    autotest_sysroot: Absolute path of autotest in the sysroot,
        e.g. '/build/lumpy/usr/local/autotest'
  """
  packages_dir = os.path.join(autotest_sysroot, 'packages')
  # Both removals tolerate the target already being absent.
  osutils.RmDir(packages_dir, ignore_missing=True)
  osutils.SafeUnlink(os.path.join(autotest_sysroot, 'packages.checksum'))
def RemovePackageMask(target):
  """Delete the package.mask file for |target|, if any.

  The pre-existing package.mask files can mess with the keywords.

  Args:
    target: The target to operate on (e.g. i686-pc-linux-gnu)
  """
  mask_path = os.path.join('/etc/portage/package.mask', 'cross-' + target)
  # SafeUnlink tolerates a missing file, so no existence check is needed.
  osutils.SafeUnlink(mask_path)
def UpdateTargets(targets, usepkg, root='/'): """Determines which packages need update/unmerge and defers to portage. Args: targets: The list of targets to update usepkg: Copies the commandline option root: The install root in which we want packages updated. """ # Remove keyword files created by old versions of cros_setup_toolchains. osutils.SafeUnlink('/etc/portage/package.keywords/cross-host') # For each target, we do two things. Figure out the list of updates, # and figure out the appropriate keywords/masks. Crossdev will initialize # these, but they need to be regenerated on every update. logging.info('Determining required toolchain updates...') mergemap = {} for target in targets: logging.debug('Updating target %s', target) # Record the highest needed version for each target, for masking purposes. RemovePackageMask(target) for package in GetTargetPackages(target): # Portage name for the package if IsPackageDisabled(target, package): logging.debug(' Skipping disabled package %s', package) continue logging.debug(' Updating package %s', package) pkg = GetPortagePackage(target, package) current = GetInstalledPackageVersions(pkg, root=root) desired = GetDesiredPackageVersions(target, package) desired_num = VersionListToNumeric(target, package, desired, False) mergemap[pkg] = set(desired_num).difference(current) packages = [] for pkg in mergemap: for ver in mergemap[pkg]: if ver != PACKAGE_NONE: packages.append(pkg) if not packages: logging.info('Nothing to update!') return False logging.info('Updating packages:') logging.info('%s', packages) cmd = [EMERGE_CMD, '--oneshot', '--update'] if usepkg: cmd.extend(['--getbinpkg', '--usepkgonly']) if root != '/': cmd.extend(['--sysroot=%s' % root, '--root=%s' % root]) cmd.extend(packages) cros_build_lib.RunCommand(cmd) return True
def UnpinVersion(_input_proto, _output_proto, _config):
  """Unpin the Android version.

  See AndroidService documentation in api/proto/android.proto.

  Args:
    _input_proto (UnpinVersionRequest): The input proto. (not used.)
    _output_proto (UnpinVersionResponse): The output proto. (not used.)
    _config (api_config.ApiConfig): The call config.
  """
  # Deleting the mask file is all unpinning takes; SafeUnlink tolerates the
  # file already being absent.
  osutils.SafeUnlink(ANDROIDPIN_MASK_PATH)
def SetLastBuildState(root, new_state):
  """Persist |new_state| as the most recent build state under |root|.

  Args:
    root: Root of the working directory tree as a string.
    new_state: BuildSummary object containing the state to be saved.
  """
  osutils.WriteFile(os.path.join(root, BUILDER_STATE_FILENAME),
                    new_state.to_json())

  # Drop the legacy state file; its contents have been migrated into the
  # new file.
  osutils.SafeUnlink(os.path.join(root, '.cbuildbot_launch_state'))
def ClearResignFlag(image): """Remove any /root/.need_to_be_signed file from the rootfs. Args: image: image_lib.LoopbackPartitions instance for this image. """ # Check and clear the need_to_resign tag file. rootfs_dir = image.Mount(('ROOT-A', ), mount_opts=('rw', ))[0] needs_to_be_signed = os.path.join(rootfs_dir, 'root/.need_to_be_signed') if os.path.exists(needs_to_be_signed): image.Mount(('ROOT-A', ), mount_opts=('remount', 'rw')) osutils.SafeUnlink(needs_to_be_signed, sudo=True) image.Unmount(('ROOT-A', ))
def run(self): """Kicks off devserver in a separate process and waits for it to finish.""" # Truncate the log file if it already exists. if os.path.exists(self.log_file): osutils.SafeUnlink(self.log_file, sudo=True) path_resolver = path_util.ChrootPathResolver() port = self.port if self.port else 0 cmd = [ self.devserver_bin, '--pidfile', path_resolver.ToChroot(self._pid_file), '--logfile', path_resolver.ToChroot(self.log_file), '--port=%d' % port, '--static_dir=%s' % path_resolver.ToChroot(self.static_dir) ] if not self.port: cmd.append('--portfile=%s' % path_resolver.ToChroot(self.port_file)) if self.src_image: cmd.append('--src_image=%s' % path_resolver.ToChroot(self.src_image)) if self.board: cmd.append('--board=%s' % self.board) chroot_args = ['--no-ns-pid'] # The chromite bin directory is needed for cros_generate_update_payload. extra_env = { 'PATH': '%s:%s' % (os.environ['PATH'], path_resolver.ToChroot(constants.CHROMITE_BIN_DIR)) } result = self._RunCommand(cmd, enter_chroot=True, chroot_args=chroot_args, cwd=constants.SOURCE_ROOT, extra_env=extra_env, check=False, stdout=True, stderr=subprocess.STDOUT, encoding='utf-8') if result.returncode != 0: msg = ('Devserver failed to start!\n' '--- Start output from the devserver startup command ---\n' '%s' '--- End output from the devserver startup command ---' % result.output) logging.error(msg)
def UpdateTargets(targets, usepkg, getbinpkg=True): """Determines which packages need update/unmerge and defers to portage. args: targets - the list of targets to update usepkg - copies the commandline option getbinpkg - copies the commandline option """ # Remove keyword files created by old versions of cros_setup_toolchains. osutils.SafeUnlink('/etc/portage/package.keywords/cross-host') # For each target, we do two things. Figure out the list of updates, # and figure out the appropriate keywords/masks. Crossdev will initialize # these, but they need to be regenerated on every update. print 'Determining required toolchain updates...' mergemap = {} for target in targets: # Record the highest needed version for each target, for masking purposes. RemovePackageMask(target) for package in GetTargetPackages(target): # Portage name for the package if IsPackageDisabled(target, package): continue pkg = GetPortagePackage(target, package) current = GetInstalledPackageVersions(pkg) desired = GetDesiredPackageVersions(target, package) desired_num = VersionListToNumeric(target, package, desired, False) mergemap[pkg] = set(desired_num).difference(current) packages = [] for pkg in mergemap: for ver in mergemap[pkg]: if ver != PACKAGE_NONE: packages.append(pkg) if not packages: print 'Nothing to update!' return False print 'Updating packages:' print packages cmd = [EMERGE_CMD, '--oneshot', '--update'] if usepkg: if getbinpkg: cmd.append('--getbinpkg') cmd.append('--usepkgonly') cmd.extend(packages) cros_build_lib.RunCommand(cmd) return True
def ClearPythonCacheFiles():
  """Clear cache files in the chromite repo.

  When switching branches, modules can be deleted or renamed, but the old
  pyc files stick around and confuse Python. This is a bit of a hack, but
  should be good enough for now.
  """
  listing = cros_build_lib.dbg_run(
      ['git', 'ls-tree', '-r', '-z', '--name-only', 'HEAD'],
      encoding='utf-8', capture_output=True)
  # Every directory that holds a tracked file may also hold stale caches.
  tracked_dirs = {os.path.dirname(name)
                  for name in listing.stdout.split('\0')}
  for tracked_dir in tracked_dirs:
    for pyc in glob.glob(os.path.join(tracked_dir, '*.pyc')):
      osutils.SafeUnlink(pyc)
    osutils.RmDir(os.path.join(tracked_dir, '__pycache__'),
                  ignore_missing=True)
def main(argv):
  """Create and/or upload the external extensions cache tarball.

  Args:
    argv: Command line arguments (without the program name).
  """
  parser = commandline.ArgumentParser('%%(prog)s [options] <version>\n\n%s' %
                                      __doc__, caching=True)
  parser.add_argument('version', nargs=1)
  parser.add_argument('--path', default=None, type='path',
                      help='Path of files dir with external_extensions.json')
  parser.add_argument('--create', default=False, action='store_true',
                      help='Create cache tarball with specified name')
  parser.add_argument('--upload', default=False, action='store_true',
                      help='Upload cache tarball with specified name')
  options = parser.parse_args(argv)

  if options.path:
    os.chdir(options.path)

  if not (options.create or options.upload):
    cros_build_lib.Die('Need at least --create or --upload args')

  if not os.path.exists('external_extensions.json'):
    cros_build_lib.Die('No external_extensions.json in %s. Did you forget the '
                       '--path option?', os.getcwd())

  identifier = options.version[0]
  tarball = '%s.tar.xz' % identifier
  if options.create:
    # Use a context manager so the config file handle is closed promptly
    # instead of leaking until garbage collection.
    with open('external_extensions.json', 'r') as ext_file:
      extensions = json.load(ext_file)
    with osutils.TempDir() as tempdir:
      CreateCacheTarball(extensions, tempdir, identifier,
                         os.path.abspath(tarball))

  if options.upload:
    ctx = gs.GSContext()
    url = os.path.join(UPLOAD_URL_BASE, tarball)
    # Refuse to clobber a previously published version.
    if ctx.Exists(url):
      cros_build_lib.Die(
          'This version already exists on Google Storage (%s)!\n'
          'NEVER REWRITE EXISTING FILE. IT WILL BREAK CHROME OS '
          'BUILD!!!', url)
    ctx.Copy(os.path.abspath(tarball), url, acl='project-private')
    logging.info('Tarball uploaded %s', url)
    # The local tarball is no longer needed once uploaded.
    osutils.SafeUnlink(os.path.abspath(tarball))
def _WriteLinesToFile(path, lines, line_prefix, line_suffix):
  """Write a set of lines to a file, adding prefixes, suffixes and newlines.

  An empty |lines| iterable deletes the file instead of writing it.

  Args:
    path: path to file.
    lines: iterable of lines to write.
    line_prefix: string to prefix each line with.
    line_suffix: string to append to each line before a newline.
  """
  decorated = ['%s%s%s\n' % (line_prefix, entry, line_suffix)
               for entry in lines]
  if decorated:
    osutils.WriteFile(path, ''.join(decorated), makedirs=True)
  else:
    # Nothing to write means the file should not exist at all.
    osutils.SafeUnlink(path)
def Cleanup(self, silent=False):
  """Dump the captured subprocess output and remove its temp file.

  Only runs in the parent process; child processes (different pid) and
  already-cleaned instances (self._output is None) return immediately.

  Args:
    silent: If True, skip echoing the captured output to the debug log.
  """
  if os.getpid() != self._parent_pid or self._output is None:
    return
  try:
    # Print output from subprocess.
    if not silent and logging.getLogger().isEnabledFor(logging.DEBUG):
      with open(self._output.name, 'r') as f:
        for line in f:
          logging.debug(line.rstrip('\n'))
  finally:
    # Clean up our temporary file even if logging above raised.
    osutils.SafeUnlink(self._output.name)
    self._output.close()
    self._output = None
def SetupArchivePath(self):
  """Create a fresh directory for archiving a build."""
  logging.info('Preparing local archive directory at "%s".',
               self.archive_path)
  if self._options.buildbot:
    # Buildbot: Clear out any leftover build artifacts, if present, for
    # this particular run. The Clean stage is responsible for trimming
    # back the number of archive paths to the last X runs.
    osutils.RmDir(self.archive_path, ignore_missing=True)
  else:
    # Clear the list of uploaded files if it exists. In practice, the Clean
    # stage deletes everything in the archive root, so this may not be
    # doing anything at all.
    uploaded_list = os.path.join(self.archive_path,
                                 commands.UPLOADED_LIST_FILENAME)
    osutils.SafeUnlink(uploaded_list)

  osutils.SafeMakedirs(self.archive_path)