def GeneratePayload(input_proto, output_proto, config):
  """Generate an update payload ('do paygen').

  Args:
    input_proto (PayloadGenerationRequest): Input proto.
    output_proto (PayloadGenerationResult): Output proto.
    config (api.config.ApiConfig): The API call config.

  Returns:
    A controller return code (e.g. controller.RETURN_CODE_SUCCESS).
  """
  # Resolve the tgt image oneof.
  tgt_name = input_proto.WhichOneof('tgt_image_oneof')
  try:
    tgt_image = getattr(input_proto, tgt_name)
  except AttributeError:
    cros_build_lib.Die('%s is not a known tgt image type' % (tgt_name,))

  # Resolve the src image oneof.
  src_name = input_proto.WhichOneof('src_image_oneof')

  # If the source image is 'full_update' we lack a source entirely.
  if src_name == 'full_update':
    src_image = None
  # Otherwise we have an image.
  else:
    try:
      src_image = getattr(input_proto, src_name)
    except AttributeError:
      cros_build_lib.Die('%s is not a known src image type' % (src_name,))

  # Ensure the oneofs form a compatible pair.
  if (src_name, tgt_name) not in _VALID_IMAGE_PAIRS:
    cros_build_lib.Die('%s and %s are not a valid image pair' %
                       (src_name, tgt_name))

  # Find the value of bucket or default to 'chromeos-releases'.
  destination_bucket = input_proto.bucket or 'chromeos-releases'

  # A paygen library call could raise here, but since we are still resolving
  # configuration we keep this before the validate_only check.
  payload_config = payload.PayloadConfig(
      tgt_image,
      src_image,
      destination_bucket,
      input_proto.verify,
      input_proto.keyset)

  # If configured for validation only we're done here.
  if config.validate_only:
    return controller.RETURN_CODE_VALID_INPUT

  # Do payload generation.
  paygen_ok = payload_config.GeneratePayload()
  _SetGeneratePayloadOutputProto(output_proto, paygen_ok)

  if paygen_ok:
    return controller.RETURN_CODE_SUCCESS
  else:
    return controller.RETURN_CODE_COMPLETED_UNSUCCESSFULLY

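# A minimal sketch of the oneof-resolution pattern used above. This helper is
# illustrative only (not part of the controller); it works on any generated
# protobuf message, where WhichOneof() returns the name of the populated
# field in the named oneof, or None if nothing is set.
def _resolve_oneof_example(message, oneof_name):
  """Return (field_name, value) for whichever field of |oneof_name| is set."""
  field_name = message.WhichOneof(oneof_name)
  if field_name is None:
    return None, None
  return field_name, getattr(message, field_name)
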
def _ParseImagesToCreate(to_build):
  """Helper function to parse the image types to build.

  This function exists just to clean up the Create function.

  Args:
    to_build (list[int]): The image type list.

  Returns:
    (set, set): The image and vm types, respectively, that need to be built.
  """
  image_types = set()
  vm_types = set()
  for current in to_build:
    if current in _IMAGE_MAPPING:
      image_types.add(_IMAGE_MAPPING[current])
    elif current in _VM_IMAGE_MAPPING:
      vm_types.add(current)
      # Make sure we build the image required to build the VM.
      image_types.add(_VM_IMAGE_MAPPING[current])
    else:
      # Not expected, but at least it will be obvious if this comes up.
      cros_build_lib.Die(
          "The service's known image types do not match those in image.proto. "
          'Unknown Enum ID: %s' % current)

  # We can only build one type of these images at a time since image_to_vm.sh
  # uses the default path if a name is not provided.
  if vm_types.issuperset({_BASE_VM_ID, _TEST_VM_ID}):
    cros_build_lib.Die('Cannot create more than one VM.')

  return image_types, vm_types

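# Illustrative call (not part of the API surface): requesting a test VM also
# pulls in the image type the VM is derived from, per _VM_IMAGE_MAPPING.
def _example_parse_images_to_create():
  """Hypothetical request for a base image plus a test VM."""
  return _ParseImagesToCreate([_BASE_ID, _TEST_VM_ID])
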
def BundleSimpleChromeArtifacts(input_proto, output_proto, _config):
  """Create the simple chrome artifacts."""
  # Required args.
  sysroot_path = input_proto.sysroot.path
  build_target_name = input_proto.sysroot.build_target.name
  output_dir = input_proto.output_dir

  # Optional args.
  chroot_path = input_proto.chroot.path or constants.DEFAULT_CHROOT_PATH
  cache_dir = input_proto.chroot.cache_dir

  # Build out the argument instances.
  build_target = build_target_util.BuildTarget(build_target_name)
  chroot = chroot_lib.Chroot(path=chroot_path, cache_dir=cache_dir)
  # Sysroot.path needs to be the fully qualified path, including the chroot.
  full_sysroot_path = os.path.join(chroot.path, sysroot_path.lstrip(os.sep))
  sysroot = sysroot_lib.Sysroot(full_sysroot_path)

  # Quick sanity check that the sysroot exists before we go on.
  if not sysroot.Exists():
    cros_build_lib.Die('The sysroot does not exist.')

  try:
    results = artifacts.BundleSimpleChromeArtifacts(chroot, sysroot,
                                                    build_target, output_dir)
  except artifacts.Error as e:
    cros_build_lib.Die('Error %s raised in BundleSimpleChromeArtifacts: %s',
                       type(e), e)

  for file_name in results:
    output_proto.artifacts.add().path = file_name

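# Why lstrip(os.sep) above: os.path.join discards everything before an
# absolute second argument, so the sysroot path must be relativized before it
# is joined under the chroot. A self-contained illustration with made-up
# example paths:
def _example_chroot_qualified_path():
  chroot_path = '/home/user/chromiumos/chroot'  # Made-up example path.
  sysroot_path = '/build/board'                 # Made-up example path.
  # Joining with an absolute path would drop the chroot prefix entirely.
  assert os.path.join(chroot_path, sysroot_path) == '/build/board'
  # Relativizing first yields the chroot-qualified path.
  return os.path.join(chroot_path, sysroot_path.lstrip(os.sep))
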
def _PostParseCheck(options, _args):
  """Perform some usage validation (after we've parsed the arguments).

  Args:
    options/args: The options/args object returned by optparse.
  """
  if options.local_pkg_path and not os.path.isfile(options.local_pkg_path):
    cros_build_lib.Die('%s is not a file.', options.local_pkg_path)

  if not options.gyp_defines:
    gyp_env = os.getenv('GYP_DEFINES', None)
    if gyp_env is not None:
      options.gyp_defines = chrome_util.ProcessGypDefines(gyp_env)
      logging.debug('GYP_DEFINES taken from environment: %s',
                    options.gyp_defines)

  if options.strict and not options.gyp_defines:
    cros_build_lib.Die('When --strict is set, the GYP_DEFINES environment '
                       'variable must be set.')

  if options.build_dir:
    chrome_path = os.path.join(options.build_dir, 'chrome')
    if os.path.isfile(chrome_path):
      deps = lddtree.ParseELF(chrome_path)
      if 'libbase.so' in deps['libs']:
        cros_build_lib.Warning(
            'Detected a component build of Chrome. Component builds are '
            'not working properly for Chrome OS. See crbug.com/196317. '
            'Use at your own risk!')

def BundleAFDOGenerationArtifacts(input_proto, output_proto, _config):
  """Generic function for creating tarballs of both AFDO and orderfile.

  Args:
    input_proto (BundleChromeAFDORequest): The input proto.
    output_proto (BundleResponse): The output proto.
    _config (api_config.ApiConfig): The API call config.
  """
  # Required args.
  build_target = build_target_util.BuildTarget(input_proto.build_target.name)
  chrome_root = input_proto.chroot.chrome_dir
  if not chrome_root:
    cros_build_lib.Die('chrome_root is not included in chroot')
  output_dir = input_proto.output_dir
  artifact_type = input_proto.artifact_type

  chroot = controller_util.ParseChroot(input_proto.chroot)

  try:
    is_orderfile = artifact_type == toolchain_pb2.ORDERFILE
    results = artifacts.BundleAFDOGenerationArtifacts(
        is_orderfile, chroot, chrome_root, build_target, output_dir)
  except artifacts.Error as e:
    cros_build_lib.Die('Error %s raised in BundleAFDOGenerationArtifacts: %s',
                       type(e), e)

  for file_name in results:
    output_proto.artifacts.add().path = file_name

def GetVersion(self, srcroot, manifest, default):
  """Get the base version number for this ebuild.

  The version is provided by the ebuild through a specific script in
  the $FILESDIR (chromeos-version.sh).
  """
  vers_script = os.path.join(
      os.path.dirname(self._ebuild_path_no_version),
      'files', 'chromeos-version.sh')

  if not os.path.exists(vers_script):
    return default

  srcdirs = self.GetSourcePath(srcroot, manifest)[1]

  # The chromeos-version script will output a usable raw version number,
  # or nothing in case of error or no available version.
  try:
    output = self._RunCommand([vers_script] + srcdirs).strip()
  except cros_build_lib.RunCommandError as e:
    cros_build_lib.Die('Package %s chromeos-version.sh failed: %s' %
                       (self._pkgname, e))

  if not output:
    cros_build_lib.Die('Package %s has a chromeos-version.sh script but '
                       'it returned no valid version for "%s"' %
                       (self._pkgname, ' '.join(srcdirs)))

  return output

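# For reference, the chromeos-version.sh contract implied above: the script
# is invoked with the package's source directories as arguments and prints a
# raw version number on stdout. A minimal, hypothetical example script:
_EXAMPLE_CHROMEOS_VERSION_SH = '''\
#!/bin/sh
# $1..$N are the package source directories; print the base version.
echo "1.0"
'''
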
def BundleSimpleChromeArtifacts(input_proto, output_proto, _config):
  """Create the simple chrome artifacts."""
  sysroot_path = input_proto.sysroot.path
  output_dir = input_proto.output_dir

  # Build out the argument instances.
  build_target = controller_util.ParseBuildTarget(
      input_proto.sysroot.build_target)
  chroot = controller_util.ParseChroot(input_proto.chroot)
  # Sysroot.path needs to be the fully qualified path, including the chroot.
  full_sysroot_path = os.path.join(chroot.path, sysroot_path.lstrip(os.sep))
  sysroot = sysroot_lib.Sysroot(full_sysroot_path)

  # Quick sanity check that the sysroot exists before we go on.
  if not sysroot.Exists():
    cros_build_lib.Die('The sysroot does not exist.')

  try:
    results = artifacts.BundleSimpleChromeArtifacts(
        chroot, sysroot, build_target, output_dir)
  except artifacts.Error as e:
    cros_build_lib.Die(
        'Error %s raised in BundleSimpleChromeArtifacts: %s', type(e), e)

  for file_name in results:
    output_proto.artifacts.add().path = file_name

def BundleFpmcuUnittests(input_proto, output_proto, _config):
  """Tar the fingerprint MCU unittest binaries for a build target.

  Args:
    input_proto (BundleRequest): The input proto.
    output_proto (BundleResponse): The output proto.
    _config (api_config.ApiConfig): The API call config.
  """
  output_dir = input_proto.output_dir
  chroot = controller_util.ParseChroot(input_proto.chroot)
  sysroot_path = input_proto.sysroot.path
  sysroot = sysroot_lib.Sysroot(sysroot_path)

  if not chroot.exists():
    cros_build_lib.Die('Chroot does not exist: %s', chroot.path)
  elif not sysroot.Exists(chroot=chroot):
    cros_build_lib.Die('Sysroot does not exist: %s',
                       chroot.full_path(sysroot.path))

  archive = artifacts.BundleFpmcuUnittests(chroot, sysroot, output_dir)

  if archive is None:
    logging.warning('No fpmcu unittests found for %s.', sysroot_path)
    return

  output_proto.artifacts.add().path = archive

def BundleFirmware(input_proto, output_proto, _config):
  """Tar the firmware images for a build target.

  Args:
    input_proto (BundleRequest): The input proto.
    output_proto (BundleResponse): The output proto.
    _config (api_config.ApiConfig): The API call config.
  """
  output_dir = input_proto.output_dir
  chroot = controller_util.ParseChroot(input_proto.chroot)
  sysroot_path = input_proto.sysroot.path
  sysroot = sysroot_lib.Sysroot(sysroot_path)

  if not chroot.exists():
    cros_build_lib.Die('Chroot does not exist: %s', chroot.path)
  elif not sysroot.Exists(chroot=chroot):
    cros_build_lib.Die('Sysroot does not exist: %s',
                       chroot.full_path(sysroot.path))

  archive = artifacts.BuildFirmwareArchive(chroot, sysroot, output_dir)

  if archive is None:
    cros_build_lib.Die(
        'Could not create firmware archive. No firmware found for %s.',
        sysroot_path)

  output_proto.artifacts.add().path = archive

def DebugInfoTest(input_proto, _output_proto, config):
  """Run the debug info tests."""
  sysroot_path = input_proto.sysroot.path
  target_name = input_proto.sysroot.build_target.name

  if not sysroot_path:
    if target_name:
      sysroot_path = cros_build_lib.GetSysroot(target_name)
    else:
      cros_build_lib.Die("The sysroot path or the sysroot's build target name "
                         'must be provided.')

  # We could get away without this, but it's a cheap check.
  sysroot = sysroot_lib.Sysroot(sysroot_path)
  if not sysroot.Exists():
    cros_build_lib.Die('The provided sysroot does not exist.')

  if config.validate_only:
    return controller.RETURN_CODE_VALID_INPUT

  if test.DebugInfoTest(sysroot_path):
    return controller.RETURN_CODE_SUCCESS
  else:
    return controller.RETURN_CODE_COMPLETED_UNSUCCESSFULLY

def Run(self):
  """Run cros debug."""
  commandline.RunInsideChroot(self)
  self.options.Freeze()
  self._ReadOptions()
  with remote_access.ChromiumOSDeviceHandler(
      self.ssh_hostname, port=self.ssh_port, username=self.ssh_username,
      private_key=self.ssh_private_key) as device:
    self.board = cros_build_lib.GetBoard(device_board=device.board,
                                         override_board=self.options.board,
                                         strict=True)
    logging.info('Board is %s', self.board)

    self.gdb_cmd = [
        'gdb_remote', '--ssh',
        '--board', self.board,
        '--remote', self.ssh_hostname,
    ]
    if self.ssh_port:
      self.gdb_cmd.extend(['--ssh_port', str(self.ssh_port)])

    if not (self.pid or self.exe):
      cros_build_lib.Die(
          'Must use --exe or --pid to specify the process to debug.')

    if self.pid:
      if self.list or self.exe:
        cros_build_lib.Die(
            '--list and --exe are disallowed when --pid is used.')
      self._DebugRunningProcess(self.pid)
      return

    if not self.exe.startswith('/'):
      cros_build_lib.Die('--exe must have a full pathname.')
    logging.debug('Executable path is %s', self.exe)
    if not device.IsFileExecutable(self.exe):
      cros_build_lib.Die(
          'File path "%s" does not exist or is not executable on device %s',
          self.exe, self.ssh_hostname)

    pids = device.GetRunningPids(self.exe)
    self._ListProcesses(device, pids)

    if self.list:
      # If the '--list' flag is on, do not launch GDB.
      return

    if pids:
      choices = ['Start a new process under GDB']
      choices.extend(pids)
      idx = cros_build_lib.GetChoice(
          'Please select the process pid to debug (select [0] to start a '
          'new process):', choices)
      if idx == 0:
        self._DebugNewProcess()
      else:
        self._DebugRunningProcess(pids[idx - 1])
    else:
      self._DebugNewProcess()

def GetSourcePath(self, srcroot):
  """Get the project and path for this ebuild.

  The path is guaranteed to exist, be a directory, and be absolute.
  """
  workon_vars = (
      'CROS_WORKON_LOCALNAME',
      'CROS_WORKON_PROJECT',
      'CROS_WORKON_SUBDIR',
  )
  env = {
      'CROS_WORKON_LOCALNAME': self._pkgname,
      'CROS_WORKON_PROJECT': self._pkgname,
      'CROS_WORKON_SUBDIR': '',
  }
  settings = osutils.SourceEnvironment(self._unstable_ebuild_path,
                                       workon_vars, env=env)
  localnames = settings['CROS_WORKON_LOCALNAME'].split(',')
  projects = settings['CROS_WORKON_PROJECT'].split(',')
  subdirs = settings['CROS_WORKON_SUBDIR'].split(',')

  # Sanity checks and completion.
  # Each project specification has to have the same number of items.
  if len(projects) != len(localnames):
    raise EbuildFormatIncorrectException(
        self._unstable_ebuild_path,
        "Number of _PROJECT and _LOCALNAME items don't match.")
  # The number of subdirs must be 0, 1, or len(projects).
  if len(projects) != len(subdirs) and len(subdirs) > 1:
    raise EbuildFormatIncorrectException(self._unstable_ebuild_path,
                                         'Incorrect number of _SUBDIR items.')
  # If there's one subdir, apply it to all projects.
  if len(subdirs) == 1:
    subdirs = subdirs * len(projects)
  # If there are none, use empty subdirs to avoid exceptions later.
  if len(subdirs) == 0:
    subdirs = [''] * len(projects)

  # Calculate srcdir.
  if self._category == 'chromeos-base':
    dir_ = 'platform'
  else:
    dir_ = 'third_party'

  subdir_paths = [os.path.realpath(os.path.join(srcroot, dir_, l, s))
                  for l, s in zip(localnames, subdirs)]

  for subdir_path, project in zip(subdir_paths, projects):
    if not os.path.isdir(subdir_path):
      cros_build_lib.Die('Source repository %s '
                         'for project %s does not exist.' %
                         (subdir_path, self._pkgname))
    # Verify that we're grabbing the commit id from the right project name.
    real_project = self.GetGitProjectName(subdir_path)
    if project != real_project:
      cros_build_lib.Die('Project name mismatch for %s '
                         '(found %s, expected %s)' %
                         (subdir_path, real_project, project))

  return projects, subdir_paths

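# The CROS_WORKON_SUBDIR normalization above, shown in isolation (an
# illustrative helper, not used by the class): a single subdir entry is
# broadcast to every project, and no entries means empty subdirs throughout.
def _example_normalize_subdirs(projects, subdirs):
  if len(subdirs) == 1:
    return subdirs * len(projects)
  if not subdirs:
    return [''] * len(projects)
  return subdirs
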
def main(argv):
  parser = GetParser()
  options = parser.parse_args(argv)
  options.Freeze()

  if options.command == 'list-all':
    board_to_packages = workon_helper.ListAllWorkedOnAtoms()
    color = terminal.Color()
    for board in sorted(board_to_packages):
      print(color.Start(color.GREEN) + board + ':' + color.Stop())
      for package in board_to_packages[board]:
        print(' ' + package)
      print('')
    return 0

  # TODO(wiley): Assert that we're not running as root.
  cros_build_lib.AssertInsideChroot()

  if options.host:
    friendly_name = 'host'
    sysroot = '/'
  elif options.board:
    friendly_name = options.board
    sysroot = cros_build_lib.GetSysroot(board=options.board)
  else:
    cros_build_lib.Die('You must specify either --host or --board.')

  helper = workon_helper.WorkonHelper(sysroot, friendly_name)
  try:
    if options.command == 'start':
      helper.StartWorkingOnPackages(options.packages, use_all=options.all,
                                    use_workon_only=options.workon_only)
    elif options.command == 'stop':
      helper.StopWorkingOnPackages(options.packages, use_all=options.all,
                                   use_workon_only=options.workon_only)
    elif options.command == 'info':
      triples = helper.GetPackageInfo(options.packages, use_all=options.all,
                                      use_workon_only=options.workon_only)
      for package, repos, paths in triples:
        print(package, ','.join(repos), ','.join(paths))
    elif options.command == 'list':
      packages = helper.ListAtoms(use_all=options.all,
                                  use_workon_only=options.workon_only)
      if packages:
        print('\n'.join(packages))
    elif options.command == 'iterate':
      helper.RunCommandInPackages(options.packages, options.iterate_command,
                                  use_all=options.all,
                                  use_workon_only=options.workon_only)
  except workon_helper.WorkonError as e:
    cros_build_lib.Die(e)

  return 0

def FinishParsing(options):
  """Run environment dependent checks on parsed args."""
  target = os.path.join(options.out_dir, options.out_file)
  if os.path.exists(target):
    cros_build_lib.Die('Output file %r already exists.' % target)

  if not os.path.isdir(options.out_dir):
    cros_build_lib.Die(
        'Non-existent directory %r specified for --out-dir' % options.out_dir)

def CreateCacheTarball(extensions, outputdir, identifier, tarball):
  """Cache |extensions| in |outputdir| and pack them in |tarball|."""
  crxdir = os.path.join(outputdir, 'crx')
  jsondir = os.path.join(outputdir, 'json')
  validationdir = os.path.join(outputdir, 'validation')

  osutils.SafeMakedirs(os.path.join(crxdir, 'extensions', 'managed_users'))
  osutils.SafeMakedirs(os.path.join(jsondir, 'extensions', 'managed_users'))
  osutils.SafeMakedirs(os.path.join(jsondir, 'extensions', 'child_users'))
  was_errors = False
  for ext in extensions:
    managed_users = extensions[ext].get('managed_users', 'no')
    cache_crx = extensions[ext].get('cache_crx', 'yes')
    child_users = extensions[ext].get('child_users', 'no')

    # Remove fields that shouldn't be in the output file.
    for key in ('cache_crx', 'managed_users'):
      extensions[ext].pop(key, None)

    if cache_crx == 'yes':
      if not DownloadCrx(ext, extensions[ext], crxdir):
        was_errors = True
    elif cache_crx == 'no':
      pass
    else:
      cros_build_lib.Die('Unknown value for "cache_crx" %s for %s',
                         cache_crx, ext)

    if managed_users == 'yes':
      json_file = os.path.join(jsondir,
                               'extensions/managed_users/%s.json' % ext)
      with open(json_file, 'w') as f:
        json.dump(extensions[ext], f, sort_keys=True, indent=2,
                  separators=(',', ': '))

    if managed_users != 'only':
      target_json_dir = 'extensions'
      if child_users == 'yes':
        target_json_dir = 'extensions/child_users'
      json_file = os.path.join(jsondir, target_json_dir, '%s.json' % ext)
      with open(json_file, 'w') as f:
        json.dump(extensions[ext], f, sort_keys=True, indent=2,
                  separators=(',', ': '))

  if was_errors:
    cros_build_lib.Die('Failed to download some extensions.')

  CreateValidationFiles(validationdir, crxdir, identifier)
  cros_build_lib.CreateTarball(tarball, outputdir)
  logging.info('Tarball created %s', tarball)

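# Note on the json.dump arguments above: on Python 2, indent mode emits a
# trailing space after each comma unless separators=(',', ': ') is passed
# explicitly (Python 3.4+ does this automatically when indent is set), and
# sort_keys keeps the generated files stable and diff-friendly.
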
def main(argv):
  parser = GetParser()
  options = parser.parse_args(argv)
  options.Freeze()

  snapshot_ref = options.snapshot_ref
  if snapshot_ref and not snapshot_ref.startswith('refs/'):
    snapshot_ref = BRANCH_REF_PREFIX + snapshot_ref

  repo = repo_util.Repository.Find(options.repo_path)
  if repo is None:
    cros_build_lib.Die('No repo found in --repo_path %r.', options.repo_path)
  manifest = repo.Manifest(revision_locked=True)
  projects = list(manifest.Projects())

  # Check if projects need snapshots (in parallel).
  needs_snapshot_results = parallel.RunTasksInProcessPool(
      _NeedsSnapshot, [(repo.root, x) for x in projects])

  # Group snapshot-needing projects by project name.
  snapshot_projects = {}
  for project, needs_snapshot in zip(projects, needs_snapshot_results):
    if needs_snapshot:
      snapshot_projects.setdefault(project.name, []).append(project)

  if snapshot_projects and not snapshot_ref:
    cros_build_lib.Die('Some project(s) need snapshot refs but no '
                       '--snapshot-ref specified.')

  # Push snapshot refs (in parallel).
  with parallel.BackgroundTaskRunner(_GitPushProjectUpstream, repo.root,
                                     dry_run=options.dry_run,
                                     processes=options.jobs) as queue:
    for projects in snapshot_projects.values():
      # Since some projects (e.g. chromiumos/third_party/kernel) are checked
      # out in multiple places, we may need to push each checkout to a unique
      # ref.
      need_unique_refs = len(projects) > 1
      used_refs = set()
      for project in projects:
        if need_unique_refs:
          ref = _MakeUniqueRef(project, snapshot_ref, used_refs)
        else:
          ref = snapshot_ref
        # Update the upstream ref both for the push and the output XML.
        project.upstream = ref
        queue.put([project])

  dest = options.output_file
  if dest is None or dest == '-':
    dest = sys.stdout

  manifest.Write(dest)

def main(argv):
  parser = GetParser()
  options = parser.parse_args(argv)

  logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)

  # Check that we have no uncommitted files, and that our checkout's HEAD is
  # contained in a remote branch. This is to ensure that we don't accidentally
  # run uncommitted migrations.
  uncommitted_files = git.RunGit(os.getcwd(), ['status', '-s']).output
  if uncommitted_files:
    cros_build_lib.Die('You appear to have uncommitted files. Aborting!')

  remote_branches = git.RunGit(
      os.getcwd(), ['branch', '-r', '--contains']).output
  if not remote_branches:
    cros_build_lib.Die(
        'You appear to be on a local branch of chromite. Aborting!')

  if options.command == MIGRATE:
    positive_confirmation = 'please modify my database'
    warn = ('This option will apply schema changes to your existing database. '
            'You should not run this against the production database unless '
            'your changes are thoroughly tested, and those tests included '
            'in cidb_integration_test.py (including tests that old data is '
            'sanely migrated forward). Database corruption could otherwise '
            'result. Are you sure you want to proceed? If so, type "%s" '
            'now.\n') % positive_confirmation
  elif options.command == WIPE:
    positive_confirmation = 'please delete my data'
    warn = ('This operation will wipe (i.e. DELETE!) the entire contents of '
            'the database pointed at by %s. Are you sure you want to proceed? '
            'If so, type "%s" now.\n') % (
                os.path.join(options.cred_dir, 'host.txt'),
                positive_confirmation)
  else:
    cros_build_lib.Die('No command or unsupported command. Exiting.')

  print(warn)
  conf_string = cros_build_lib.GetInput('(%s)?: ' % positive_confirmation)
  if conf_string != positive_confirmation:
    cros_build_lib.Die('You changed your mind. Aborting.')

  if options.command == MIGRATE:
    print('OK, applying migrations...')
    db = cidb.CIDBConnection(options.cred_dir)
    db.ApplySchemaMigrations(maxVersion=options.migrate_version)
  elif options.command == WIPE:
    print('OK, wiping database...')
    db = cidb.CIDBConnection(options.cred_dir)
    db.DropDatabase()
    print('Done.')

def main(argv):
  opts = _ParseArgs(argv)
  try:
    sysroot.SetupBoard(opts.build_target, opts.accept_licenses,
                       opts.run_config)
  except portage_util.MissingOverlayError as e:
    # Add a more user-friendly message since board names are easy to typo.
    cros_build_lib.Die(
        '%s\n'
        "Double check the --board setting and make sure you're syncing the "
        'right manifest (internal-vs-external).', e)
  except sysroot.Error as e:
    cros_build_lib.Die(e)

def Run(self):
  """Run cros build."""
  self.options.Freeze()

  if not self.host:
    if not (self.board or self.brick):
      cros_build_lib.Die('You did not specify a board/brick to build for. '
                         'You need to be in a brick directory or set '
                         '--board/--brick/--host.')

    if self.brick and self.brick.legacy:
      cros_build_lib.Die('--brick should not be used with board names. Use '
                         '--board=%s instead.' % self.brick.config['name'])

  if self.board:
    chroot_args = ['--board', self.board]
  else:
    chroot_args = None
  commandline.RunInsideChroot(self, chroot_args=chroot_args)

  if not (self.build_pkgs or self.options.init_only):
    cros_build_lib.Die('No packages found, nothing to build.')

  # Set up the sysroots if not building for host.
  if self.brick or self.board:
    chroot_util.SetupBoard(
        brick=self.brick,
        board=self.board,
        update_chroot=self.chroot_update,
        update_host_packages=self.options.host_packages_update,
        use_binary=self.options.binary)

  if not self.options.init_only:
    # Preliminary: enable all packages that only have a live ebuild.
    if self.options.enable_only_latest:
      workon = workon_helper.WorkonHelper(self.sysroot)
      workon.StartWorkingOnPackages([], use_workon_only=True)

    if command.UseProgressBar():
      op = BrilloBuildOperation()
      op.Run(parallel.RunParallelSteps,
             [self._CheckDependencies, self._Build],
             log_level=logging.DEBUG)
      if self.options.test:
        self._Test()
    else:
      parallel.RunParallelSteps([self._CheckDependencies, self._Build])
      if self.options.test:
        self._Test()
    logging.notice('Build completed successfully.')

def CreateComponent(manifest_path, version, package_name, package_version,
                    platform, files, upload, gsbucket):
  """Create component zip file.

  Args:
    manifest_path: (str) path to raw manifest file.
    version: (str) component version.
    package_name: (str) the package name.
    package_version: (str) package version.
    platform: (str) platform folder name on Omaha.
    files: ([str]) paths for component files.
    upload: (bool) whether to upload the generated component to Omaha.
    gsbucket: (str) Omaha gsbucket path.
  """
  if not os.path.exists(manifest_path):
    cros_build_lib.Die('manifest file is missing: %s', manifest_path)

  # Construct the final manifest.
  with open(manifest_path) as f:
    data = json.load(f)
  data[MANIFEST_VERSION_FIELD] = version
  data[MANIFEST_PACKAGE_VERSION_FIELD] = package_version

  # Create the final zip file of the component and store it in a temp folder.
  with osutils.TempDir(prefix='component_') as tempdir:
    component_folder = os.path.join(tempdir, data[MANIFEST_VERSION_FIELD],
                                    platform)
    os.makedirs(component_folder)
    component_zipfile = os.path.join(component_folder, COMPONENT_ZIP)
    with zipfile.ZipFile(component_zipfile, 'w', zipfile.ZIP_DEFLATED) as zf:
      # Move component files into the zip file.
      for path in files:
        if os.path.isdir(path):
          AddDirectoryToZip(zf, path)
        else:
          zf.write(path, os.path.basename(path))
      # Write the manifest file into the zip file.
      zf.writestr(MANIFEST_FILE_NAME, json.dumps(data))
    logger.info('component is generated at %s', component_zipfile)

    # Upload the component to the gs bucket.
    if upload:
      if '9999' in package_version:
        cros_build_lib.Die(
            'Cannot upload component while the %s package '
            'is being worked on.', package_name)
      UploadComponent(
          os.path.join(tempdir, data[MANIFEST_VERSION_FIELD]), gsbucket)

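# A hedged sketch of what AddDirectoryToZip (used above) presumably does; the
# real helper lives elsewhere in this module and may store entries under
# different archive paths:
def _example_add_directory_to_zip(zf, directory):
  """Recursively add |directory| to the open ZipFile |zf|."""
  parent = os.path.dirname(directory)
  for root, _dirs, file_names in os.walk(directory):
    for name in file_names:
      full_path = os.path.join(root, name)
      # Store entries relative to the directory's parent.
      zf.write(full_path, os.path.relpath(full_path, parent))
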
def main(argv):
  parser = commandline.ArgumentParser(
      '%%(prog)s [options] <version>\n\n%s' % __doc__, caching=True)
  parser.add_argument('version', nargs=1)
  parser.add_argument('--path', default=None, type='path',
                      help='Path of files dir with external_extensions.json')
  parser.add_argument('--create', default=False, action='store_true',
                      help='Create cache tarball with specified name')
  parser.add_argument('--upload', default=False, action='store_true',
                      help='Upload cache tarball with specified name')
  options = parser.parse_args(argv)

  if options.path:
    os.chdir(options.path)

  if not (options.create or options.upload):
    cros_build_lib.Die('Need at least one of --create or --upload.')

  if not os.path.exists('external_extensions.json'):
    cros_build_lib.Die('No external_extensions.json in %s. Did you forget '
                       'the --path option?', os.getcwd())

  identifier = options.version[0]
  tarball = '%s.tar.xz' % identifier
  if options.create:
    with open('external_extensions.json', 'r') as f:
      extensions = json.load(f)
    with osutils.TempDir() as tempdir:
      CreateCacheTarball(extensions, tempdir, identifier,
                         os.path.abspath(tarball))

  if options.upload:
    ctx = gs.GSContext()
    url = os.path.join(UPLOAD_URL_BASE, tarball)
    if ctx.Exists(url):
      cros_build_lib.Die(
          'This version already exists on Google Storage (%s)!\n'
          'NEVER REWRITE AN EXISTING FILE. IT WILL BREAK THE CHROME OS '
          'BUILD!!!', url)
    ctx.Copy(os.path.abspath(tarball), url, acl='project-private')
    logging.info('Tarball uploaded %s', url)
    osutils.SafeUnlink(os.path.abspath(tarball))

def RunCbuildbot(options):
  """Run a cbuildbot build.

  Args:
    options: Parsed cros tryjob arguments.

  Returns:
    Exit code of the build as an int.
  """
  if cros_build_lib.IsInsideChroot():
    cros_build_lib.Die('cbuildbot tryjobs cannot be started inside the '
                       'chroot.')

  args = CbuildbotArgs(options)

  if not CreateBuildrootIfNeeded(options.buildroot):
    return 1

  # Define the command to run.
  cbuildbot = os.path.join(constants.CHROMITE_BIN_DIR, 'cbuildbot')
  cmd = [cbuildbot] + args + options.build_configs

  # Run the tryjob.
  result = cros_build_lib.run(cmd, debug_level=logging.CRITICAL, check=False,
                              cwd=options.buildroot)
  return result.returncode

def _PostParseCheck(options):
  """Perform some usage validation (after we've parsed the arguments).

  Args:
    options: The options object returned by the cli parser.
  """
  if options.local_pkg_path and not os.path.isfile(options.local_pkg_path):
    cros_build_lib.Die('%s is not a file.', options.local_pkg_path)

  if not options.gyp_defines:
    gyp_env = os.getenv('GYP_DEFINES')
    if gyp_env is not None:
      options.gyp_defines = chrome_util.ProcessGypDefines(gyp_env)
      logging.info('GYP_DEFINES taken from environment: %s',
                   options.gyp_defines)

  if not options.gn_args:
    gn_env = os.getenv('GN_ARGS')
    if gn_env is not None:
      options.gn_args = gn_helpers.FromGNArgs(gn_env)
      logging.info('GN_ARGS taken from environment: %s', options.gn_args)

  if not options.staging_flags:
    use_env = os.getenv('USE')
    if use_env is not None:
      options.staging_flags = ' '.join(
          set(use_env.split()).intersection(chrome_util.STAGING_FLAGS))
      logging.info('Staging flags taken from USE in environment: %s',
                   options.staging_flags)

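# The three blocks above share one pattern: an unset CLI option falls back to
# a parsed environment variable. A generic sketch of that pattern (the helper
# name and arguments are illustrative, not part of this script):
def _example_env_fallback(current_value, env_var, parse=lambda v: v):
  """Return |current_value| unless empty, else the parsed environment value."""
  if current_value:
    return current_value
  env_value = os.getenv(env_var)
  return parse(env_value) if env_value is not None else current_value
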
def Create(input_proto, output_proto, _config):
  """Build an image.

  Args:
    input_proto (image_pb2.CreateImageRequest): The input message.
    output_proto (image_pb2.CreateImageResult): The output message.
    _config (api_config.ApiConfig): The API call config.
  """
  board = input_proto.build_target.name

  # Build the base image if no images provided.
  to_build = input_proto.image_types or [_BASE_ID]

  image_types, vm_types = _ParseImagesToCreate(to_build)
  build_config = _ParseCreateBuildConfig(input_proto)

  # Sorted isn't really necessary here, but it's much easier to test.
  result = image.Build(board=board, images=sorted(list(image_types)),
                       config=build_config)

  output_proto.success = result.success

  if result.success:
    # Success -- we need to list out the images we built in the output.
    _PopulateBuiltImages(board, image_types, output_proto)

    if vm_types:
      for vm_type in vm_types:
        is_test = vm_type in [_TEST_VM_ID, _TEST_GUEST_VM_ID]
        try:
          if vm_type in [_BASE_GUEST_VM_ID, _TEST_GUEST_VM_ID]:
            vm_path = image.CreateGuestVm(board, is_test=is_test)
          else:
            vm_path = image.CreateVm(
                board, disk_layout=build_config.disk_layout, is_test=is_test)
        except image.ImageToVmError as e:
          cros_build_lib.Die(e)

        new_image = output_proto.images.add()
        new_image.path = vm_path
        new_image.type = vm_type
        new_image.build_target.name = board

    # Read the metric events log and pipe them into output_proto.events.
    deserialize_metrics_log(output_proto.events, prefix=board)
    return controller.RETURN_CODE_SUCCESS
  else:
    # Failure: include all of the failed packages in the output when
    # available.
    if not result.failed_packages:
      return controller.RETURN_CODE_COMPLETED_UNSUCCESSFULLY

    for package in result.failed_packages:
      current = output_proto.failed_packages.add()
      current.category = package.category
      current.package_name = package.package
      if package.version:
        current.version = package.version

    return controller.RETURN_CODE_UNSUCCESSFUL_RESPONSE_AVAILABLE

def Test(input_proto, output_proto, config):
  """Run image tests.

  Args:
    input_proto (image_pb2.ImageTestRequest): The input message.
    output_proto (image_pb2.ImageTestResult): The output message.
    config (api_config.ApiConfig): The API call config.
  """
  image_path = input_proto.image.path
  board = input_proto.build_target.name
  result_directory = input_proto.result.directory

  if not os.path.isfile(image_path) or not image_path.endswith('.bin'):
    cros_build_lib.Die(
        'The image.path must be an existing image file with a .bin extension.')

  if config.validate_only:
    return controller.RETURN_CODE_VALID_INPUT

  success = image.Test(board, result_directory, image_dir=image_path)
  output_proto.success = success

  if success:
    return controller.RETURN_CODE_SUCCESS
  else:
    return controller.RETURN_CODE_COMPLETED_UNSUCCESSFULLY

def UserActReviewers(opts, cl, *args):
  """Add/remove reviewers' emails for a CL (prepend with '~' to remove)."""
  emails = args
  # Allow for an optional leading '~'.
  email_validator = re.compile(r'^[~]?%s$' % constants.EMAIL_REGEX)
  add_list, remove_list, invalid_list = [], [], []

  for x in emails:
    if not email_validator.match(x):
      invalid_list.append(x)
    elif x[0] == '~':
      remove_list.append(x[1:])
    else:
      add_list.append(x)

  if invalid_list:
    cros_build_lib.Die(
        'Invalid email address(es): %s' % ', '.join(invalid_list))

  if add_list or remove_list:
    helper, cl = GetGerrit(opts, cl)
    helper.SetReviewers(cl, add=add_list, remove=remove_list,
                        dryrun=opts.dryrun)

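# Quick illustration of the optional leading '~' convention above, using a
# simplified stand-in pattern rather than constants.EMAIL_REGEX:
def _example_reviewer_validator():
  validator = re.compile(r'^[~]?%s$' % r'[^@ ]+@[^@ ]+')
  assert validator.match('someone@chromium.org')    # Add reviewer.
  assert validator.match('~someone@chromium.org')   # Remove reviewer.
  assert not validator.match('not-an-email')        # Invalid input.
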
def main(argv):
  parser = GetParser()
  opts = parser.parse_args(argv)

  # A cache of gerrit helpers we'll load on demand.
  opts.gerrit = {}

  # Convert the user-friendly command line option into a gerrit parameter.
  opts.notify = 'ALL' if opts.send_email else 'NONE'
  opts.Freeze()

  # pylint: disable=global-statement
  global COLOR
  COLOR = terminal.Color(enabled=opts.color)

  # Now look up the requested user action and run it.
  functor = globals().get(ACTION_PREFIX + opts.action.capitalize())
  if functor:
    argspec = inspect.getargspec(functor)
    if argspec.varargs:
      arg_min = getattr(functor, 'arg_min', len(argspec.args))
      if len(opts.args) < arg_min:
        parser.error('incorrect number of args: %s expects at least %s' %
                     (opts.action, arg_min))
    elif len(argspec.args) - 1 != len(opts.args):
      parser.error('incorrect number of args: %s expects %s' %
                   (opts.action, len(argspec.args) - 1))
    try:
      functor(opts, *opts.args)
    except (cros_build_lib.RunCommandError, gerrit.GerritException,
            gob_util.GOBError) as e:
      cros_build_lib.Die(e)
  else:
    parser.error('unknown action: %s' % (opts.action,))

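# Sketch of the reflection-based dispatch above (an illustrative helper, not
# part of the script): the expected CLI argument count for a fixed-arg action
# handler is its positional parameter count minus the leading |opts|
# parameter, while *varargs handlers may declare a minimum via an 'arg_min'
# attribute.
def _example_expected_args(functor):
  """Return (min_args, exact) for an action handler, mirroring main()."""
  argspec = inspect.getargspec(functor)
  if argspec.varargs:
    return getattr(functor, 'arg_min', len(argspec.args)), False
  return len(argspec.args) - 1, True
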
def main(argv):
  opts = _ParseArguments(argv)
  args = (constants.BOTH_OVERLAYS, opts.board)

  # Verify that a primary overlay exists.
  try:
    primary_overlay = portage_util.FindPrimaryOverlay(*args)
  except portage_util.MissingOverlayException as ex:
    cros_build_lib.Die(str(ex))

  # Get the overlays to print.
  if opts.primary_only:
    overlays = [primary_overlay]
  else:
    overlays = portage_util.FindOverlays(*args)

  # Exclude any overlays in src/third_party, for backwards compatibility with
  # scripts that expected these to not be listed.
  if not opts.all:
    ignore_prefix = os.path.join(constants.SOURCE_ROOT, 'src', 'third_party')
    overlays = [o for o in overlays if not o.startswith(ignore_prefix)]

  if opts.board_overlay and os.path.isdir(opts.board_overlay):
    overlays.append(os.path.abspath(opts.board_overlay))

  print('\n'.join(overlays))

def FromPath(cls, path, empty_if_missing=False):
  if os.path.isfile(path):
    with open(path) as f:
      return cls(f.read())
  elif not empty_if_missing:
    cros_build_lib.Die('Manifest file, %r, not found' % path)
  return cls()

def UprevVersionedPackage(input_proto, output_proto, _config):
  """Uprev a versioned package.

  See go/pupr-generator for details about this endpoint.
  """
  chroot = controller_util.ParseChroot(input_proto.chroot)
  build_targets = controller_util.ParseBuildTargets(input_proto.build_targets)
  package = controller_util.PackageInfoToCPV(input_proto.package_info)
  refs = []
  for ref in input_proto.versions:
    refs.append(GitRef(path=ref.repository, ref=ref.ref,
                       revision=ref.revision))

  try:
    result = packages.uprev_versioned_package(package, build_targets, refs,
                                              chroot)
  except packages.Error as e:
    # Handle module errors nicely, let everything else bubble up.
    cros_build_lib.Die(e)

  if not result.uprevved:
    # No uprevs executed, skip the output population.
    return

  for modified in result.modified:
    uprev_response = output_proto.responses.add()
    uprev_response.version = modified.new_version
    for path in modified.files:
      uprev_response.modified_ebuilds.add().path = path