def testBuildStrippedPackagesArchive(self):
  """Test generation of stripped package tarball using globs."""
  package_globs = ['chromeos-base/chromeos-chrome', 'sys-kernel/*kernel*']
  self.PatchObject(
      portage_util, 'FindPackageNameMatches',
      side_effect=[
          [portage_util.SplitCPV('chromeos-base/chrome-1-r0')],
          [portage_util.SplitCPV('sys-kernel/kernel-1-r0'),
           portage_util.SplitCPV('sys-kernel/kernel-2-r0')]])
  # Drop "stripped packages".
  pkg_dir = pathjoin(self._buildroot, 'chroot', 'build', 'test-board',
                     'stripped-packages')
  osutils.Touch(pathjoin(pkg_dir, 'chromeos-base', 'chrome-1-r0.tbz2'),
                makedirs=True)
  sys_kernel = pathjoin(pkg_dir, 'sys-kernel')
  osutils.Touch(pathjoin(sys_kernel, 'kernel-1-r0.tbz2'), makedirs=True)
  osutils.Touch(pathjoin(sys_kernel, 'kernel-1-r01.tbz2'), makedirs=True)
  osutils.Touch(pathjoin(sys_kernel, 'kernel-2-r0.tbz1'), makedirs=True)
  osutils.Touch(pathjoin(sys_kernel, 'kernel-2-r0.tbz2'), makedirs=True)
  stripped_files_list = [
      abspath(pathjoin(pkg_dir, 'chromeos-base', 'chrome-1-r0.tbz2')),
      abspath(pathjoin(pkg_dir, 'sys-kernel', 'kernel-1-r0.tbz2')),
      abspath(pathjoin(pkg_dir, 'sys-kernel', 'kernel-2-r0.tbz2'))]

  tar_mock = self.PatchObject(commands, 'BuildTarball')
  self.PatchObject(cros_build_lib, 'RunCommand')
  commands.BuildStrippedPackagesTarball(self._buildroot, 'test-board',
                                        package_globs, self.tempdir)
  tar_mock.assert_called_once_with(
      self._buildroot, stripped_files_list,
      pathjoin(self.tempdir, 'stripped-packages.tar'),
      compressed=False)
def _NeedsInstall(self, cpv, slot, build_time, optional):
  """Returns whether a package needs to be installed on the target.

  Args:
    cpv: Fully qualified CPV (string) of the package.
    slot: Slot identifier (string).
    build_time: The BUILD_TIME value (string) of the binpkg.
    optional: Whether package is optional on the target.

  Returns:
    A tuple (install, update) indicating whether to |install| the package
    and whether it is an |update| to an existing package.

  Raises:
    ValueError: if slot is not provided.
  """
  # If not checking installed packages, always install.
  if not self.target_db:
    return True, False

  cp = self._GetCP(cpv)
  target_pkg_info = self.target_db.get(cp, dict()).get(slot)
  if target_pkg_info is not None:
    if cpv != target_pkg_info.cpv:
      attrs = portage_util.SplitCPV(cpv)
      target_attrs = portage_util.SplitCPV(target_pkg_info.cpv)
      logging.debug(
          'Updating %s: version (%s) different on target (%s)',
          cp, attrs.version, target_attrs.version)
      return True, True

    if build_time != target_pkg_info.build_time:
      logging.debug(
          'Updating %s: build time (%s) different on target (%s)',
          cpv, build_time, target_pkg_info.build_time)
      return True, True

    logging.debug('Not updating %s: already up-to-date (%s, built %s)',
                  cp, target_pkg_info.cpv, target_pkg_info.build_time)
    return False, False

  if optional:
    logging.debug('Not installing %s: missing on target but optional', cp)
    return False, False

  logging.debug('Installing %s: missing on target and non-optional (%s)',
                cp, cpv)
  return True, False
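# A hedged summary (not part of the original module) of the (install, update)
# pairs _NeedsInstall returns, assuming a populated target_db:
#
#   package missing from target, non-optional      -> (True, False)   install
#   package missing from target, optional          -> (False, False)  skip
#   installed CPV differs from |cpv|               -> (True, True)    update
#   BUILD_TIME differs from installed binpkg       -> (True, True)    update
#   same CPV and same BUILD_TIME                   -> (False, False)  skip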
def GetIrrelevantChanges(cls, changes, config, build_root, manifest,
                         packages_under_test):
  """Determine changes irrelevant to build |config|.

  This method determines a set of changes that are irrelevant to the build
  |config|. The general rule of thumb is that if we are unsure whether a
  change is relevant, consider it relevant.

  Args:
    changes: The list or set of GerritPatch instances.
    config: The cbuildbot config.
    build_root: Path to the build root.
    manifest: A ManifestCheckout instance representing our build directory.
    packages_under_test: A list of packages that were tested in this build.

  Returns:
    A subset of |changes| which are irrelevant to |config|.
  """
  untriaged_changes = set(changes)
  irrelevant_changes = set()

  # Changes that modify projects used in building are always relevant.
  untriaged_changes -= cls.GetChangesToBuildTools(changes, manifest)

  if packages_under_test is not None:
    # Strip the version of the packages in packages_under_test.
    cpv_list = [portage_util.SplitCPV(x) for x in packages_under_test]
    packages_under_test = [
        '%s/%s' % (x.category, x.package) for x in cpv_list
    ]

  # Handle overlay changes.
  # ClassifyOverlayChanges only handles overlays visible to this
  # build. For example, an external build may not be able to view
  # the internal overlays. However, in that case, the internal changes
  # have already been filtered out in CommitQueueSyncStage, and are
  # not included in |changes|.
  overlay_changes, irrelevant_overlay_changes = cls.ClassifyOverlayChanges(
      untriaged_changes, config, build_root, manifest, packages_under_test)
  untriaged_changes -= overlay_changes
  irrelevant_changes |= irrelevant_overlay_changes

  # Handle workon package changes.
  if packages_under_test is not None:
    try:
      workon_changes, irrelevant_workon_changes = cls.ClassifyWorkOnChanges(
          untriaged_changes, config, build_root, manifest,
          packages_under_test)
    except Exception as e:
      # Ignore the exception if we cannot categorize workon
      # changes. We will conservatively assume the changes are
      # relevant.
      logging.warning('Unable to categorize cros workon changes: %s', e)
    else:
      untriaged_changes -= workon_changes
      irrelevant_changes |= irrelevant_workon_changes

  return irrelevant_changes
def testPackageBuildFailure(self):
  """Test handling of raised BuildPackageFailure."""
  tempdir = osutils.TempDir(base_dir=self.tempdir)
  self.PatchObject(osutils, 'TempDir', return_value=tempdir)

  pkgs = ['cat/pkg', 'foo/bar']
  expected = [('cat', 'pkg'), ('foo', 'bar')]

  result = test_service.BuildTargetUnitTestResult(1, None)
  result.failed_cpvs = [
      portage_util.SplitCPV(p, strict=False) for p in pkgs
  ]
  self.PatchObject(test_service, 'BuildTargetUnitTest', return_value=result)

  input_msg = self._GetInput(board='board', result_path=self.tempdir)
  output_msg = self._GetOutput()

  rc = test_controller.BuildTargetUnitTest(input_msg, output_msg,
                                           self.api_config)

  self.assertEqual(
      controller.RETURN_CODE_UNSUCCESSFUL_RESPONSE_AVAILABLE, rc)
  self.assertTrue(output_msg.failed_packages)
  failed = []
  for pi in output_msg.failed_packages:
    failed.append((pi.category, pi.package_name))
  self.assertCountEqual(expected, failed)
def _AddPackagesForPrebuilt(filename):
  """Add list of packages for upload.

  Process a file that lists all the packages that can be uploaded to the
  package prebuilt bucket and generates the command line args for
  upload_prebuilts.

  Args:
    filename: file with the package full name (category/name-version), one
      package per line.

  Returns:
    A list of parameters for upload_prebuilts. For example:
    ['--packages=net-misc/dhcp', '--packages=app-admin/eselect-python']
  """
  try:
    cmd = []
    with open(filename) as f:
      # Get only the package name and category as that is what
      # upload_prebuilts matches on.
      for line in f:
        atom = line.split('#', 1)[0].strip()
        try:
          cpv = portage_util.SplitCPV(atom)
        except ValueError:
          logging.warning('Could not split atom %r (line: %r)', atom, line)
          continue
        if cpv:
          cmd.extend(['--packages=%s/%s' % (cpv.category, cpv.package)])
    return cmd
  except IOError as e:
    logging.warning('Problem with package file %s' % filename)
    logging.warning('Skipping uploading of prebuilts.')
    logging.warning('ERROR(%d): %s' % (e.errno, e.strerror))
    return None
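# Illustrative sketch only: a hypothetical package list file and the
# upload_prebuilts arguments _AddPackagesForPrebuilt would derive from it
# (versions are dropped; '#' comments and unparseable lines are skipped).
#
#   # packages.txt
#   net-misc/dhcp-4.4.1            # trailing comments are stripped
#   app-admin/eselect-python-20160222
#
#   _AddPackagesForPrebuilt('packages.txt')
#   # -> ['--packages=net-misc/dhcp', '--packages=app-admin/eselect-python']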
def _ShouldFilterPackage(self, pkg):
  """Returns True if |pkg| should be skipped (not in the requested set)."""
  if not self._packages:
    return False

  cpv = portage_util.SplitCPV(pkg['CPV'])
  cp = '%s/%s' % (cpv.category, cpv.package)
  self._found_packages.add(cp)
  return cpv.package not in self._packages and cp not in self._packages
def testFailureOutputHandling(self):
  """Test failed package handling."""
  # Prevent argument validation error.
  self.PatchObject(sysroot_lib.Sysroot, 'IsToolchainInstalled',
                   return_value=True)

  in_proto = self._InputProto(build_target=self.build_target,
                              sysroot_path=self.sysroot)
  out_proto = self._OutputProto()

  # Failed package info and expected list for verification.
  err_pkgs = ['cat/pkg', 'cat2/pkg2']
  err_cpvs = [
      portage_util.SplitCPV(cpv, strict=False) for cpv in err_pkgs
  ]
  expected = [('cat', 'pkg'), ('cat2', 'pkg2')]

  # Force error to be raised with the packages.
  error = sysroot_lib.PackageInstallError('Error',
                                          cros_build_lib.CommandResult(),
                                          packages=err_cpvs)
  self.PatchObject(sysroot_service, 'BuildPackages', side_effect=error)

  rc = sysroot_controller.InstallPackages(in_proto, out_proto,
                                          self.api_config)
  # This needs to return 2 to indicate the available error response.
  self.assertEqual(
      controller.RETURN_CODE_UNSUCCESSFUL_RESPONSE_AVAILABLE, rc)
  for package in out_proto.failed_packages:
    cat_pkg = (package.category, package.package_name)
    self.assertIn(cat_pkg, expected)
def DetermineAndroidVersion(self, boards=None):
  """Determine the current Android version in buildroot now and return it.

  This uses the typical portage logic to determine which version of Android
  is active right now in the buildroot.

  Args:
    boards: List of boards to check version of.

  Returns:
    The Android build ID of the container for the boards.

  Raises:
    NoAndroidVersionError: if no unique Android version can be determined.
  """
  if not boards:
    return None
  # Verify that all boards have the same version.
  version = None
  for board in boards:
    package = self.DetermineAndroidPackage(board)
    if not package:
      raise NoAndroidVersionError(
          'Android version could not be determined for %s' % boards)
    cpv = portage_util.SplitCPV(package)
    if not cpv:
      raise NoAndroidVersionError(
          'Android version could not be determined for %s' % board)
    if not version:
      version = cpv.version_no_rev
    elif version != cpv.version_no_rev:
      raise NoAndroidVersionError(
          'Different Android versions (%s vs %s) for %s' %
          (version, cpv.version_no_rev, boards))
  return version
def AugmentDepGraphProtoFromJsonMap(json_map, graph):
  """Augment package deps from |json_map| to graph object.

  Args:
    json_map: the json object that stores the portage package. This is
      generated from chromite.lib.service.dependency.GetBuildDependency()
    graph: the proto object that represents the dependency graph (see
      DepGraph message in chromite/api/depgraph.proto)
  """
  graph.build_target.name = json_map['target_board']

  for data in json_map['package_deps'].values():
    package_dep_info = graph.package_deps.add()
    package_info = package_dep_info.package_info
    package_info.package_name = data['name']
    package_info.category = data['category']
    package_info.version = data['version']
    for dep in data['deps']:
      cpv = portage_util.SplitCPV(dep, strict=False)
      dep_package = package_dep_info.dependency_packages.add()
      dep_package.package_name = cpv.package
      dep_package.category = cpv.category
      if cpv.version:
        dep_package.version = cpv.version

    package_CPV = '%s/%s-%s' % (
        package_info.category, package_info.package_name,
        package_info.version)
    for path in json_map['source_path_mapping'][package_CPV]:
      source_path = package_dep_info.dependency_source_paths.add()
      source_path.path = path
def testDeployEmerge(self):
  """Test that deploy._Emerge is called for each package."""
  _BINPKG = '/path/to/bar-1.2.5.tbz2'
  def FakeIsFile(fname):
    return fname == _BINPKG

  packages = ['some/foo-1.2.3', _BINPKG, 'some/foobar-2.0']
  cpvs = ['some/foo-1.2.3', 'to/bar-1.2.5', 'some/foobar-2.0']
  self.package_scanner.return_value = PackageScannerFake(
      packages,
      {'some/foo-1.2.3': {}, _BINPKG: {}, 'some/foobar-2.0': {}},
      cpvs)
  self.PatchObject(os.path, 'isfile', side_effect=FakeIsFile)

  deploy.Deploy(None, ['package'], force=True, clean_binpkg=False)

  # Check that package names were correctly resolved into binary packages.
  self.get_packages_paths.assert_called_once_with(
      [portage_util.SplitCPV(p) for p in cpvs], True, 'sysroot')
  # Check that deploy._Emerge is called the right number of times.
  self.assertEqual(self.emerge.call_count, len(packages))
  self.assertEqual(self.unmerge.call_count, 0)
def _BuildTargetUnitTestFailedResponse(_input_proto, output_proto, _config):
  """Add failed packages to a failed response."""
  packages = ['foo/bar', 'cat/pkg']
  failed_cpvs = [portage_util.SplitCPV(p, strict=False) for p in packages]
  for cpv in failed_cpvs:
    package_info = output_proto.failed_packages.add()
    controller_util.CPVToPackageInfo(cpv, package_info)
def _FindPackageMatches(self, cpv_pattern):
  """Returns list of binpkg (CP, slot) pairs that match |cpv_pattern|.

  This breaks |cpv_pattern| into its C, P and V components, each of which
  may or may not be present or contain wildcards. It then scans the binpkgs
  database to find all atoms that match these components, returning a list
  of CP and slot qualifier. When the pattern does not specify a version, or
  when a CP has only one slot in the binpkgs database, we omit the slot
  qualifier in the result.

  Args:
    cpv_pattern: A CPV pattern, potentially partial and/or having wildcards.

  Returns:
    A list of (CP, slot) pairs of packages in the binpkgs database that
    match the pattern.
  """
  attrs = portage_util.SplitCPV(cpv_pattern, strict=False)
  cp_pattern = os.path.join(attrs.category or '*', attrs.package or '*')
  matches = []
  for cp, cp_slots in self.binpkgs_db.iteritems():
    if not fnmatch.fnmatchcase(cp, cp_pattern):
      continue

    # If no version attribute was given or there's only one slot, omit the
    # slot qualifier.
    if not attrs.version or len(cp_slots) == 1:
      matches.append((cp, None))
    else:
      cpv_pattern = '%s-%s' % (cp, attrs.version)
      for slot, pkg_info in cp_slots.iteritems():
        if fnmatch.fnmatchcase(pkg_info.cpv, cpv_pattern):
          matches.append((cp, slot))

  return matches
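# A hedged illustration of the matching above, using a hypothetical
# binpkgs_db with one single-slot and one dual-slot package:
#
#   binpkgs_db = {'dev-libs/foo': {'0': <pkg cpv='dev-libs/foo-1.2'>},
#                 'sys-apps/bar': {'0': <...>, '1': <...>}}
#
#   _FindPackageMatches('sys-apps/bar')      # no version -> slot omitted:
#                                            # [('sys-apps/bar', None)]
#   _FindPackageMatches('dev-libs/foo-1.2')  # single slot -> slot omitted:
#                                            # [('dev-libs/foo', None)]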
def _GetPackagesPaths(pkgs, strip, sysroot):
  """Returns paths to binary |pkgs|.

  Each package argument may be specified as a filename, in which case it is
  returned as-is, or it may be a CPV value, in which case it is stripped (if
  instructed) and a path to it is returned.

  Args:
    pkgs: List of package arguments.
    strip: Whether or not to run strip_package for CPV packages.
    sysroot: The sysroot path.

  Returns:
    List of paths corresponding to |pkgs|.
  """
  indexes = []
  cpvs = []
  for i, pkg in enumerate(pkgs):
    if not os.path.isfile(pkg):
      indexes.append(i)
      cpvs.append(portage_util.SplitCPV(pkg))

  cpv_paths = cpvs and _GetPackagesByCPV(cpvs, strip, sysroot)
  paths = list(pkgs)
  for i, cpv_path in zip(indexes, cpv_paths):
    paths[i] = cpv_path
  return paths
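# A short sketch (hypothetical arguments) of the mixed handling above: paths
# to existing files pass through untouched, while CPV strings are resolved
# via _GetPackagesByCPV.
#
#   _GetPackagesPaths(['some/foo-1.2.3', '/path/to/bar-1.2.5.tbz2'],
#                     strip=True, sysroot='/build/board')
#   # -> [<path from _GetPackagesByCPV for some/foo-1.2.3>,
#   #     '/path/to/bar-1.2.5.tbz2']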
def test_calls_function(self):
  """Test calling a registered function."""
  patch = self.PatchObject(self, 'uprev_category_package')

  cpv = portage_util.SplitCPV('category/package', strict=False)
  packages.uprev_versioned_package(cpv, [], [], Chroot())

  patch.assert_called()
def _ObservePatches(self, temp_space, deps_map):
  """Returns 'category/package patch_name' entries for applied patches.

  Runs 'ebuild <ebuild> clean prepare clean' for each package in |deps_map|
  that lives in our overlay, then harvests the 'Applying ...' lines from the
  logs written under |temp_space|.
  """
  for cpv in deps_map:
    split = portage_util.SplitCPV(cpv)
    if self.Ignored('%s/%s' % (split.category, split.package)):
      continue
    cmd = self.equery_cmd[:]
    cmd.extend(['which', cpv])
    ebuild_path = self._invoke_command(cmd, print_cmd=False,
                                       redirect_stdout=True).output.rstrip()
    # Some of these packages will be from other portdirs. Since we are
    # only interested in extracting the patches from one particular
    # overlay, we skip ebuilds not from that overlay.
    if self.overlay_dir != os.path.commonprefix([self.overlay_dir,
                                                 ebuild_path]):
      continue

    # By running 'ebuild blah.ebuild prepare', we get logs in PORT_LOGDIR
    # of what patches were applied. We clean first, to ensure we get a
    # complete log, and clean again afterwards to avoid leaving a mess.
    cmd = self.ebuild_cmd[:]
    cmd.extend([ebuild_path, 'clean', 'prepare', 'clean'])
    self._invoke_command(cmd, print_cmd=False, redirect_stdout=True)
    self.package_count += 1

  # Done with ebuild. Now just harvest the logs and we're finished.
  # This regex is tuned intentionally to ignore a few unhelpful cases.
  # E.g. elibtoolize repetitively applies a set of sed/portage related
  # patches. And media-libs/jpeg says it is applying
  # "various patches (bugfixes/updates)", which isn't very useful for us.
  # So, if you noticed these omissions, it was intentional, not a bug. :-)
  patch_regex = r'^ [*] Applying ([^ ]*) [.][.][.].*'
  output = cros_build_lib.RunCommand(
      ['egrep', '-r', patch_regex, temp_space], print_cmd=False,
      redirect_stdout=True).output
  lines = output.splitlines()
  patches = []
  patch_regex = re.compile(patch_regex)
  for line in lines:
    cat, pv, _, patchmsg = line.split(':')
    cat = os.path.basename(cat)
    split = portage_util.SplitCPV('%s/%s' % (cat, pv))
    patch_name = re.sub(patch_regex, r'\1', patchmsg)
    patches.append('%s/%s %s' % (cat, split.package, patch_name))

  return patches
def testNoVersion(self):
  """Test handling when no version given."""
  pi = common_pb2.PackageInfo()
  cpv = portage_util.SplitCPV('cat/pkg', strict=False)
  controller_util.CPVToPackageInfo(cpv, pi)
  self.assertEqual('cat', pi.category)
  self.assertEqual('pkg', pi.package_name)
  self.assertEqual('', pi.version)
def testPackageOnly(self):
  """Test handling when only given the package name."""
  pi = common_pb2.PackageInfo()
  cpv = portage_util.SplitCPV('pkg', strict=False)
  controller_util.CPVToPackageInfo(cpv, pi)
  self.assertEqual('', pi.category)
  self.assertEqual('pkg', pi.package_name)
  self.assertEqual('', pi.version)
def testAllFields(self):
  """Test handling when all fields present."""
  pi = common_pb2.PackageInfo()
  cpv = portage_util.SplitCPV('cat/pkg-2.0.0', strict=False)
  controller_util.CPVToPackageInfo(cpv, pi)
  self.assertEqual('cat', pi.category)
  self.assertEqual('pkg', pi.package_name)
  self.assertEqual('2.0.0', pi.version)
def testSplitCPV(self):
  """Test splitting CPV into components."""
  cpv = 'foo/bar-4.5.6_alpha-r6'
  cat, pv = cpv.split('/', 1)
  split_pv = portage_util.SplitPV(pv)
  split_cpv = portage_util.SplitCPV(cpv)
  self.assertEquals(split_cpv.category, cat)
  for k, v in split_pv._asdict().iteritems():
    self.assertEquals(getattr(split_cpv, k), v)
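# Hedged usage sketch of SplitCPV for the sample string above (assumes a
# chromite checkout on sys.path); the attribute names mirror how callers
# elsewhere in this section use the result.
from chromite.lib import portage_util

c = portage_util.SplitCPV('foo/bar-4.5.6_alpha-r6')
print(c.category)        # 'foo'
print(c.package)         # 'bar'
print(c.version_no_rev)  # '4.5.6_alpha'
print(c.cp)              # 'foo/bar'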
def __init__(self, board, fullnamerev):
  """Package info initializer.

  Args:
    board: The board this package was built for.
    fullnamerev: package name of the form 'x11-base/X.Org-1.9.3-r23'
  """
  self.board = board  # This field may be None, based on entry path.

  #
  # Populate these fields from fullnamerev:
  #   category, name, version, revision
  #
  try:
    cpv = portage_util.SplitCPV(fullnamerev)
  except TypeError:
    cpv = None

  # A bad package can either raise a TypeError exception or return None.
  if not cpv:
    raise AssertionError(
        'portage couldn\'t find %s, missing version number?' % fullnamerev)

  #
  # These define the package uniquely.
  #
  self.category, self.name, self.version, self.revision = (
      cpv.category, cpv.package, cpv.version_no_rev, cpv.rev)
  if self.revision is not None:
    self.revision = str(self.revision).lstrip('r')

  #
  # These fields hold license information used to generate the credits page.
  #

  # This contains license names for this package.
  self.license_names = set()

  # Full text of discovered license information.
  self.license_text_scanned = []

  self.homepages = []

  #
  # These fields show the results of processing.
  #

  # After reading basic package information, we can mark the package as
  # one to skip in licensing.
  self.skip = False

  # Intelligently populate initial skip information.
  self.LookForSkip()
def test_determine_chrome_version(self):
  """Tests that a valid chrome version is returned."""
  # Mock PortageqBestVisible to return a valid chrome version string.
  r1_cpf = 'chromeos-base/chromeos-chrome-78.0.3900.0_rc-r1'
  r1_cpv = portage_util.SplitCPV(r1_cpf)
  self.PatchObject(portage_util, 'PortageqBestVisible', return_value=r1_cpv)

  chrome_version = packages.determine_chrome_version(self.build_target)
  version_numbers = chrome_version.split('.')
  self.assertEqual(len(version_numbers), 4)
  self.assertEqual(int(version_numbers[0]), 78)
def GenerateCPEList(deps_list, sysroot):
  """Generate all CPEs for the packages in deps_list and SDK packages.

  Args:
    deps_list: A flattened dependency tree (cros_extract_deps format).
    sysroot: The board directory to use when finding SDK packages.

  Returns:
    A list of CPE info for packages in deps_list and SDK packages, e.g.
    [
      {
        "ComponentName": "app-admin/sudo",
        "Repository": "cros",
        "Targets": [
          "cpe:/a:todd_miller:sudo:1.8.19p2"
        ]
      },
      {
        "ComponentName": "sys-libs/glibc",
        "Repository": "cros",
        "Targets": [
          "cpe:/a:gnu:glibc:2.23"
        ]
      }
    ]
  """
  cpe_dump = []

  # Generate CPEs for SDK packages.
  for sdk_cpv in sorted(GenerateSDKCPVList(sysroot)):
    # Only add CPE for SDK CPVs missing in deps_list.
    if deps_list.get(sdk_cpv) is not None:
      continue
    split = portage_util.SplitCPV(sdk_cpv)
    cpes = GetCPEFromCPV(split.category, split.package, split.version_no_rev)
    if cpes:
      cpe_dump.append({'ComponentName': '%s' % split.cp,
                       'Repository': 'cros',
                       'Targets': sorted(cpes)})
    else:
      logging.warning('No CPE entry for %s', sdk_cpv)

  # Generate CPEs for packages in deps_list.
  for cpv, record in sorted(deps_list.items()):
    if record['cpes']:
      name = '%s/%s' % (record['category'], record['name'])
      cpe_dump.append({'ComponentName': name,
                       'Repository': 'cros',
                       'Targets': sorted(record['cpes'])})
    else:
      logging.warning('No CPE entry for %s', cpv)

  return sorted(cpe_dump, key=lambda k: k['ComponentName'])
def _GetPackagesPaths(pkgs, strip, sysroot):
  """Returns paths to binary |pkgs|.

  Args:
    pkgs: List of package CPV strings.
    strip: Whether or not to run strip_package for CPV packages.
    sysroot: The sysroot path.

  Returns:
    List of paths corresponding to |pkgs|.
  """
  cpvs = [portage_util.SplitCPV(p) for p in pkgs]
  return _GetPackagesByCPV(cpvs, strip, sysroot)
def __init__(self, return_code, failed_packages):
  """Init method.

  Args:
    return_code (int): The build return code.
    failed_packages (list[str]): A list of failed packages as strings.
  """
  self.failed_packages = []
  for package in failed_packages or []:
    self.failed_packages.append(portage_util.SplitCPV(package, strict=False))

  # The return code should always be non-zero if there are any failed
  # packages, but it's cheap insurance, so check it.
  self.success = return_code == 0 and not self.failed_packages
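# Hedged example of how |success| is derived above: any failed package
# forces failure even when the return code is 0.
#
#   BuildTargetUnitTestResult(0, None).success          # True
#   BuildTargetUnitTestResult(0, ['foo/bar']).success   # False
#   BuildTargetUnitTestResult(1, None).success          # False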
def _FindKernelVersion(self):
  """Returns a string containing the kernel version for this build."""
  try:
    packages = portage_util.GetPackageDependencies(self._current_board,
                                                   'virtual/linux-sources')
  except cros_build_lib.RunCommandError:
    logging.warning('Unable to get package list for metadata.')
    return None
  for package in packages:
    if package.startswith('sys-kernel/chromeos-kernel-'):
      kernel_version = portage_util.SplitCPV(package).version
      logging.info('Found active kernel version: %s', kernel_version)
      return kernel_version
  return None
def setUp(self):
  self.buildstore = FakeBuildStore()
  # Replace sudo_run, since we don't care about sudo.
  self.PatchObject(cros_build_lib, 'sudo_run', wraps=cros_build_lib.run)
  self.uploadartifact_mock = self.PatchObject(
      generic_stages.ArchivingStageMixin, 'UploadArtifact')

  # Prepare a fake chroot.
  self.fake_chroot = os.path.join(self.build_root,
                                  'chroot/build/amd64-host')
  self.fake_json_data = {}
  osutils.SafeMakedirs(self.fake_chroot)
  osutils.Touch(os.path.join(self.fake_chroot, 'file'))
  for package, v in self.fake_packages:
    cpv = portage_util.SplitCPV('%s-%s' % (package, v))
    self.fake_json_data.setdefault(cpv.cp, []).append([v, {}])
def testFailure(self):
  """Test non-zero return code and failed package handling."""
  packages = ['foo/bar', 'cat/pkg']
  cpvs = [portage_util.SplitCPV(p, strict=False) for p in packages]
  self.PatchObject(portage_util, 'ParseDieHookStatusFile',
                   return_value=cpvs)

  expected_rc = 1
  self.rc.SetDefaultCmdResult(returncode=expected_rc)

  result = test.BuildTargetUnitTest(self.build_target, self.chroot)

  self.assertFalse(result.success)
  self.assertEqual(expected_rc, result.return_code)
  self.assertCountEqual(cpvs, result.failed_cpvs)
def DetermineAndroidVersion(self, package):
  """Determine the current Android version in buildroot now and return it.

  This uses the typical portage logic to determine which version of Android
  is active right now in the buildroot.

  Workspace version of cbuildbot_run.DetermineAndroidVersion().

  Args:
    package: String name of Android package to get version of.

  Returns:
    The Android build ID of the container for the boards.
  """
  cpv = portage_util.SplitCPV(package)
  return cpv.version_no_rev
def setUp(self):
  # Replace SudoRunCommand, since we don't care about sudo.
  self.PatchObject(cros_build_lib, 'SudoRunCommand',
                   wraps=cros_build_lib.RunCommand)

  # Prepare a fake chroot.
  self.fake_chroot = os.path.join(self.build_root,
                                  'chroot/build/amd64-host')
  self.fake_json_data = {}
  osutils.SafeMakedirs(self.fake_chroot)
  osutils.Touch(os.path.join(self.fake_chroot, 'file'))
  for package, v in self.fake_packages:
    cpv = portage_util.SplitCPV('%s-%s' % (package, v))
    key = '%s/%s' % (cpv.category, cpv.package)
    self.fake_json_data.setdefault(key, []).append([v, {}])
def MarkStable(input_proto, output_proto, _config):
  """Uprev Android, if able.

  Uprev Android, verify that the newly uprevved package can be emerged, and
  return the new package info.

  See AndroidService documentation in api/proto/android.proto.

  Args:
    input_proto (MarkStableRequest): The input proto.
    output_proto (MarkStableResponse): The output proto.
    _config (api_config.ApiConfig): The call config.
  """
  chroot = controller_util.ParseChroot(input_proto.chroot)
  build_targets = controller_util.ParseBuildTargets(input_proto.build_targets)
  tracking_branch = input_proto.tracking_branch
  package_name = input_proto.package_name
  android_build_branch = input_proto.android_build_branch
  android_version = input_proto.android_version
  android_gts_build_branch = input_proto.android_gts_build_branch

  # Assume success.
  output_proto.status = android_pb2.MARK_STABLE_STATUS_SUCCESS
  # TODO(crbug/904939): This should move to service/android.py and the port
  # should be finished.
  try:
    android_atom_to_build = packages.uprev_android(
        tracking_branch=tracking_branch,
        android_package=package_name,
        android_build_branch=android_build_branch,
        chroot=chroot,
        build_targets=build_targets,
        android_version=android_version,
        android_gts_build_branch=android_gts_build_branch)
  except packages.AndroidIsPinnedUprevError as e:
    # If the uprev failed due to a pin, CI needs to unpin and retry.
    android_atom_to_build = e.new_android_atom
    output_proto.status = android_pb2.MARK_STABLE_STATUS_PINNED

  if android_atom_to_build:
    CPV = portage_util.SplitCPV(android_atom_to_build)
    output_proto.android_atom.category = CPV.category
    output_proto.android_atom.package_name = CPV.package
    output_proto.android_atom.version = CPV.version
  else:
    output_proto.status = android_pb2.MARK_STABLE_STATUS_EARLY_EXIT