def BundleEbuildLogs(input_proto, output_proto, config):
  """Tar the ebuild logs for a build target.

  Args:
    input_proto (BundleRequest): The input proto.
    output_proto (BundleResponse): The output proto.
    config (api_config.ApiConfig): The API call config.
  """
  out_dir = input_proto.output_dir
  chroot = controller_util.ParseChroot(input_proto.chroot)
  sysroot_path = input_proto.sysroot.path

  # TODO(mmortensen) Cleanup legacy handling after it has been switched over.
  legacy_target = input_proto.build_target.name
  if legacy_target:
    # Legacy handling: derive chroot and sysroot from the build target name.
    chroot = chroot_lib.Chroot(
        path=os.path.join(constants.SOURCE_ROOT, 'chroot'))
    sysroot_path = os.path.join('/build', legacy_target)

  # TODO(saklein): Switch to validation_complete decorator after legacy
  # handling has been cleaned up.
  if config.validate_only:
    return controller.RETURN_CODE_VALID_INPUT

  sysroot = sysroot_lib.Sysroot(sysroot_path)
  archive = artifacts.BundleEBuildLogsTarball(chroot, sysroot, out_dir)
  if archive is None:
    cros_build_lib.Die(
        'Could not create ebuild logs archive. No logs found for %s.',
        sysroot.path)
  output_proto.artifacts.add().path = os.path.join(out_dir, archive)
def BundleAFDOGenerationArtifacts(input_proto, output_proto, _config):
  """Generic function for creating tarballs of both AFDO and orderfile.

  Args:
    input_proto (BundleChromeAFDORequest): The input proto.
    output_proto (BundleResponse): The output proto.
    _config (api_config.ApiConfig): The API call config.
  """
  # Required args.
  build_target = build_target_util.BuildTarget(input_proto.build_target.name)
  chrome_root = input_proto.chroot.chrome_dir
  if not chrome_root:
    cros_build_lib.Die('chrome_root is not included in chroot')
  output_dir = input_proto.output_dir
  artifact_type = input_proto.artifact_type

  chroot = controller_util.ParseChroot(input_proto.chroot)

  try:
    # Compare with `==`, not `is`: proto enum values are plain ints, and
    # identity comparison on ints relies on interpreter interning details.
    is_orderfile = artifact_type == toolchain_pb2.ORDERFILE
    results = artifacts.BundleAFDOGenerationArtifacts(
        is_orderfile, chroot, chrome_root, build_target, output_dir)
  except artifacts.Error as e:
    # Report this endpoint's name (was incorrectly naming
    # BundleSimpleChromeArtifacts).
    cros_build_lib.Die('Error %s raised in BundleAFDOGenerationArtifacts: %s',
                       type(e), e)

  for file_name in results:
    output_proto.artifacts.add().path = file_name
def BundleChromeOSConfig(input_proto, output_proto, _config):
  """Output the ChromeOS Config payload for a build target.

  Args:
    input_proto (BundleRequest): The input proto.
    output_proto (BundleResponse): The output proto.
    _config (api_config.ApiConfig): The API call config.
  """
  out_dir = input_proto.output_dir
  chroot = controller_util.ParseChroot(input_proto.chroot)
  sysroot_path = input_proto.sysroot.path

  # TODO(mmortensen) Cleanup legacy handling after it has been switched over.
  legacy_target = input_proto.build_target.name
  if legacy_target:
    # Legacy handling: derive chroot and sysroot from the build target name.
    chroot = chroot_lib.Chroot(
        path=os.path.join(constants.SOURCE_ROOT, 'chroot'))
    sysroot_path = os.path.join('/build', legacy_target)

  sysroot = sysroot_lib.Sysroot(sysroot_path)
  chromeos_config = artifacts.BundleChromeOSConfig(chroot, sysroot, out_dir)
  if chromeos_config is None:
    cros_build_lib.Die(
        'Could not create ChromeOS Config payload. No config found for %s.',
        sysroot.path)
  output_proto.artifacts.add().path = os.path.join(out_dir, chromeos_config)
def BundleFpmcuUnittests(input_proto, output_proto, _config):
  """Tar the fingerprint MCU unittest binaries for a build target.

  Args:
    input_proto (BundleRequest): The input proto.
    output_proto (BundleResponse): The output proto.
    _config (api_config.ApiConfig): The API call config.
  """
  chroot = controller_util.ParseChroot(input_proto.chroot)
  sysroot_path = input_proto.sysroot.path
  sysroot = sysroot_lib.Sysroot(sysroot_path)

  # Fail fast when the chroot or the sysroot is missing.
  if not chroot.exists():
    cros_build_lib.Die('Chroot does not exist: %s', chroot.path)
  elif not sysroot.Exists(chroot=chroot):
    cros_build_lib.Die('Sysroot does not exist: %s',
                       chroot.full_path(sysroot.path))

  archive = artifacts.BundleFpmcuUnittests(chroot, sysroot,
                                           input_proto.output_dir)
  if archive is None:
    # Not an error: some boards have no fpmcu unittests.
    logging.warning('No fpmcu unittests found for %s.', sysroot_path)
    return

  output_proto.artifacts.add().path = archive
def BundleSimpleChromeArtifacts(input_proto, output_proto, _config):
  """Create the simple chrome artifacts."""
  out_dir = input_proto.output_dir

  # Build out the argument instances.
  build_target = controller_util.ParseBuildTarget(
      input_proto.sysroot.build_target)
  chroot = controller_util.ParseChroot(input_proto.chroot)
  # Sysroot.path needs to be the fully qualified path, including the chroot.
  full_sysroot_path = os.path.join(
      chroot.path, input_proto.sysroot.path.lstrip(os.sep))
  sysroot = sysroot_lib.Sysroot(full_sysroot_path)

  # Quick sanity check that the sysroot exists before we go on.
  if not sysroot.Exists():
    cros_build_lib.Die('The sysroot does not exist.')

  try:
    results = artifacts.BundleSimpleChromeArtifacts(chroot, sysroot,
                                                    build_target, out_dir)
  except artifacts.Error as e:
    cros_build_lib.Die('Error %s raised in BundleSimpleChromeArtifacts: %s',
                       type(e), e)

  for bundled_file in results:
    output_proto.artifacts.add().path = bundled_file
def UprevVersionedPackage(input_proto, output_proto, _config):
  """Uprev a versioned package.

  See go/pupr-generator for details about this endpoint.
  """
  chroot = controller_util.ParseChroot(input_proto.chroot)
  build_targets = controller_util.ParseBuildTargets(input_proto.build_targets)
  package = controller_util.PackageInfoToCPV(input_proto.package_info)
  refs = [
      GitRef(path=ref.repository, ref=ref.ref, revision=ref.revision)
      for ref in input_proto.versions
  ]

  try:
    result = packages.uprev_versioned_package(package, build_targets, refs,
                                              chroot)
  except packages.Error as e:
    # Handle module errors nicely, let everything else bubble up.
    cros_build_lib.Die(e)

  if not result.uprevved:
    # No uprevs executed, skip the output population.
    return

  for modified in result.modified:
    uprev_response = output_proto.responses.add()
    uprev_response.version = modified.new_version
    for ebuild_path in modified.files:
      uprev_response.modified_ebuilds.add().path = ebuild_path
def ExportCpeReport(input_proto, output_proto, config):
  """Export a CPE report.

  Args:
    input_proto (BundleRequest): The input proto.
    output_proto (BundleResponse): The output proto.
    config (api_config.ApiConfig): The API call config.
  """
  chroot = controller_util.ParseChroot(input_proto.chroot)
  out_dir = input_proto.output_dir

  if input_proto.build_target.name:
    # Legacy handling - use the default sysroot path for the build target.
    build_target = controller_util.ParseBuildTarget(input_proto.build_target)
    sysroot = sysroot_lib.Sysroot(build_target.root)
  elif input_proto.sysroot.path:
    sysroot = sysroot_lib.Sysroot(input_proto.sysroot.path)
  else:
    # TODO(saklein): Switch to validate decorators once legacy handling can be
    # cleaned up.
    cros_build_lib.Die('sysroot.path is required.')

  if config.validate_only:
    return controller.RETURN_CODE_VALID_INPUT

  cpe_result = artifacts.GenerateCpeReport(chroot, sysroot, out_dir)

  output_proto.artifacts.add().path = cpe_result.report
  output_proto.artifacts.add().path = cpe_result.warnings
def BundleFirmware(input_proto, output_proto, _config):
  """Tar the firmware images for a build target.

  Args:
    input_proto (BundleRequest): The input proto.
    output_proto (BundleResponse): The output proto.
    _config (api_config.ApiConfig): The API call config.
  """
  chroot = controller_util.ParseChroot(input_proto.chroot)
  sysroot_path = input_proto.sysroot.path
  sysroot = sysroot_lib.Sysroot(sysroot_path)

  # Fail fast when the chroot or the sysroot is missing.
  if not chroot.exists():
    cros_build_lib.Die('Chroot does not exist: %s', chroot.path)
  elif not sysroot.Exists(chroot=chroot):
    cros_build_lib.Die('Sysroot does not exist: %s',
                       chroot.full_path(sysroot.path))

  archive = artifacts.BuildFirmwareArchive(chroot, sysroot,
                                           input_proto.output_dir)
  if archive is None:
    cros_build_lib.Die(
        'Could not create firmware archive. No firmware found for %s.',
        sysroot_path)

  output_proto.artifacts.add().path = archive
def testSuccess(self):
  """Test successful handling case."""
  path = '/chroot/path'
  cache_dir = '/cache/dir'
  chrome_root = '/chrome/root'
  use_flags = [{'flag': 'useflag1'}, {'flag': 'useflag2'}]
  features = [{'feature': 'feature1'}, {'feature': 'feature2'}]
  # The env dict the parsed Chroot is expected to carry.
  expected_env = {
      'USE': 'useflag1 useflag2',
      'FEATURES': 'feature1 feature2',
      'CHROME_ORIGIN': 'LOCAL_SOURCE',
  }

  chroot_message = common_pb2.Chroot(
      path=path, cache_dir=cache_dir, chrome_dir=chrome_root,
      env={'use_flags': use_flags, 'features': features})

  expected = Chroot(path=path, cache_dir=cache_dir, chrome_root=chrome_root,
                    env=expected_env)
  self.assertEqual(expected, controller_util.ParseChroot(chroot_message))
def BuildTargetUnitTest(input_proto, output_proto, _config):
  """Run a build target's ebuild unit tests."""
  # Required args.
  result_path = input_proto.result_path

  # Method flags.
  # An empty sysroot means build packages was not run. This is used for
  # certain boards that need to use prebuilts (e.g. grunt's unittest-only).
  was_built = not input_proto.flags.empty_sysroot

  # Packages to be tested.
  packages = [controller_util.PackageInfoToString(pi)
              for pi in input_proto.packages]

  # Skipped tests.
  # TODO: Remove blacklist when we fully switch to blocklist.
  skipped_protos = (input_proto.package_blacklist or
                    input_proto.package_blocklist)
  blocklist = [controller_util.PackageInfoToString(pi)
               for pi in skipped_protos]

  # Allow call to succeed if no tests were found.
  testable_packages_optional = input_proto.flags.testable_packages_optional

  build_target = controller_util.ParseBuildTarget(input_proto.build_target)
  chroot = controller_util.ParseChroot(input_proto.chroot)

  code_coverage = input_proto.flags.code_coverage

  result = test.BuildTargetUnitTest(
      build_target, chroot, packages=packages, blocklist=blocklist,
      was_built=was_built, code_coverage=code_coverage,
      testable_packages_optional=testable_packages_optional)

  if not result.success:
    # Failed to run tests or some tests failed.
    # Record all failed packages.
    for cpv in result.failed_cpvs:
      controller_util.CPVToPackageInfo(cpv, output_proto.failed_packages.add())
    if result.failed_cpvs:
      return controller.RETURN_CODE_UNSUCCESSFUL_RESPONSE_AVAILABLE
    return controller.RETURN_CODE_COMPLETED_UNSUCCESSFULLY

  sysroot = sysroot_lib.Sysroot(build_target.root)
  tarball = test.BuildTargetUnitTestTarball(chroot, sysroot, result_path)
  if tarball:
    output_proto.tarball_path = tarball

  deserialize_metrics_log(output_proto.events, prefix=build_target.name)
def PrepareBinhostUploads(input_proto, output_proto, config):
  """Return a list of files to upload to the binhost.

  See BinhostService documentation in api/proto/binhost.proto.

  Args:
    input_proto (PrepareBinhostUploadsRequest): The input proto.
    output_proto (PrepareBinhostUploadsResponse): The output proto.
    config (api_config.ApiConfig): The API call config.
  """
  # Prefer the sysroot's build target; fall back to the top-level one.
  if input_proto.sysroot.build_target.name:
    target_msg = input_proto.sysroot.build_target
  else:
    target_msg = input_proto.build_target
  sysroot_path = input_proto.sysroot.path

  if not sysroot_path and not target_msg.name:
    cros_build_lib.Die('Sysroot.path is required.')

  build_target = controller_util.ParseBuildTarget(target_msg)
  chroot = controller_util.ParseChroot(input_proto.chroot)

  if not sysroot_path:
    sysroot_path = build_target.root
  sysroot = sysroot_lib.Sysroot(sysroot_path)

  gs_uri = input_proto.uri
  # For now, we enforce that all input URIs are Google Storage buckets.
  if not gs.PathIsGs(gs_uri):
    raise ValueError('Upload URI %s must be Google Storage.' % gs_uri)

  if config.validate_only:
    return controller.RETURN_CODE_VALID_INPUT

  parsed = urllib.parse.urlparse(gs_uri)
  upload_uri = gs.GetGsURL(parsed.netloc, for_gsutil=True).rstrip('/')
  upload_path = parsed.path.lstrip('/')

  # Read all packages and update the index. The index must be uploaded to the
  # binhost for Portage to use it, so include it in upload_targets.
  uploads_dir = binhost.GetPrebuiltsRoot(chroot, sysroot, build_target)
  index_path = binhost.UpdatePackageIndex(uploads_dir, upload_uri, upload_path,
                                          sudo=True)
  upload_targets = binhost.GetPrebuiltsFiles(uploads_dir)
  assert index_path.startswith(uploads_dir), (
      'expected index_path to start with uploads_dir')
  upload_targets.append(index_path[len(uploads_dir):])

  output_proto.uploads_dir = uploads_dir
  for upload_target in upload_targets:
    output_proto.upload_targets.add().path = upload_target.strip('/')
def PrepareDevInstallBinhostUploads(input_proto, output_proto, config):
  """Return a list of files to upload to the binhost.

  The files will also be copied to the uploads_dir.
  See BinhostService documentation in api/proto/binhost.proto.

  Args:
    input_proto (PrepareDevInstallBinhostUploadsRequest): The input proto.
    output_proto (PrepareDevInstallBinhostUploadsResponse): The output proto.
    config (api_config.ApiConfig): The API call config.
  """
  chroot = controller_util.ParseChroot(input_proto.chroot)
  sysroot = sysroot_lib.Sysroot(input_proto.sysroot.path)

  gs_uri = input_proto.uri
  # For now, we enforce that all input URIs are Google Storage buckets.
  if not gs.PathIsGs(gs_uri):
    raise ValueError('Upload URI %s must be Google Storage.' % gs_uri)

  if config.validate_only:
    return controller.RETURN_CODE_VALID_INPUT

  parsed = urllib.parse.urlparse(gs_uri)
  upload_uri = gs.GetGsURL(parsed.netloc, for_gsutil=True).rstrip('/')
  upload_path = parsed.path.lstrip('/')

  # Calculate the filename for the to-be-created Packages file, which will
  # contain only devinstall packages.
  devinstall_index_path = os.path.join(input_proto.uploads_dir, 'Packages')
  upload_targets_list = binhost.ReadDevInstallFilesToCreatePackageIndex(
      chroot, sysroot, devinstall_index_path, upload_uri, upload_path)

  package_dir = chroot.full_path(sysroot.path, 'packages')
  for upload_target in upload_targets_list:
    # Copy each package to target/category/package.
    upload_target = upload_target.strip('/')
    category = upload_target.split(os.sep)[0]
    dest_dir = os.path.join(input_proto.uploads_dir, category)
    if not os.path.exists(dest_dir):
      os.makedirs(dest_dir)
    src_pkg_path = os.path.join(package_dir, upload_target)
    dest_pkg_path = os.path.join(input_proto.uploads_dir, upload_target)
    shutil.copyfile(src_pkg_path, dest_pkg_path)
    output_proto.upload_targets.add().path = upload_target
  output_proto.upload_targets.add().path = 'Packages'
def BundleVmFiles(input_proto, output_proto, _config):
  """Tar VM disk and memory files.

  Args:
    input_proto (BundleVmFilesRequest): The input proto.
    output_proto (BundleResponse): The output proto.
    _config (api_config.ApiConfig): The API call config.
  """
  chroot = controller_util.ParseChroot(input_proto.chroot)
  archives = artifacts.BundleVmFiles(
      chroot, input_proto.test_results_dir, input_proto.output_dir)
  for archive in archives:
    output_proto.artifacts.add().path = archive
def MarkStable(input_proto, output_proto, _config):
  """Uprev Android, if able.

  Uprev Android, verify that the newly uprevved package can be emerged, and
  return the new package info.

  See AndroidService documentation in api/proto/android.proto.

  Args:
    input_proto (MarkStableRequest): The input proto.
    output_proto (MarkStableResponse): The output proto.
    _config (api_config.ApiConfig): The call config.
  """
  chroot = controller_util.ParseChroot(input_proto.chroot)
  build_targets = controller_util.ParseBuildTargets(input_proto.build_targets)

  # Assume success.
  output_proto.status = android_pb2.MARK_STABLE_STATUS_SUCCESS
  # TODO(crbug/904939): This should move to service/android.py and the port
  # should be finished.
  try:
    android_atom_to_build = packages.uprev_android(
        tracking_branch=input_proto.tracking_branch,
        android_package=input_proto.package_name,
        android_build_branch=input_proto.android_build_branch,
        chroot=chroot,
        build_targets=build_targets,
        android_version=input_proto.android_version,
        android_gts_build_branch=input_proto.android_gts_build_branch)
  except packages.AndroidIsPinnedUprevError as e:
    # If the uprev failed due to a pin, CI needs to unpin and retry.
    android_atom_to_build = e.new_android_atom
    output_proto.status = android_pb2.MARK_STABLE_STATUS_PINNED

  if android_atom_to_build:
    cpv = portage_util.SplitCPV(android_atom_to_build)
    output_proto.android_atom.category = cpv.category
    output_proto.android_atom.package_name = cpv.package
    output_proto.android_atom.version = cpv.version
  else:
    output_proto.status = android_pb2.MARK_STABLE_STATUS_EARLY_EXIT
def RegenBuildCache(input_proto, output_proto, _config):
  """Regenerate the Build Cache for a build target.

  See BinhostService documentation in api/proto/binhost.proto.

  Args:
    input_proto (RegenBuildCacheRequest): The input proto.
    output_proto (RegenBuildCacheResponse): The output proto.
    _config (api_config.ApiConfig): The API call config.
  """
  chroot = controller_util.ParseChroot(input_proto.chroot)
  overlay_name = _OVERLAY_TYPE_TO_NAME[input_proto.overlay_type]
  for overlay in binhost.RegenBuildCache(chroot, overlay_name):
    output_proto.modified_overlays.add().path = overlay
def Uprev(input_proto, output_proto, _config):
  """Uprev all cros workon ebuilds that have changes."""
  build_targets = [build_target_util.BuildTarget(t.name)
                   for t in input_proto.build_targets]
  overlay_type = _OVERLAY_TYPE_TO_NAME[input_proto.overlay_type]
  chroot = controller_util.ParseChroot(input_proto.chroot)
  output_dir = input_proto.output_dir or None

  try:
    modified = packages.uprev_build_targets(build_targets, overlay_type,
                                            chroot, output_dir)
  except packages.Error as e:
    # Handle module errors nicely, let everything else bubble up.
    cros_build_lib.Die(e)

  for ebuild_path in modified:
    output_proto.modified_ebuilds.add().path = ebuild_path
def FetchPinnedGuestImages(input_proto, output_proto, _config):
  """Get the pinned guest image information."""
  chroot = controller_util.ParseChroot(input_proto.chroot)
  sysroot = sysroot_lib.Sysroot(input_proto.sysroot.path)

  # Fail fast when the chroot or the sysroot is missing.
  if not chroot.exists():
    cros_build_lib.Die('Chroot does not exist: %s', chroot.path)
  elif not sysroot.Exists(chroot=chroot):
    cros_build_lib.Die('Sysroot does not exist: %s',
                       chroot.full_path(sysroot.path))

  for pin in artifacts.FetchPinnedGuestImages(chroot, sysroot):
    pinned_image = output_proto.pinned_images.add()
    pinned_image.filename = pin.filename
    pinned_image.uri = pin.uri
def BundleTastFiles(input_proto, output_proto, config):
  """Tar the tast files for a build target.

  Args:
    input_proto (BundleRequest): The input proto.
    output_proto (BundleResponse): The output proto.
    config (api_config.ApiConfig): The API call config.
  """
  target = input_proto.build_target.name
  out_dir = input_proto.output_dir

  chroot = controller_util.ParseChroot(input_proto.chroot)
  sysroot_path = input_proto.sysroot.path

  # TODO(saklein) Cleanup legacy handling after it has been switched over.
  if target:
    # Legacy handling: derive chroot and sysroot from the build target name.
    chroot = chroot_lib.Chroot(
        path=os.path.join(constants.SOURCE_ROOT, 'chroot'))
    sysroot_path = os.path.join('/build', target)

  # New handling - chroot & sysroot based.
  # TODO(saklein) Switch this to the require decorator when legacy is removed.
  if not sysroot_path:
    cros_build_lib.Die('sysroot.path is required.')

  # TODO(saklein): Switch to the validation_complete decorator when legacy
  # handling is removed.
  if config.validate_only:
    return controller.RETURN_CODE_VALID_INPUT

  sysroot = sysroot_lib.Sysroot(sysroot_path)
  if not sysroot.Exists(chroot=chroot):
    cros_build_lib.Die('Sysroot must exist.')

  archive = artifacts.BundleTastFiles(chroot, sysroot, out_dir)

  if archive is None:
    cros_build_lib.Die('Could not bundle Tast files. '
                       'No Tast directories found for %s.', target)

  output_proto.artifacts.add().path = archive
def BundleArtifacts(input_proto, output_proto, _config):
  """Bundle toolchain artifacts.

  The handlers (from _TOOLCHAIN_ARTIFACT_HANDLERS above) are called with:
    artifact_name (str): name of the artifact type
    chroot (chroot_lib.Chroot): chroot
    sysroot_path (str): sysroot path inside the chroot (e.g., /build/atlas),
        or None.
    chrome_root (str): path to chrome root. (e.g., /b/s/w/ir/k/chrome)
    build_target_name (str): name of the build target (e.g., atlas), or None.
    output_dir (str): absolute path where artifacts are being bundled.
        (e.g., /b/s/w/ir/k/recipe_cleanup/artifactssptfMU)
    profile_info ({(str) name: (str) value}) Dictionary containing profile
        information.

  Note: the actual upload to GS is done by CI, not here.

  Args:
    input_proto (BundleToolchainRequest): The input proto
    output_proto (BundleToolchainResponse): The output proto
    _config (api_config.ApiConfig): The API call config.
  """
  chroot = controller_util.ParseChroot(input_proto.chroot)
  profile_info = _GetProfileInfoDict(input_proto.profile_info)

  for artifact_type in input_proto.artifact_types:
    # Unknown artifact types abort the whole request.
    if artifact_type not in _TOOLCHAIN_ARTIFACT_HANDLERS:
      logging.error('%s not understood', artifact_type)
      return controller.RETURN_CODE_UNRECOVERABLE
    handler = _TOOLCHAIN_ARTIFACT_HANDLERS[artifact_type]
    if not handler or not handler.bundle:
      continue
    bundled = handler.bundle(handler.name, chroot, input_proto.sysroot.path,
                             input_proto.sysroot.build_target.name,
                             input_proto.output_dir, profile_info)
    if not bundled:
      continue
    art_info = output_proto.artifacts_info.add()
    art_info.artifact_type = artifact_type
    for artifact in bundled:
      art_info.artifacts.add().path = artifact
def BuildTargetUnitTest(input_proto, output_proto, _config):
  """Run a build target's ebuild unit tests."""
  # Required args.
  board = input_proto.build_target.name
  result_path = input_proto.result_path

  # Method flags.
  # An empty sysroot means build packages was not run. This is used for
  # certain boards that need to use prebuilts (e.g. grunt's unittest-only).
  was_built = not input_proto.flags.empty_sysroot

  # Skipped tests.
  blacklist = [controller_util.PackageInfoToString(pi)
               for pi in input_proto.package_blacklist]

  build_target = build_target_util.BuildTarget(board)
  chroot = controller_util.ParseChroot(input_proto.chroot)

  result = test.BuildTargetUnitTest(build_target, chroot, blacklist=blacklist,
                                    was_built=was_built)

  if not result.success:
    # Failed to run tests or some tests failed.
    # Record all failed packages.
    for cpv in result.failed_cpvs:
      controller_util.CPVToPackageInfo(cpv, output_proto.failed_packages.add())
    if result.failed_cpvs:
      return controller.RETURN_CODE_UNSUCCESSFUL_RESPONSE_AVAILABLE
    return controller.RETURN_CODE_COMPLETED_UNSUCCESSFULLY

  sysroot = sysroot_lib.Sysroot(build_target.root)
  tarball = test.BuildTargetUnitTestTarball(chroot, sysroot, result_path)
  if tarball:
    output_proto.tarball_path = tarball
  deserialize_metrics_log(output_proto.events, prefix=build_target.name)
def MoblabVmTest(input_proto, _output_proto, _config):
  """Run Moblab VM tests.

  Reads the builder path out of the test image's /etc/lsb-release, then
  prepares a Moblab VM plus its image cache and runs the test suite against
  it. Produces no output proto fields; failures are surfaced via Die or the
  test helpers raising.
  """
  chroot = controller_util.ParseChroot(input_proto.chroot)
  image_payload_dir = input_proto.image_payload.path.path
  cache_payload_dirs = [cp.path.path for cp in input_proto.cache_payloads]

  # Autotest and Moblab depend on the builder path, so we must read it from
  # the image.
  image_file = os.path.join(image_payload_dir, constants.TEST_IMAGE_BIN)
  with osutils.TempDir() as mount_dir:
    with image_lib.LoopbackPartitions(image_file, destination=mount_dir) as lp:
      # The file we want is /etc/lsb-release, which lives in the ROOT-A
      # disk partition.
      partition_paths = lp.Mount([constants.PART_ROOT_A])
      assert len(partition_paths) == 1, (
          'expected one partition path, got: %r' % partition_paths)
      partition_path = partition_paths[0]
      # LSB_RELEASE_PATH is absolute; strip the leading '/' so the join
      # stays inside the mounted partition.
      lsb_release_file = os.path.join(partition_path,
                                      constants.LSB_RELEASE_PATH.strip('/'))
      lsb_release_kvs = key_value_store.LoadFile(lsb_release_file)
      builder = lsb_release_kvs.get(cros_set_lsb_release.LSB_KEY_BUILDER_PATH)

  if not builder:
    cros_build_lib.Die('Image did not contain key %s in %s',
                       cros_set_lsb_release.LSB_KEY_BUILDER_PATH,
                       constants.LSB_RELEASE_PATH)

  # Now we can run the tests.
  with chroot.tempdir() as workspace_dir, chroot.tempdir() as results_dir:
    # Convert the results directory to an absolute chroot directory.
    chroot_results_dir = '/%s' % os.path.relpath(results_dir, chroot.path)
    vms = test.CreateMoblabVm(workspace_dir, chroot.path, image_payload_dir)
    cache_dir = test.PrepareMoblabVmImageCache(vms, builder, cache_payload_dirs)
    test.RunMoblabVmTest(chroot, vms, builder, cache_dir, chroot_results_dir)
    test.ValidateMoblabVmTest(results_dir)
def BundleAutotestFiles(input_proto, output_proto, config):
  """Tar the autotest files for a build target.

  Args:
    input_proto (BundleRequest): The input proto.
    output_proto (BundleResponse): The output proto.
    config (api_config.ApiConfig): The API call config.
  """
  out_dir = input_proto.output_dir
  target = input_proto.build_target.name
  chroot = controller_util.ParseChroot(input_proto.chroot)

  if target:
    # Legacy call: derive the sysroot path from the build target name.
    sysroot_path = os.path.join('/build', target)
  else:
    # New style call, use chroot and sysroot.
    sysroot_path = input_proto.sysroot.path
    if not sysroot_path:
      cros_build_lib.Die('sysroot.path is required.')

  sysroot = sysroot_lib.Sysroot(sysroot_path)

  # TODO(saklein): Switch to the validate_only decorator when legacy handling
  # is removed.
  if config.validate_only:
    return controller.RETURN_CODE_VALID_INPUT

  if not sysroot.Exists(chroot=chroot):
    cros_build_lib.Die('Sysroot path must exist: %s', sysroot.path)

  try:
    # Note that this returns the full path to *multiple* tarballs.
    archives = artifacts.BundleAutotestFiles(chroot, sysroot, out_dir)
  except artifacts.Error as e:
    logging.warning(e)
    return

  for archive in archives.values():
    output_proto.artifacts.add().path = archive
def Clean(input_proto, _output_proto, _config):
  """Clean unneeded files from a chroot."""
  sdk.Clean(controller_util.ParseChroot(input_proto.chroot),
            images=True, sysroots=True, tmp=True)
def CreateSnapshot(input_proto, output_proto, _config):
  """Create a chroot snapshot and return a corresponding opaque snapshot key."""
  chroot = controller_util.ParseChroot(input_proto.chroot)
  output_proto.snapshot_token.value = sdk.CreateSnapshot(
      chroot, replace_if_needed=True)
def RestoreSnapshot(input_proto, _output_proto, _config):
  """Restore a chroot snapshot from a snapshot key."""
  sdk.RestoreSnapshot(input_proto.snapshot_token.value,
                      controller_util.ParseChroot(input_proto.chroot))
def Delete(input_proto, _output_proto, _config):
  """Delete a chroot."""
  sdk.Delete(controller_util.ParseChroot(input_proto.chroot))
def parse_chroot(self, chroot_message):
  """Parse a Chroot message instance.

  Args:
    chroot_message (common_pb2.Chroot): The chroot message to parse.

  Returns:
    chroot_lib.Chroot: The parsed chroot instance.
  """
  return controller_util.ParseChroot(chroot_message)
def testWrongMessage(self):
  """Test invalid message type given."""
  # A BuildTarget is not a Chroot; parsing must assert.
  not_a_chroot = common_pb2.BuildTarget()
  with self.assertRaises(AssertionError):
    controller_util.ParseChroot(not_a_chroot)
def testChrootCallToGoma(self):
  """Test calls to goma.

  Verifies ParseChroot forwards the GomaConfig fields to the goma_util.Goma
  constructor, covering the default, chromeos_goma_dir, RBE_PROD and
  RBE_STAGING configurations.
  """
  path = '/chroot/path'
  cache_dir = '/cache/dir'
  chrome_root = '/chrome/root'
  use_flags = [{'flag': 'useflag1'}, {'flag': 'useflag2'}]
  features = [{'feature': 'feature1'}, {'feature': 'feature2'}]
  goma_test_dir = '/goma/test/dir'
  goma_test_json_string = 'goma_json'
  chromeos_goma_test_dir = '/chromeos/goma/test/dir'

  # Patch goma constructor to avoid creating misc dirs.
  patch = self.PatchObject(goma_util, 'Goma')

  # Case 1: minimal goma config — chromeos_goma_dir and goma_approach unset.
  goma_config = common_pb2.GomaConfig(goma_dir=goma_test_dir,
                                      goma_client_json=goma_test_json_string)
  chroot_message = common_pb2.Chroot(path=path, cache_dir=cache_dir,
                                     chrome_dir=chrome_root,
                                     env={'use_flags': use_flags,
                                          'features': features},
                                     goma=goma_config)
  controller_util.ParseChroot(chroot_message)
  patch.assert_called_with(goma_test_dir, goma_test_json_string,
                           stage_name='BuildAPI', chromeos_goma_dir=None,
                           chroot_dir=path, goma_approach=None)

  # Case 2: chromeos_goma_dir set — must be forwarded.
  goma_config.chromeos_goma_dir = chromeos_goma_test_dir
  chroot_message = common_pb2.Chroot(path=path, cache_dir=cache_dir,
                                     chrome_dir=chrome_root,
                                     env={'use_flags': use_flags,
                                          'features': features},
                                     goma=goma_config)
  controller_util.ParseChroot(chroot_message)
  patch.assert_called_with(goma_test_dir, goma_test_json_string,
                           stage_name='BuildAPI',
                           chromeos_goma_dir=chromeos_goma_test_dir,
                           chroot_dir=path, goma_approach=None)

  # Case 3: RBE_PROD approach maps to the '?prod' GomaApproach.
  goma_config.goma_approach = common_pb2.GomaConfig.RBE_PROD
  chroot_message = common_pb2.Chroot(path=path, cache_dir=cache_dir,
                                     chrome_dir=chrome_root,
                                     env={'use_flags': use_flags,
                                          'features': features},
                                     goma=goma_config)
  controller_util.ParseChroot(chroot_message)
  patch.assert_called_with(goma_test_dir, goma_test_json_string,
                           stage_name='BuildAPI',
                           chromeos_goma_dir=chromeos_goma_test_dir,
                           chroot_dir=path,
                           goma_approach=goma_util.GomaApproach(
                               '?prod', 'goma.chromium.org', True))

  # Case 4: RBE_STAGING approach maps to the '?staging' GomaApproach.
  goma_config.goma_approach = common_pb2.GomaConfig.RBE_STAGING
  chroot_message = common_pb2.Chroot(path=path, cache_dir=cache_dir,
                                     chrome_dir=chrome_root,
                                     env={'use_flags': use_flags,
                                          'features': features},
                                     goma=goma_config)
  controller_util.ParseChroot(chroot_message)
  patch.assert_called_with(goma_test_dir, goma_test_json_string,
                           stage_name='BuildAPI',
                           chromeos_goma_dir=chromeos_goma_test_dir,
                           chroot_dir=path,
                           goma_approach=goma_util.GomaApproach(
                               '?staging', 'staging-goma.chromium.org', True))
def Unmount(input_proto, _output_proto, _config):
  """Unmount a chroot."""
  sdk.Unmount(controller_util.ParseChroot(input_proto.chroot))