def GenerateQuickProvisionPayloads(target_image_path, archive_dir):
  """Generates payloads needed for quick_provision script.

  Args:
    target_image_path (str): The path to the image to extract the partitions.
    archive_dir (str): Where to store partitions when generated.

  Returns:
    list[str]: The artifacts that were produced.
  """
  payloads = []
  with osutils.TempDir() as temp_dir:
    # These partitions are mainly used by quick_provision.
    kernel_part = 'kernel.bin'
    rootfs_part = 'rootfs.bin'
    partition_lib.ExtractKernel(target_image_path,
                                os.path.join(temp_dir, kernel_part))
    partition_lib.ExtractRoot(target_image_path,
                              os.path.join(temp_dir, rootfs_part),
                              truncate=False)
    for partition, payload in {
        kernel_part: constants.QUICK_PROVISION_PAYLOAD_KERNEL,
        rootfs_part: constants.QUICK_PROVISION_PAYLOAD_ROOTFS
    }.items():
      source = os.path.join(temp_dir, partition)
      dest = os.path.join(archive_dir, payload)
      cros_build_lib.CompressFile(source, dest)
      payloads.append(dest)

  return payloads
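# Usage sketch for the function above (paths are hypothetical; assumes the
# chromite module-level imports resolve):
#
#   payloads = GenerateQuickProvisionPayloads(
#       '/tmp/chromiumos_test_image.bin', '/tmp/artifacts')
#   # Each entry is a path under archive_dir named after the quick_provision
#   # payload constant, e.g. constants.QUICK_PROVISION_PAYLOAD_KERNEL.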
def CompressAFDOFile(to_compress, buildroot):
  """Compress file used by AFDO process.

  Args:
    to_compress: File to compress.
    buildroot: buildroot where to store the compressed data.

  Returns:
    Name of the compressed data file.
  """
  local_dir = AFDO_BUILDROOT_LOCAL % {'build_root': buildroot}
  dest = os.path.join(local_dir, os.path.basename(to_compress)) + '.bz2'
  cros_build_lib.CompressFile(to_compress, dest)
  return dest
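# Usage sketch (hypothetical arguments; AFDO_BUILDROOT_LOCAL is a
# module-level template of the form '%(build_root)s/...'):
#
#   dest = CompressAFDOFile('/tmp/chromeos-chrome-amd64.afdo', '/b/build')
#   # dest is '<local_dir>/chromeos-chrome-amd64.afdo.bz2'; CompressFile
#   # appears to pick the compressor from the destination suffix, hence
#   # the explicit '.bz2'.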
def ExecuteReleaseProfileMergePlan(gs_context, buildroot, merge_plan):
  """Generates release profiles, given a release profile merge plan.

  Args:
    gs_context: How we talk to gs://
    buildroot: Our buildroot
    merge_plan: The second result of GenerateReleaseProfileMergePlan. This
      determines the profiles we pull and merge.

  Returns:
    A dict mapping each version in |merge_plan| to the path of its
    compressed, merged profile.
  """
  _, work_dir, chroot_work_dir = _BuildrootToWorkDirs(buildroot)

  def path_pair(suffix):
    outside_chroot = os.path.join(work_dir, suffix)
    in_chroot = os.path.join(chroot_work_dir, suffix)
    return in_chroot, outside_chroot

  # Note: this rebinds the names that path_pair closes over, so all later
  # path_pair calls produce paths under afdo_data_merge/.
  chroot_work_dir, work_dir = path_pair('afdo_data_merge')

  def copy_profile(gs_path, local_path):
    assert local_path.endswith('.afdo'), local_path
    assert not gs_path.endswith('.afdo'), gs_path
    compression_suffix = os.path.splitext(gs_path)[1]
    temp_path = local_path + compression_suffix
    gs_context.Copy(gs_path, temp_path)
    cros_build_lib.UncompressFile(temp_path, local_path)

  merge_results = {}
  for version, (cwp_profile, benchmark_profile) in merge_plan.items():
    chroot_benchmark_path, benchmark_path = path_pair('benchmark.afdo')
    copy_profile(benchmark_profile, benchmark_path)

    chroot_cwp_path, cwp_path = path_pair('cwp.afdo')
    copy_profile(cwp_profile, cwp_path)

    chroot_merged_path, merged_path = path_pair('m%d.afdo' % version)
    merge_weights = [
        (chroot_cwp_path, _RELEASE_CWP_MERGE_WEIGHT),
        (chroot_benchmark_path, _RELEASE_BENCHMARK_MERGE_WEIGHT),
    ]
    _MergeAFDOProfiles(merge_weights, chroot_merged_path,
                       use_compbinary=True)

    comp_merged_path = merged_path + COMPRESSION_SUFFIX
    cros_build_lib.CompressFile(merged_path, comp_merged_path)
    merge_results[version] = comp_merged_path

  return merge_results
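# Usage sketch (hypothetical values; per the loop above, a merge plan maps
# a version to a (cwp_profile, benchmark_profile) pair of gs:// paths whose
# suffixes name their compression):
#
#   merge_plan = {
#       80: ('gs://.../R80-cwp.afdo.xz', 'gs://.../R80-benchmark.afdo.bz2'),
#   }
#   results = ExecuteReleaseProfileMergePlan(gs_context, '/b/build',
#                                            merge_plan)
#   # results == {80: '<work_dir>/afdo_data_merge/m80.afdo' +
#   #             COMPRESSION_SUFFIX}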
def _ArchiveNinjaLog(self, compiler_proxy_path):
  """Archives the .ninja_log file and its related metadata.

  This archives the .ninja_log file generated by ninja to build Chrome.
  Also, it appends some related metadata at the end of the file following
  the '# end of ninja log' marker.

  Args:
    compiler_proxy_path: Path to the compiler proxy, which will be
      contained in the metadata.

  Returns:
    The name of the archived file, or None if the log is missing or its
    timestamp cannot be read.
  """
  ninja_log_path = os.path.join(self._log_dir, 'ninja_log')
  if not os.path.exists(ninja_log_path):
    logging.warning('ninja_log is not found: %s', ninja_log_path)
    return None
  ninja_log_content = osutils.ReadFile(ninja_log_path)

  try:
    st = os.stat(ninja_log_path)
    ninja_log_mtime = datetime.datetime.fromtimestamp(st.st_mtime)
  except OSError:
    logging.exception('Failed to get timestamp: %s', ninja_log_path)
    return None

  ninja_log_info = self._BuildNinjaInfo(compiler_proxy_path)

  # Append metadata at the end of the log content.
  ninja_log_content += '# end of ninja log\n' + json.dumps(ninja_log_info)

  # Aligned with goma_utils in chromium bot.
  pid = os.getpid()

  archive_ninja_log_path = os.path.join(
      self._log_dir,
      'ninja_log.%s.%s.%s.%d' % (getpass.getuser(),
                                 cros_build_lib.GetHostName(),
                                 ninja_log_mtime.strftime('%Y%m%d-%H%M%S'),
                                 pid))
  osutils.WriteFile(archive_ninja_log_path, ninja_log_content)

  archived_filename = os.path.basename(archive_ninja_log_path) + '.gz'
  archived_path = os.path.join(self._dest_dir, archived_filename)
  cros_build_lib.CompressFile(archive_ninja_log_path, archived_path)

  return archived_filename
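# The archived name built above is 'ninja_log.<user>.<hostname>.<mtime>.<pid>'
# plus '.gz', written under self._dest_dir, e.g. (hypothetical values):
#
#   ninja_log.builder.build123-m1.20200101-120000.4242.gz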
def _ArchiveInfoFiles(self, pattern):
  """Archives INFO files matched with |pattern|, gzip'ing each one.

  Args:
    pattern: matching path pattern.

  Returns:
    A list of tuples of (info_file_path, archived_file_name).
  """
  # Find files matched with the pattern in |log_dir|. Sort for
  # stabilization.
  paths = sorted(
      glob.glob(os.path.join(self._log_dir, '%s.*.INFO.*' % pattern)))
  if not paths:
    logging.warning('No glog files matched with: %s', pattern)

  result = []
  for path in paths:
    logging.info('Compressing %s', path)
    archived_filename = os.path.basename(path) + '.gz'
    dest_filepath = os.path.join(self._dest_dir, archived_filename)
    cros_build_lib.CompressFile(path, dest_filepath)
    result.append((path, archived_filename))
  return result
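# Usage sketch (hypothetical; assumes self._log_dir holds glog-style files
# such as 'compiler_proxy.host.user.log.INFO.20200101-120000.1234'):
#
#   archived = self._ArchiveInfoFiles('compiler_proxy')
#   # -> [('<log_dir>/compiler_proxy.host.user.log.INFO.20200101-120000.1234',
#   #      'compiler_proxy.host.user.log.INFO.20200101-120000.1234.gz')]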