def process_misc_info_txt(system_target_files_temp_dir,
                          other_target_files_temp_dir,
                          output_target_files_temp_dir,
                          system_misc_info_keys):
  """Perform special processing for META/misc_info.txt.

  Merges the META/misc_info.txt files from the system directory and the
  other directory, writing the merged result into the output directory.
  The files must already be extracted; afterwards the output
  META/misc_info.txt contains the merged content.

  Args:
    system_target_files_temp_dir: Directory containing the special items
      extracted from the system target files package.
    other_target_files_temp_dir: Directory containing the special items
      extracted from the other target files package.
    output_target_files_temp_dir: Directory used to create the output
      target files package after all the special cases are processed.
    system_misc_info_keys: Keys whose values are taken from the system
      instance of META/misc_info.txt; the remaining keys come from the
      other instance.
  """

  def load_misc_info(directory):
    # Parse META/misc_info.txt under `directory` into a dict.
    path = os.path.join(directory, 'META', 'misc_info.txt')
    with open(path) as f:
      return common.LoadDictionaryFromLines(f.read().splitlines())

  system_info_dict = load_misc_info(system_target_files_temp_dir)

  # Most of the merged misc info comes from the other target files.
  merged_info_dict = load_misc_info(other_target_files_temp_dir)

  # Selected values are overridden from the system instance.
  for key in system_misc_info_keys:
    merged_info_dict[key] = system_info_dict[key]

  # Merge misc info keys used for Dynamic Partitions.
  if (merged_info_dict.get('use_dynamic_partitions') == 'true' and
      system_info_dict.get('use_dynamic_partitions') == 'true'):
    merged_info_dict.update(
        merge_dynamic_partition_info_dicts(
            system_dict=system_info_dict,
            other_dict=merged_info_dict,
            size_prefix='super_',
            size_suffix='_group_size',
            list_prefix='super_',
            list_suffix='_partition_list'))

  output_misc_info_txt = os.path.join(output_target_files_temp_dir, 'META',
                                      'misc_info.txt')
  with open(output_misc_info_txt, 'w') as output:
    for key in sorted(merged_info_dict):
      output.write('{}={}\n'.format(key, merged_info_dict[key]))
def process_dynamic_partitions_info_txt(system_target_files_dir,
                                        other_target_files_dir,
                                        output_target_files_dir):
  """Perform special processing for META/dynamic_partitions_info.txt.

  Merges the META/dynamic_partitions_info.txt files from the system and
  the other directories, writing the merged result into the output
  directory. Does nothing when the other directory has no
  META/dynamic_partitions_info.txt.

  Args:
    system_target_files_dir: Directory containing the special items
      extracted from the system target files package.
    other_target_files_dir: Directory containing the special items
      extracted from the other target files package.
    output_target_files_dir: Directory used to create the output target
      files package after all the special cases are processed.
  """
  # Nothing to merge when the other package has no dynamic partitions info.
  if not os.path.exists(
      os.path.join(other_target_files_dir, 'META',
                   'dynamic_partitions_info.txt')):
    return

  def load_dynamic_partitions_info(directory):
    path = os.path.join(directory, 'META', 'dynamic_partitions_info.txt')
    with open(path) as f:
      return common.LoadDictionaryFromLines(f.read().splitlines())

  merged_dynamic_partitions_dict = merge_dynamic_partition_info_dicts(
      system_dict=load_dynamic_partitions_info(system_target_files_dir),
      other_dict=load_dynamic_partitions_info(other_target_files_dir),
      # META/dynamic_partitions_info.txt does not use dynamic_partition_list.
      include_dynamic_partition_list=False,
      size_suffix='_size',
      list_suffix='_partition_list')

  output_dynamic_partitions_info_txt = os.path.join(
      output_target_files_dir, 'META', 'dynamic_partitions_info.txt')
  with open(output_dynamic_partitions_info_txt, 'w') as output:
    for key in sorted(merged_dynamic_partitions_dict):
      output.write('{}={}\n'.format(key, merged_dynamic_partitions_dict[key]))
def merge_dynamic_partition_metadata(qssi_dynamic_partition_metadata_file,
                                     target_dynamic_partition_metadata_file,
                                     merged_dynamic_partition_metadata_file):
  """Merge the QSSI and target dynamic_partition_metadata.txt files.

  Collates the dynamic partition metadata generated during the QSSI and
  target lunch makes, placing the merged result in
  merged_dynamic_partition_metadata_file.

  Args:
    qssi_dynamic_partition_metadata_file: File containing QSSI dynamic
      partition information generated during the QSSI lunch make.
    target_dynamic_partition_metadata_file: File containing target dynamic
      partition information generated during the target lunch make.
    merged_dynamic_partition_metadata_file: Output file receiving the
      collated metadata of dynamic partitions from QSSI and target.
  """

  def read_helper(dynamic_metadata_file):
    with open(dynamic_metadata_file) as f:
      return list(f.read().splitlines())

  qssi_metadata_dict = common.LoadDictionaryFromLines(
      read_helper(qssi_dynamic_partition_metadata_file))

  # The target metadata supplies the base of the merged result.
  merged_metadata_dict = common.LoadDictionaryFromLines(
      read_helper(target_dynamic_partition_metadata_file))

  # Use .get() so a missing 'use_dynamic_partitions' key takes the error
  # path below instead of raising an opaque KeyError.
  if merged_metadata_dict.get('use_dynamic_partitions') == 'true':
    merged_metadata_dict['dynamic_partition_list'] = '%s %s' % (
        qssi_metadata_dict.get('dynamic_partition_list', ''),
        merged_metadata_dict.get('dynamic_partition_list', ''))
  else:
    # Typo fixed in the original message ("patiitions").
    logger.warning("Dynamic partitions is not enabled, Exiting!!")
    sys.exit(1)

  # Concatenate the per-group partition lists from both sources.
  for partition_group in merged_metadata_dict[
      'super_partition_groups'].split(' '):
    key = 'super_%s_partition_list' % partition_group
    merged_metadata_dict[key] = '%s %s' % (qssi_metadata_dict.get(key, ''),
                                           merged_metadata_dict.get(key, ''))

  sorted_keys = sorted(merged_metadata_dict.keys())

  # open(..., 'w') truncates, but removing first also replaces any existing
  # symlink with a regular file.
  if os.path.exists(merged_dynamic_partition_metadata_file):
    os.remove(merged_dynamic_partition_metadata_file)

  with open(merged_dynamic_partition_metadata_file, 'w') as merged_dpm_file:
    for key in sorted_keys:
      merged_dpm_file.write('{}={}\n'.format(key, merged_metadata_dict[key]))

  # Log string reassembled onto one call and "metdata" typo fixed.
  logger.info("Generated merged dynamic partition metadata file : %s",
              merged_dynamic_partition_metadata_file)
def process_dynamic_partitions_info_txt(framework_target_files_dir,
                                        vendor_target_files_dir,
                                        output_target_files_dir):
  """Perform special processing for META/dynamic_partitions_info.txt.

  Merges the META/dynamic_partitions_info.txt files from the framework and
  the vendor directories, writing the merged result into the output
  directory. Does nothing when the vendor directory has no
  META/dynamic_partitions_info.txt.

  Args:
    framework_target_files_dir: Directory containing the special items
      extracted from the framework target files package.
    vendor_target_files_dir: Directory containing the special items
      extracted from the vendor target files package.
    output_target_files_dir: Directory used to create the output target
      files package after all the special cases are processed.
  """
  # Nothing to merge when the vendor package has no dynamic partitions info.
  if not os.path.exists(
      os.path.join(vendor_target_files_dir, 'META',
                   'dynamic_partitions_info.txt')):
    return

  def load_dynamic_partitions_info(directory):
    path = os.path.join(directory, 'META', 'dynamic_partitions_info.txt')
    with open(path) as f:
      return common.LoadDictionaryFromLines(f.read().splitlines())

  merged_dynamic_partitions_dict = merge_dynamic_partition_info_dicts(
      framework_dict=load_dynamic_partitions_info(framework_target_files_dir),
      vendor_dict=load_dynamic_partitions_info(vendor_target_files_dir),
      # META/dynamic_partitions_info.txt does not use dynamic_partition_list.
      include_dynamic_partition_list=False,
      size_suffix='_size',
      list_suffix='_partition_list')

  write_sorted_data(
      data=merged_dynamic_partitions_dict,
      path=os.path.join(output_target_files_dir, 'META',
                        'dynamic_partitions_info.txt'))
def process_misc_info_txt(system_target_files_temp_dir,
                          other_target_files_temp_dir,
                          output_target_files_temp_dir):
  """Perform special processing for META/misc_info.txt.

  Merges the META/misc_info.txt files from the system directory and the
  other directory, writing the merged result into the output directory.
  The files must already be extracted; afterwards the output
  META/misc_info.txt contains the merged content.

  Args:
    system_target_files_temp_dir: Directory containing the special items
      extracted from the system target files package.
    other_target_files_temp_dir: Directory containing the special items
      extracted from the other target files package.
    output_target_files_temp_dir: Directory used to create the output
      target files package after all the special cases are processed.
  """

  def load_misc_info(directory):
    path = os.path.join(directory, 'META', 'misc_info.txt')
    with open(path) as f:
      return common.LoadDictionaryFromLines(f.read().splitlines())

  system_info_dict = load_misc_info(system_target_files_temp_dir)

  # Most of the merged misc info comes from the other target files.
  merged_info_dict = load_misc_info(other_target_files_temp_dir)

  # Override selected values with those from the system instance.
  # TODO(b/124467065): This should be more flexible than using the
  # hard-coded system_misc_info_keys (a module-level list).
  for key in system_misc_info_keys:
    merged_info_dict[key] = system_info_dict[key]

  output_misc_info_txt = os.path.join(output_target_files_temp_dir, 'META',
                                      'misc_info.txt')
  with open(output_misc_info_txt, 'w') as output:
    for key in sorted(merged_info_dict):
      output.write('{}={}\n'.format(key, merged_info_dict[key]))
def test_retrofit_vab(self):
  # Retrofit virtual A/B on top of the baseline config should pass.
  self.info_dict.update(
      common.LoadDictionaryFromLines([
          'virtual_ab=true',
          'virtual_ab_retrofit=true',
      ]))
  CheckPartitionSizes(self.info_dict)
def test_non_dap(self):
  # Disabling dynamic partitions must make the size check fail.
  self.info_dict.update(
      common.LoadDictionaryFromLines([
          'use_dynamic_partitions=false',
      ]))
  with self.assertRaises(RuntimeError):
    CheckPartitionSizes(self.info_dict)
def test_ab_partition_too_big(self):
  # Growing one image past its group budget must fail the check.
  self.info_dict.update(
      common.LoadDictionaryFromLines([
          'system_image_size=100',
      ]))
  with self.assertRaises(RuntimeError):
    CheckPartitionSizes(self.info_dict)
def test_ab_group_too_big(self):
  # A group size beyond the super partition budget must fail the check.
  self.info_dict.update(
      common.LoadDictionaryFromLines([
          'super_group_group_size=110',
      ]))
  with self.assertRaises(RuntimeError):
    CheckPartitionSizes(self.info_dict)
def test_non_ab(self):
  # Non-A/B layout with matching super sizes should pass.
  self.info_dict.update(
      common.LoadDictionaryFromLines([
          'ab_update=false',
          'super_partition_size=101',
          'super_super_device_size=101',
      ]))
  CheckPartitionSizes(self.info_dict)
def test_vab(self):
  # Virtual A/B with matching super sizes should pass.
  self.info_dict.update(
      common.LoadDictionaryFromLines([
          'virtual_ab=true',
          'super_partition_size=101',
          'super_super_device_size=101',
      ]))
  CheckPartitionSizes(self.info_dict)
def test_retrofit_vab_too_big(self):
  # Retrofit virtual A/B plus an oversized system image must fail.
  self.info_dict.update(
      common.LoadDictionaryFromLines([
          'virtual_ab=true',
          'virtual_ab_retrofit=true',
          'system_image_size=100',
      ]))
  with self.assertRaises(RuntimeError):
    CheckPartitionSizes(self.info_dict)
def test_vab_too_big_with_system_other(self):
  # Adding system_other on virtual A/B pushes the total over budget.
  self.info_dict.update(
      common.LoadDictionaryFromLines([
          'virtual_ab=true',
          'system_other_image_size=20',
          'super_partition_size=101',
          'super_super_device_size=101',
      ]))
  with self.assertRaises(RuntimeError):
    CheckPartitionSizes(self.info_dict)
def main(argv):
  """Parse options, unpack the target files, and re-sign the provdata zip.

  Args:
    argv: Command-line arguments; expects exactly two positional args,
      the input target files zip and the output fastboot file name.
  """

  def option_handler(o, a):
    # Extra option handler for common.ParseOptions; returns True when the
    # option was consumed.
    if o in ("-I", "--ifwi-directory"):
      OPTIONS.ifwi_directory = a
    elif o in ("-A", "--avb-key"):
      OPTIONS.avb_key = a
      OPTIONS.all_keys.add(a)
    elif o in ("-K", "--oem-key"):
      OPTIONS.oem_key = a
      OPTIONS.all_keys.add(a)
    elif o in ("-V", "--variant"):
      OPTIONS.variant = a
    else:
      return False
    return True

  args = common.ParseOptions(
      argv, __doc__,
      extra_opts="I:A:K:V:",
      extra_long_opts=["ifwi-directory=", "avb-key=", "oem-key=", "variant="],
      extra_option_handler=option_handler)

  if len(args) != 2:
    common.Usage(__doc__)
    sys.exit(1)

  output_fastboot_fn = args[1]

  print("Extracting the provdata.zip")
  prov_file = "provdata_" + OPTIONS.variant + ".zip"
  unpack_dir = common.UnzipTemp(args[0])
  input_zip = zipfile.ZipFile(args[0], "r")
  input_provzip = zipfile.ZipFile(
      os.path.join(unpack_dir, "RADIO", prov_file), "r")

  print("Parsing build.prop for target_product")
  build_prop_path = os.path.join(unpack_dir, "SYSTEM", "build.prop")
  try:
    with open(build_prop_path) as f:
      d = common.LoadDictionaryFromLines(f.read().split("\n"))
  except IOError as e:
    if e.errno == errno.ENOENT:
      # Bug fix: report the missing path instead of the file object.
      raise KeyError(build_prop_path)
    # Bug fix: do not silently swallow other I/O errors (the original fell
    # through with an empty dict, producing a misleading KeyError below).
    raise
  OPTIONS.target_product = d["ro.product.system.name"]

  print("Processing private keys")
  OPTIONS.info_dict = common.LoadInfoDict(input_zip)
  # GetKeyPasswords may prompt the user; keep the call even though the
  # result is unused in this visible code.
  passwords = common.GetKeyPasswords(OPTIONS.all_keys)
  # Process the provdata.zip to generate the re-signed one.
  process_provzip(input_provzip, output_fastboot_fn)
  common.ZipClose(input_zip)
  print("Extract done.")
def LoadInfoDict(input_dir):
  """Load sprd_misc_info.txt from input_dir and return it as a dict.

  Args:
    input_dir: Directory expected to contain sprd_misc_info.txt.

  Returns:
    The parsed key/value dict, or an empty dict when the file is missing.
  """
  d = {}
  try:
    with open(os.path.join(input_dir, "sprd_misc_info.txt"), "rb") as f:
      data_sprd_misc = f.read()
    d = common.LoadDictionaryFromLines(data_sprd_misc.split("\n"))
  except IOError:
    # Best-effort: the file is optional, so just report and continue.
    print("can't find sprd_misc_info.txt!")
  # Bug fix: the original built the dict but never returned it, so callers
  # always received None.
  return d
def test_retrofit_dap(self):
  # Retrofit dynamic partitions with device sizes that fit should pass.
  self.info_dict.update(
      common.LoadDictionaryFromLines([
          'dynamic_partition_retrofit=true',
          'super_block_devices=system vendor',
          'super_system_device_size=75',
          'super_vendor_device_size=25',
          'super_partition_size=100',
      ]))
  CheckPartitionSizes(self.info_dict)
def _LoadOemDicts(oem_source):
  """Returns the list of loaded OEM properties dict."""
  if not oem_source:
    return None

  def load_one(path):
    # Each OEM file is a simple key=value properties file.
    with open(path) as fp:
      return common.LoadDictionaryFromLines(fp.readlines())

  return [load_one(oem_file) for oem_file in oem_source]
def test_block_devices_not_match(self):
  # Block device sizes that do not add up to super_partition_size must fail.
  self.info_dict.update(
      common.LoadDictionaryFromLines([
          'dynamic_partition_retrofit=true',
          'super_block_devices=system vendor',
          'super_system_device_size=80',
          'super_vendor_device_size=25',
          'super_partition_size=100',
      ]))
  with self.assertRaises(RuntimeError):
    CheckPartitionSizes(self.info_dict)
def ReplaceMiscInfoTxt(input_zip, output_zip, misc_info):
  """Replaces META/misc_info.txt.

  Only writes back the keys present in the original META/misc_info.txt,
  because the current in-memory dict contains additional items computed
  at runtime.
  """
  misc_info_old = common.LoadDictionaryFromLines(
      input_zip.read('META/misc_info.txt').split('\n'))
  # Keep only the keys the original file already had, in sorted order.
  items = [
      '%s=%s' % (key, misc_info[key])
      for key in sorted(misc_info)
      if key in misc_info_old
  ]
  common.ZipWriteStr(output_zip, "META/misc_info.txt", '\n'.join(items))
def LoadInfoDict_amlogic(info_dict, input_file, input_dir=None):
  """Read the VENDOR build/default props from the input target files.

  Parses the concatenated VENDOR/build.prop and VENDOR/default.prop
  key/value pairs into info_dict["vendor.prop"], dumps the dict, and
  returns True.
  """
  prop_data = input_file.read("VENDOR/build.prop")
  prop_data += input_file.read("VENDOR/default.prop")
  info_dict["vendor.prop"] = common.LoadDictionaryFromLines(
      prop_data.split("\n"))
  print("--- *************** ---")
  common.DumpInfoDict(info_dict)
  return True
def setUp(self):
  # Baseline dynamic-partition configuration shared by the test cases;
  # individual tests override entries via info_dict.update().
  self.info_dict = common.LoadDictionaryFromLines([
      'use_dynamic_partitions=true',
      'ab_update=true',
      'super_block_devices=super',
      'dynamic_partition_list=system vendor product',
      'super_partition_groups=group',
      'super_group_partition_list=system vendor product',
      'super_partition_size=200',
      'super_super_device_size=200',
      'super_group_group_size=100',
      'system_image_size=50',
      'vendor_image_size=20',
      'product_image_size=20',
  ])
def GetBootloaderInfo(info_dir, autosize):
  """Return the bootloader image info read from info_dir.

  Prefers bootloader_image_info.txt; otherwise falls back to the legacy
  bootloader-size.txt so existing OTA generation scripts keep working.

  Args:
    info_dir: Directory containing the bootloader info file(s).
    autosize: If true, force info["size"] to 0.

  Returns:
    A dict containing at least "size" (and "block_size" in the legacy path).
  """
  info_file = os.path.join(info_dir, "bootloader_image_info.txt")
  if os.path.isfile(info_file):
    # Bug fix: close the file instead of leaking the handle.
    with open(info_file) as f:
      info = common.LoadDictionaryFromLines(f.readlines())
  else:
    # Preserve legacy way to get size to keep OTA generation scripts working.
    info = {}
    info_file = os.path.join(info_dir, "bootloader-size.txt")
    # Bug fix: close the file instead of leaking the handle.
    with open(info_file) as f:
      info["size"] = int(f.read().strip())
    info["block_size"] = None
  if autosize:
    info["size"] = 0
  return info
def main(argv):
  """Parse options, unpack the target files, and locate the provdata zip.

  Python 2 script: uses print statements and `except IOError, e` syntax.
  """

  def option_handler(o, a):
    # Extra option handler for common.ParseOptions; returns True when the
    # option was consumed.
    if o in ("-I", "--ifwi-directory"):
      OPTIONS.ifwi_directory = a
    elif o in ("-A", "--avb-key"):
      OPTIONS.avb_key = a
      OPTIONS.all_keys.add(a)
    elif o in ("-K", "--oem-key"):
      OPTIONS.oem_key = a
      OPTIONS.all_keys.add(a)
    elif o in ("-V", "--variant"):
      OPTIONS.variant = a
    else:
      return False
    return True

  args = common.ParseOptions(argv, __doc__,
                             extra_opts="I:A:K:V:",
                             extra_long_opts=[
                                 "ifwi-directory=",
                                 "avb-key=",
                                 "oem-key=",
                                 "variant="
                             ],
                             extra_option_handler=option_handler)
  if len(args) != 2:
    common.Usage(__doc__)
    sys.exit(1)
  output_fastboot_fn = args[1]
  print "Extracting the provdata.zip"
  # The per-variant provdata zip lives under RADIO/ in the unpacked tree.
  prov_file = "provdata_" + OPTIONS.variant + ".zip"
  unpack_dir = common.UnzipTemp(args[0])
  input_zip = zipfile.ZipFile(args[0], "r")
  input_provzip = zipfile.ZipFile(
      os.path.join(unpack_dir, "RADIO", prov_file), "r")
  print "Parsing build.prop for target_product"
  d = {}
  try:
    with open(os.path.join(unpack_dir, "SYSTEM", "build.prop")) as f:
      d = common.LoadDictionaryFromLines(f.read().split("\n"))
  except IOError, e:
    # NOTE(review): non-ENOENT IOErrors fall through silently here with d
    # left empty, and KeyError is raised with the file object rather than
    # its path — looks unintentional; confirm before relying on it.
    if e.errno == errno.ENOENT:
      raise KeyError(f)
def BuildSuperImage(inp, out):
  """Build a super image, dispatching on the type of `inp`.

  Accepts an info dict, a directory of extracted target files, a target
  files zip, or a misc_info.txt-style file, and forwards to the matching
  builder.

  Raises:
    ValueError: If `inp` is none of the supported kinds.
  """
  if isinstance(inp, dict):
    logger.info("Building super image from info dict...")
    return BuildSuperImageFromDict(inp, out)

  if isinstance(inp, str):
    if os.path.isdir(inp):
      logger.info("Building super image from extracted target files...")
      return BuildSuperImageFromExtractedTargetFiles(inp, out)
    if zipfile.is_zipfile(inp):
      logger.info("Building super image from target files...")
      return BuildSuperImageFromTargetFiles(inp, out)
    if os.path.isfile(inp):
      # A plain file is treated as misc_info.txt-style key=value lines.
      with open(inp) as f:
        contents = f.read()
      logger.info("Building super image from info dict...")
      return BuildSuperImageFromDict(
          common.LoadDictionaryFromLines(contents.split("\n")), out)

  raise ValueError("{} is not a dictionary or a valid path".format(inp))
def process_misc_info_txt(system_target_files_temp_dir,
                          other_target_files_temp_dir,
                          output_target_files_temp_dir,
                          system_misc_info_keys):
  """Perform special processing for META/misc_info.txt.

  Merges the META/misc_info.txt files from the system directory and the
  other directory, writing the merged result into the output directory.
  The files must already be extracted; afterwards the output
  META/misc_info.txt contains the merged content.

  Args:
    system_target_files_temp_dir: Directory containing the special items
      extracted from the system target files package.
    other_target_files_temp_dir: Directory containing the special items
      extracted from the other target files package.
    output_target_files_temp_dir: Directory used to create the output
      target files package after all the special cases are processed.
    system_misc_info_keys: Keys whose values are taken from the system
      instance of META/misc_info.txt; the remaining keys come from the
      other instance.

  Raises:
    common.ExternalError: If a partition group in the merged info lacks
      its super_<group>_group_size entry.
  """

  def load_misc_info(directory):
    path = os.path.join(directory, 'META', 'misc_info.txt')
    with open(path) as f:
      return common.LoadDictionaryFromLines(f.read().splitlines())

  system_info_dict = load_misc_info(system_target_files_temp_dir)

  # Most of the merged misc info comes from the other target files.
  merged_info_dict = load_misc_info(other_target_files_temp_dir)

  # Selected values are overridden from the system instance.
  for key in system_misc_info_keys:
    merged_info_dict[key] = system_info_dict[key]

  # Merge misc info keys used for Dynamic Partitions.
  if (merged_info_dict.get('use_dynamic_partitions') == 'true' and
      system_info_dict.get('use_dynamic_partitions') == 'true'):
    merged_info_dict['dynamic_partition_list'] = '%s %s' % (
        system_info_dict.get('dynamic_partition_list', ''),
        merged_info_dict.get('dynamic_partition_list', ''))

    # Partition groups and group sizes are defined by the other (non-system)
    # misc info file because these values may vary for each board that uses
    # a shared system image.
    for partition_group in merged_info_dict[
        'super_partition_groups'].split(' '):
      if ('super_%s_group_size' % partition_group) not in merged_info_dict:
        raise common.ExternalError(
            'Other META/misc_info.txt does not contain required key '
            'super_%s_group_size.' % partition_group)
      key = 'super_%s_partition_list' % partition_group
      merged_info_dict[key] = '%s %s' % (
          system_info_dict.get(key, ''),
          merged_info_dict.get(key, ''))

  output_misc_info_txt = os.path.join(output_target_files_temp_dir, 'META',
                                      'misc_info.txt')
  with open(output_misc_info_txt, 'w') as output:
    for key in sorted(merged_info_dict):
      output.write('{}={}\n'.format(key, merged_info_dict[key]))
def merge_target_files(temp_dir, system_target_files, system_item_list,
                       system_misc_info_keys, other_target_files,
                       other_item_list, output_target_files, output_dir,
                       output_item_list, output_ota, output_img,
                       output_super_empty, rebuild_recovery):
  """Merge two target files packages together.

  This function takes system and other target files packages as input,
  performs various file extractions, special case processing, and finally
  creates a merged zip archive as output.

  Args:
    temp_dir: The name of a directory we use when we extract items from the
      input target files packages, and also a scratch directory that we use
      for temporary files.
    system_target_files: The name of the zip archive containing the system
      partial target files package.
    system_item_list: The list of items to extract from the partial system
      target files package as is, meaning these items will land in the
      output target files package exactly as they appear in the input
      partial system target files package.
    system_misc_info_keys: The list of keys to obtain from the system
      instance of META/misc_info.txt. The remaining keys from the other
      instance.
    other_target_files: The name of the zip archive containing the other
      partial target files package.
    other_item_list: The list of items to extract from the partial other
      target files package as is, meaning these items will land in the
      output target files package exactly as they appear in the input
      partial other target files package.
    output_target_files: The name of the output zip archive target files
      package created by merging system and other.
    output_dir: The destination directory for saving merged files.
    output_item_list: The list of items to copy into the output_dir.
    output_ota: The name of the output zip archive ota package.
    output_img: The name of the output zip archive img package.
    output_super_empty: If provided, creates a super_empty.img file from the
      merged target files package and saves it at this path.
    rebuild_recovery: If true, rebuild the recovery patch used by non-A/B
      devices and write it to the system image.
  """

  logger.info('starting: merge system %s and other %s into output %s',
              system_target_files, other_target_files, output_target_files)

  # Create directory names that we'll use when we extract files from system,
  # and other, and for zipping the final output.

  system_target_files_temp_dir = os.path.join(temp_dir, 'system')
  other_target_files_temp_dir = os.path.join(temp_dir, 'other')
  output_target_files_temp_dir = os.path.join(temp_dir, 'output')

  # Extract "as is" items from the input system partial target files package.
  # We extract them directly into the output temporary directory since the
  # items do not need special case processing.

  extract_items(
      target_files=system_target_files,
      target_files_temp_dir=output_target_files_temp_dir,
      extract_item_list=system_item_list)

  # Extract "as is" items from the input other partial target files package.
  # We extract them directly into the output temporary directory since the
  # items do not need special case processing.

  extract_items(
      target_files=other_target_files,
      target_files_temp_dir=output_target_files_temp_dir,
      extract_item_list=other_item_list)

  # Extract "special" items from the input system partial target files
  # package. We extract these items to different directory since they require
  # special processing before they will end up in the output directory.

  extract_items(
      target_files=system_target_files,
      target_files_temp_dir=system_target_files_temp_dir,
      extract_item_list=system_extract_special_item_list)

  # Extract "special" items from the input other partial target files
  # package. We extract these items to different directory since they require
  # special processing before they will end up in the output directory.

  extract_items(
      target_files=other_target_files,
      target_files_temp_dir=other_target_files_temp_dir,
      extract_item_list=other_extract_special_item_list)

  # Now that the temporary directories contain all the extracted files,
  # perform special case processing on any items that need it. After this
  # function completes successfully, all the files we need to create the
  # output target files package are in place.

  process_special_cases(
      system_target_files_temp_dir=system_target_files_temp_dir,
      other_target_files_temp_dir=other_target_files_temp_dir,
      output_target_files_temp_dir=output_target_files_temp_dir,
      system_misc_info_keys=system_misc_info_keys,
      rebuild_recovery=rebuild_recovery)

  # Regenerate IMAGES in the temporary directory.

  add_img_args = ['--verbose']
  if rebuild_recovery:
    add_img_args.append('--rebuild_recovery')
  add_img_args.append(output_target_files_temp_dir)

  add_img_to_target_files.main(add_img_args)

  # Create super_empty.img using the merged misc_info.txt.

  misc_info_txt = os.path.join(output_target_files_temp_dir, 'META',
                               'misc_info.txt')

  def read_helper():
    # Returns the merged misc_info.txt as a list of lines.
    with open(misc_info_txt) as f:
      return list(f.read().splitlines())

  use_dynamic_partitions = common.LoadDictionaryFromLines(
      read_helper()).get('use_dynamic_partitions')

  if use_dynamic_partitions != 'true' and output_super_empty:
    raise ValueError(
        'Building super_empty.img requires use_dynamic_partitions=true.')
  elif use_dynamic_partitions == 'true':
    super_empty_img = os.path.join(output_target_files_temp_dir, 'IMAGES',
                                   'super_empty.img')
    build_super_image_args = [
        misc_info_txt,
        super_empty_img,
    ]
    build_super_image.main(build_super_image_args)

    # Copy super_empty.img to the user-provided output_super_empty location.
    if output_super_empty:
      shutil.copyfile(super_empty_img, output_super_empty)

  # Create the IMG package from the merged target files (before zipping, in
  # order to avoid an unnecessary unzip and copy).

  if output_img:
    img_from_target_files_args = [
        output_target_files_temp_dir,
        output_img,
    ]
    img_from_target_files.main(img_from_target_files_args)

  # Finally, create the output target files zip archive and/or copy the
  # output items to the output target files directory.

  if output_dir:
    copy_items(output_target_files_temp_dir, output_dir, output_item_list)

  if not output_target_files:
    return

  output_zip = os.path.abspath(output_target_files)
  output_target_files_list = os.path.join(temp_dir, 'output.list')
  output_target_files_meta_dir = os.path.join(output_target_files_temp_dir,
                                              'META')

  # List the META/ entries (sorted) separately from the rest of the tree.
  find_command = [
      'find',
      output_target_files_meta_dir,
  ]
  find_process = common.Run(
      find_command, stdout=subprocess.PIPE, verbose=False)
  meta_content = common.RunAndCheckOutput(['sort'],
                                          stdin=find_process.stdout,
                                          verbose=False)

  # List everything else, excluding the META/ subtree via -prune.
  find_command = [
      'find', output_target_files_temp_dir, '-path',
      output_target_files_meta_dir, '-prune', '-o', '-print'
  ]
  find_process = common.Run(
      find_command, stdout=subprocess.PIPE, verbose=False)
  other_content = common.RunAndCheckOutput(['sort'],
                                           stdin=find_process.stdout,
                                           verbose=False)

  with open(output_target_files_list, 'wb') as f:
    f.write(meta_content)
    f.write(other_content)

  # Zip the merged tree using the file list built above.
  command = [
      'soong_zip',
      '-d',
      '-o',
      output_zip,
      '-C',
      output_target_files_temp_dir,
      '-l',
      output_target_files_list,
  ]
  logger.info('creating %s', output_target_files)
  common.RunAndWait(command, verbose=True)

  # Create the OTA package from the merged target files package.

  if output_ota:
    ota_from_target_files_args = [
        output_zip,
        output_ota,
    ]
    ota_from_target_files.main(ota_from_target_files_args)
def process_misc_info_txt(system_target_files_temp_dir,
                          other_target_files_temp_dir,
                          output_target_files_temp_dir,
                          system_misc_info_keys):
  """Perform special processing for META/misc_info.txt.

  Merges the META/misc_info.txt files from the system directory and the
  other directory, writing the merged result into the output directory.
  The files must already be extracted; afterwards the output
  META/misc_info.txt contains the merged content.

  Args:
    system_target_files_temp_dir: Directory containing the special items
      extracted from the system target files package.
    other_target_files_temp_dir: Directory containing the special items
      extracted from the other target files package.
    output_target_files_temp_dir: Directory used to create the output
      target files package after all the special cases are processed.
    system_misc_info_keys: Keys whose values are taken from the system
      instance of META/misc_info.txt; the remaining keys come from the
      other instance.
  """

  def load_misc_info(directory):
    path = os.path.join(directory, 'META', 'misc_info.txt')
    with open(path) as f:
      return common.LoadDictionaryFromLines(f.read().splitlines())

  system_info_dict = load_misc_info(system_target_files_temp_dir)

  # Most of the merged misc info comes from the other target files.
  merged_info_dict = load_misc_info(other_target_files_temp_dir)

  # Selected values are overridden from the system instance.
  for key in system_misc_info_keys:
    merged_info_dict[key] = system_info_dict[key]

  # Merge misc info keys used for Dynamic Partitions.
  if (merged_info_dict.get('use_dynamic_partitions') == 'true' and
      system_info_dict.get('use_dynamic_partitions') == 'true'):
    merged_info_dict.update(
        merge_dynamic_partition_info_dicts(
            system_dict=system_info_dict,
            other_dict=merged_info_dict,
            size_prefix='super_',
            size_suffix='_group_size',
            list_prefix='super_',
            list_suffix='_partition_list'))
    # Ensure that add_img_to_target_files rebuilds super split images for
    # devices that retrofit dynamic partitions. This flag may have been set
    # to false in the partial builds to prevent duplicate building of
    # super.img.
    merged_info_dict['build_super_partition'] = 'true'

  write_sorted_data(
      data=merged_info_dict,
      path=os.path.join(output_target_files_temp_dir, 'META',
                        'misc_info.txt'))
def process_misc_info_txt(framework_target_files_temp_dir,
                          vendor_target_files_temp_dir,
                          output_target_files_temp_dir,
                          framework_misc_info_keys):
  """Perform special processing for META/misc_info.txt.

  Merges the META/misc_info.txt files from the framework directory and the
  vendor directory, writing the merged result into the output directory.
  The files must already be extracted; afterwards the output
  META/misc_info.txt contains the merged content.

  Args:
    framework_target_files_temp_dir: Directory containing the special items
      extracted from the framework target files package.
    vendor_target_files_temp_dir: Directory containing the special items
      extracted from the vendor target files package.
    output_target_files_temp_dir: Directory used to create the output
      target files package after all the special cases are processed.
    framework_misc_info_keys: Keys whose values are taken from the framework
      instance of META/misc_info.txt; the remaining keys come from the
      vendor instance.
  """

  def load_misc_info(directory):
    path = os.path.join(directory, 'META', 'misc_info.txt')
    with open(path) as f:
      return common.LoadDictionaryFromLines(f.read().splitlines())

  framework_dict = load_misc_info(framework_target_files_temp_dir)

  # Most of the merged misc info comes from the vendor target files.
  merged_dict = load_misc_info(vendor_target_files_temp_dir)

  # Selected values are overridden from the framework instance.
  for key in framework_misc_info_keys:
    merged_dict[key] = framework_dict[key]

  # Merge misc info keys used for Dynamic Partitions.
  if (merged_dict.get('use_dynamic_partitions') == 'true' and
      framework_dict.get('use_dynamic_partitions') == 'true'):
    merged_dict.update(
        merge_dynamic_partition_info_dicts(
            framework_dict=framework_dict,
            vendor_dict=merged_dict,
            size_prefix='super_',
            size_suffix='_group_size',
            list_prefix='super_',
            list_suffix='_partition_list'))
    # Ensure that add_img_to_target_files rebuilds super_empty.img. This
    # flag may have been set to false in the partial builds to prevent
    # duplicate building of super.img and super_empty.img.
    merged_dict['build_super_partition'] = 'true'

  # Replace <image>_selinux_fc values with framework or vendor
  # file_contexts.bin depending on which dictionary the key came from.
  # Only the file basename is required because all selinux_fc properties
  # are replaced with the full path to the file under META/ when
  # misc_info.txt is loaded from target files for repacking. See
  # common.py LoadInfoDict().
  for key in merged_dict:
    if key.endswith('_selinux_fc'):
      merged_dict[key] = 'vendor_file_contexts.bin'
  # Framework-sourced keys win; this assignment also adds any selinux_fc
  # keys that only exist in the framework dict.
  for key in framework_dict:
    if key.endswith('_selinux_fc'):
      merged_dict[key] = 'framework_file_contexts.bin'

  write_sorted_data(
      data=merged_dict,
      path=os.path.join(output_target_files_temp_dir, 'META',
                        'misc_info.txt'))