def main():
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '--image-assembly-config',
        type=argparse.FileType('r'),
        required=True,
        help='The path to the image assembly config file')
    parser.add_argument(
        '--images-config',
        type=argparse.FileType('r'),
        help='The path to the image assembly images config file')
    parser.add_argument(
        '--output',
        type=str,
        required=True,
        help='The path to the first output of the image assembly target')
    parser.add_argument(
        '--depfile',
        type=argparse.FileType('w'),
        required=True,
        help='The path to the depfile for this script')
    args = parser.parse_args()

    config = ImageAssemblyConfig.json_load(args.image_assembly_config)

    # Collect the list of files that are read in this script.
    deps = []
    deps.extend(config.base)
    deps.extend(config.cache)
    deps.extend(config.system)
    if deps:
        dep_file = DepFile(args.output)
        dep_file.update(deps)
        dep_file.write_to(args.depfile)

    # Collect the list of inputs to image assembly.
    inputs = []
    inputs.extend(files_from_package_set(config.base))
    inputs.extend(files_from_package_set(config.cache))
    inputs.extend(files_from_package_set(config.system))
    inputs.extend([entry.source for entry in config.bootfs_files])
    inputs.append(config.kernel.path)

    if args.images_config:
        images_config = json.load(args.images_config)['images']
        for image in images_config:
            if image['type'] == 'vbmeta':
                if 'key' in image:
                    inputs.append(image['key'])
                if 'key_metadata' in image:
                    inputs.append(image['key_metadata'])
                inputs.extend(image.get('additional_descriptor_files', []))
            elif image['type'] == 'zbi':
                if 'postprocessing_script' in image:
                    script = image['postprocessing_script']
                    if 'path' in script:
                        inputs.append(script['path'])

    with open(args.output, 'w') as f:
        for input in inputs:
            f.write(input + '\n')
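
# A minimal sketch of what a helper like files_from_package_set() could look
# like, assuming that each entry in config.base / config.cache / config.system
# is a package manifest path, and that manifests use the same
# {"blobs": [{"source_path": ...}]} JSON shape read by add_package() further
# down in this section.  The name and return value here are illustrative; this
# is not the real assembly utility.
import json


def files_from_package_set_sketch(package_manifest_paths):
    paths = []
    for manifest_path in package_manifest_paths:
        # The manifest itself is an input, as are all of its blobs.
        paths.append(manifest_path)
        with open(manifest_path, 'r') as f:
            manifest = json.load(f)
        for blob in manifest.get("blobs", []):
            paths.append(blob["source_path"])
    return paths

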
def main():
    parser = argparse.ArgumentParser(
        description=
        "Create an image assembly configuration that is what remains after removing the configs to 'subtract'"
    )
    parser.add_argument(
        "--image-assembly-config", type=argparse.FileType('r'), required=True)
    parser.add_argument("--config-data-entries", type=argparse.FileType('r'))
    parser.add_argument(
        "--subtract", default=[], nargs="*", type=argparse.FileType('r'))
    parser.add_argument("--outdir", required=True)
    parser.add_argument("--depfile", type=argparse.FileType('w'))
    parser.add_argument("--export-manifest", type=argparse.FileType('w'))
    args = parser.parse_args()

    # Read in the legacy config and the others to subtract from it.
    legacy: ImageAssemblyConfig = ImageAssemblyConfig.json_load(
        args.image_assembly_config)
    subtract = [
        ImageAssemblyConfig.json_load(other) for other in args.subtract
    ]

    # Subtract each from the legacy config, in the order given in args.
    for other in subtract:
        legacy = legacy.difference(other)

    # Read in the config_data entries if available.
    if args.config_data_entries:
        config_data_entries = [
            FileEntry.from_dict(entry)
            for entry in json.load(args.config_data_entries)
        ]
    else:
        config_data_entries = []

    # Create an Assembly Input Bundle from the remaining contents.
    (assembly_input_bundle, assembly_config_manifest_path,
     deps) = copy_to_assembly_input_bundle(
         legacy, config_data_entries, args.outdir)

    # Write out a fini manifest of the files that have been copied, to create
    # a package or archive that contains all of the files in the bundle.
    if args.export_manifest:
        assembly_input_bundle.write_fini_manifest(
            args.export_manifest, base_dir=args.outdir)

    # Write out a depfile.
    if args.depfile:
        dep_file = DepFile(assembly_config_manifest_path)
        dep_file.update(deps)
        dep_file.write_to(args.depfile)
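
# A minimal sketch of the subtraction semantics relied on above, assuming an
# ImageAssemblyConfig holds sets of package manifest paths (base, cache,
# system) as used by the script earlier in this section.  This only mirrors
# the expected behavior of difference() on the package sets; it is not the
# real ImageAssemblyConfig implementation, which also covers bootfs files and
# other fields.
from dataclasses import dataclass, field


@dataclass
class ConfigSketch:
    base: set = field(default_factory=set)
    cache: set = field(default_factory=set)
    system: set = field(default_factory=set)

    def difference(self, other: "ConfigSketch") -> "ConfigSketch":
        # Keep only the entries that the other config does not also provide.
        return ConfigSketch(
            base=self.base - other.base,
            cache=self.cache - other.cache,
            system=self.system - other.system)

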
def test_depfile_writing(self):
    depfile = DepFile("/foo/bar/baz/output", rebase="/foo/bar")
    depfile.update([
        "/foo/bar/things/input_a", "/foo/bar/things/input_b", "/foo/input_c"
    ])

    with tempfile.TemporaryFile('w+') as outfile:
        # Write out the depfile.
        depfile.write_to(outfile)

        # Read the contents back in.
        outfile.seek(0)
        contents = outfile.read()

        self.assertEqual(contents, DepFileTests.expected)
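
# A minimal sketch of the path rebasing the test above relies on, assuming
# DepFile rewrites each path relative to the rebase directory before writing a
# Makefile-style "output: deps" line.  This illustrates the assumed behavior;
# it is not the real DepFile class, and the exact ordering and escaping of the
# expected contents are defined by that implementation.
import os


def rebase_sketch(path: str, rebase: str) -> str:
    # "/foo/bar/things/input_a" rebased against "/foo/bar" -> "things/input_a"
    # "/foo/input_c" rebased against "/foo/bar" -> "../input_c"
    return os.path.relpath(path, rebase)

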
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--source-root", default=".", help="The root Fuchsia source dir")
    parser.add_argument(
        "--output",
        type=argparse.FileType('w'),
        help="A file to write output to.")
    parser.add_argument(
        "--depfile",
        type=argparse.FileType('w'),
        help="A depfile of read files; this requires the use of --output")
    parser.add_argument("-v", "--verbose", action="store_true")
    args = parser.parse_args()

    # Set up logging / error-printing methods and utilities.
    output = []

    def log(string: str):
        """Log (display or record in the output) a non-error string."""
        if args.verbose:
            print(string)
        if args.output:
            output.append(string)

    def error(string: str):
        """Output an error string."""
        print(string, file=sys.stderr)
        if args.output:
            output.append(string)

    def format_path(path: str) -> str:
        """Format a path for clearer display."""
        path = os.path.relpath(path, args.source_root)
        if os.path.isabs(path):
            return path
        else:
            return "//" + path

    log("Disallowed GN args:")
    for arg in disallowed_gn_args:
        log(f" {arg}")
    log(f"\nUsing regex:\n {assignment_matcher_string}\n")

    # Gather the fuchsia.git product definitions.
    source_root = args.source_root
    products_dir = os.path.join(source_root, "products")
    product_def_paths = find_product_defs(products_dir)

    # Gather the vendor product definitions (if they exist).
    product_def_paths.extend(find_vendor_product_defs(source_root))

    log("Scanning product defs:")
    results = {}
    for path in sorted(product_def_paths):
        result = validate_product_def(path)
        if result:
            results[path] = result
            log(f" {format_path(path)}: FAIL")
        else:
            log(f" {format_path(path)}: PASS")

    if results:
        error(
            "\nFound use of deprecated / disallowed GN arg in product definition:"
        )
        for (path, errors) in results.items():
            error(f" {format_path(path)}")
            for line_num, error_string in errors:
                error(f"  {line_num}: {error_string}")
        return -1

    if args.depfile:
        if args.output:
            depfile = DepFile(args.output.name)
            depfile.update(product_def_paths)
            depfile.write_to(args.depfile)
        else:
            error("Cannot create a depfile without an output file")
            return -2

    if args.output:
        for line in output:
            print(line, file=args.output)

    return 0
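
# A hypothetical sketch of a checker like validate_product_def(), grounded
# only in how its result is consumed above: a falsy value means the product
# definition passed, and a non-empty list of (line_num, message) tuples means
# it failed.  The regex, its capture groups, and the disallowed_gn_args list
# are assumptions here; the real helpers live elsewhere in the script.
import re


def validate_product_def_sketch(path, disallowed_args, matcher: "re.Pattern"):
    errors = []
    with open(path, 'r') as f:
        for line_num, line in enumerate(f, start=1):
            # Assumes the first capture group of the matcher is the GN arg name.
            match = matcher.match(line)
            if match and match.group(1) in disallowed_args:
                errors.append((line_num, line.strip()))
    return errors

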
def main():
    parser = argparse.ArgumentParser(
        description=
        'Create a flat list of files included in the images. This is used to inform infrastructure what files to upload'
    )
    parser.add_argument(
        '--product-config', type=argparse.FileType('r'), required=True)
    parser.add_argument(
        '--images-config', type=argparse.FileType('r'), required=True)
    parser.add_argument(
        '--partitions-config', type=argparse.FileType('r'), required=True)
    parser.add_argument('--sources', type=str, nargs='*', default=[])
    parser.add_argument('--output', type=argparse.FileType('w'), required=True)
    parser.add_argument('--depfile', type=argparse.FileType('w'), required=True)
    args = parser.parse_args()

    # The files to put in the output with source mapped to destination.
    file_mapping = {}

    # Add a file or directory path to the mapping, relative to CWD.
    # The destination is the path when placed inside "built/artifacts".
    # If the path is prefixed with ../../, the prefix is removed.
    def add_source(source):
        # Absolute paths are not portable out-of-tree, therefore if a file is
        # using an absolute path we throw an error.
        if os.path.isabs(source):
            raise Exception("Absolute paths are not allowed", source)
        source = os.path.relpath(source, os.getcwd())
        prefix = "../../"
        if source.startswith(prefix):
            destination = source[len(prefix):]
        else:
            destination = os.path.join("built/artifacts", source)
        file_mapping[source] = destination

    # Add a package and all the included blobs.
    manifests_for_depfile = []

    def add_package(entry: Dict):
        manifest = entry["manifest"]
        manifests_for_depfile.append(manifest)
        add_source(manifest)
        with open(manifest, 'r') as f:
            manifest = json.load(f)
            for blob in manifest.get("blobs", []):
                add_source(blob["source_path"])
        for config in entry.get("config_data", []):
            add_source(config["source"])

    # Add the product config.
    add_source(args.product_config.name)
    product_config = json.load(args.product_config)
    if "product" in product_config:
        product = product_config["product"]
        if "packages" in product:
            packages = product["packages"]
            for package in packages.get("base", []):
                add_package(package)
            for package in packages.get("cache", []):
                add_package(package)

    # Add the images config.
    add_source(args.images_config.name)
    images = json.load(args.images_config).get("images", [])
    for image in images:
        if image["type"] == "vbmeta":
            add_source(image["key"])
            add_source(image["key_metadata"])
            if "additional_descriptor_files" in image:
                for descriptor in image["additional_descriptor_files"]:
                    add_source(descriptor)
        elif image["type"] == "zbi":
            if "postprocessing_script" in image:
                add_source(image["postprocessing_script"]["path"])

    # Add the partitions config.
    add_source(args.partitions_config.name)
    partitions_config = json.load(args.partitions_config)
    for cred in partitions_config.get("unlock_credentials", []):
        add_source(cred)
    for part in partitions_config.get("bootloader_partitions", []):
        add_source(part["image"])
    for part in partitions_config.get("bootstrap_partitions", []):
        add_source(part["image"])

    # Add any additional sources to copy.
    for source in args.sources:
        add_source(source)

    # Convert the map into a list of maps.
    files = []
    for src, dest in file_mapping.items():
        files.append({
            "source": src,
            "destination": dest,
        })

    # Write a depfile with any opened package manifests.
    if manifests_for_depfile:
        depfile = DepFile(args.output.name)
        depfile.update(manifests_for_depfile)
        depfile.write_to(args.depfile)

    # Write the list.
    json.dump(files, args.output, indent=2)
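
# A quick illustration of the source -> destination mapping that add_source()
# above produces, assuming the script runs from an out/ directory two levels
# below the source root (which is where the "../../" prefix comes from).  The
# example paths are made up; real entries come from the product, images, and
# partitions configs.
import os


def destination_for_sketch(source: str) -> str:
    # Mirrors the add_source() destination logic above, for illustration only.
    prefix = "../../"
    if source.startswith(prefix):
        return source[len(prefix):]
    return os.path.join("built/artifacts", source)


# e.g. destination_for_sketch("../../src/some/config.json") == "src/some/config.json"
# and  destination_for_sketch("obj/fuchsia.zbi") == "built/artifacts/obj/fuchsia.zbi"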