def emit_manifests(args, selected, unselected, input_binaries):
    """Write the output manifests, the build-ID list, and the depfile.

    Args:
      args: parsed command-line arguments; reads args.manifest, args.output,
          args.stripped_dir, args.build_id_file, and args.depfile.
      selected: manifest entries selected for the output images.
      unselected: manifest entries not selected (mined for auxiliary binaries).
      input_binaries: extra binaries to consider alongside the manifests.
    """

    def update_file(file, contents):
        # Rewrite `file` only if its contents would actually change, so
        # unchanged outputs keep their timestamps and don't trigger rebuilds.
        if os.path.exists(file) and os.path.getsize(file) == len(contents):
            with open(file, 'r') as f:
                if f.read() == contents:
                    return
        with open(file, 'w') as f:
            f.write(contents)

    # The name of every file we examine to make decisions goes into this set.
    examined = set(args.manifest)

    # Collect all the inputs and reify.
    aux_binaries = collect_auxiliaries(unselected, examined)
    binaries, nonbinaries = collect_binaries(selected, input_binaries,
                                             aux_binaries, examined)

    # Prepare to collate groups.
    outputs = [output_manifest(file, []) for file in args.output]

    # Finalize the output binaries.
    binaries, debug_files = strip_binary_manifest(binaries, args.stripped_dir,
                                                  examined)

    # Collate groups.
    for entry in itertools.chain((binary.entry for binary in binaries),
                                 nonbinaries):
        outputs[entry.group].manifest.append(entry._replace(group=None))

    # Index debug files by build ID so duplicates collapse to one entry.
    all_debug_files = {info.build_id: info for info in debug_files}

    # Emit each primary manifest.
    for output in outputs:
        # The depfile target is the last manifest written.
        depfile_output = output.file
        # Sort so that functionally identical output is textually identical.
        output.manifest.sort(key=lambda entry: entry.target)
        update_file(output.file,
                    manifest.format_manifest_file(output.manifest))

    # Emit the build ID list.
    # Sort so that functionally identical output is textually identical.
    # NOTE: .values() (not Python-2-only .itervalues()) so this also runs
    # under Python 3; behavior is identical.
    debug_files = sorted(all_debug_files.values(),
                         key=lambda info: info.build_id)
    update_file(
        args.build_id_file,
        ''.join(info.build_id + ' ' + os.path.abspath(info.filename) + '\n'
                for info in debug_files))

    # Emit the depfile.
    if args.depfile:
        with open(args.depfile, 'w') as f:
            f.write(depfile_output + ':')
            for file in sorted(examined):
                f.write(' ' + file)
            f.write('\n')
def emit_manifests(args, selected, unselected, input_binaries):
    """Collate the selected entries into output manifests and a depfile.

    Reads args.manifest, args.output, args.stripped_dir, args.build_id_dir,
    args.toolchain_lib_dir, and args.depfile.
    """

    def update_file(path, contents, force=False):
        # Leave an identical file untouched (preserving its timestamp)
        # unless `force` demands a rewrite.
        if not force and os.path.exists(path):
            if os.path.getsize(path) == len(contents):
                with open(path, 'r') as f:
                    if f.read() == contents:
                        return
        with open(path, 'w') as f:
            f.write(contents)

    # The name of every file we examine to make decisions goes into this set.
    examined = set(args.manifest)

    # Gather auxiliary binaries from the unselected entries, then resolve
    # the selected entries into binaries and non-binary files.
    aux_binaries = collect_auxiliaries(unselected, examined)
    binaries, nonbinaries = collect_binaries(
        selected, input_binaries, aux_binaries, examined)

    # One output manifest per requested output file.
    outputs = [output_manifest(file, []) for file in args.output]

    # Finalize the output binaries. If stripping wrote any new/changed files,
    # then force an update of the manifest file even if it's identical. The
    # manifest file's timestamp is what GN/Ninja sees as running this script
    # having touched any of its outputs, and GN/Ninja doesn't know that the
    # stripped files are implicit outputs (there's no such thing as a depfile
    # for outputs, only for inputs).
    binaries, debug_files, force_update = strip_binary_manifest(
        binaries, args.stripped_dir, args.build_id_dir,
        args.toolchain_lib_dir, examined)

    # Route every entry into its group's manifest, dropping the group tag.
    for entry in itertools.chain(
            (binary.entry for binary in binaries), nonbinaries):
        outputs[entry.group].manifest.append(entry._replace(group=None))

    all_binaries = {binary.info.build_id: binary.entry for binary in binaries}
    all_debug_files = {info.build_id: info for info in debug_files}

    # Write each primary manifest; remember the last one for the depfile.
    for output in outputs:
        depfile_output = output.file
        # Deterministic ordering: identical content yields identical text.
        output.manifest.sort(key=lambda e: e.target)
        update_file(
            output.file,
            manifest.format_manifest_file(output.manifest),
            force_update)

    # Emit the depfile.
    if args.depfile:
        with open(args.depfile, 'w') as f:
            f.write(depfile_output + ':')
            for file in sorted(examined):
                f.write(' ' + file)
            f.write('\n')
def emit_manifests(args, selected, unselected, input_binaries,
                   standalone_output):
    """Write system-image manifests, standalone manifests, the build-ID
    list, and the depfile.

    Args:
      args: parsed command-line arguments; reads args.manifest, args.output,
          args.build_id_file, and args.depfile.
      selected: manifest entries selected for the system image.
      unselected: manifest entries not selected (mined for auxiliary binaries).
      input_binaries: extra binaries to consider alongside the manifests.
      standalone_output: dict mapping an output manifest filename to the
          entries selected for that standalone manifest.
    """

    def update_file(file, contents):
        # Rewrite `file` only if its contents would actually change, so
        # unchanged outputs keep their timestamps and don't trigger rebuilds.
        if os.path.exists(file) and os.path.getsize(file) == len(contents):
            with open(file, 'r') as f:
                if f.read() == contents:
                    return
        with open(file, 'w') as f:
            f.write(contents)

    # The name of every file we examine to make decisions goes into this set.
    # (Loop variable renamed from `input`, which shadowed the builtin.)
    examined = set(manifest_input.file for manifest_input in args.manifest)

    # Collect all the inputs and reify.
    aux_binaries = collect_auxiliaries(unselected, examined)
    binaries, nonbinaries = collect_binaries(selected, input_binaries,
                                             aux_binaries, examined)

    # Finalize the output binaries.
    binaries, debug_files = strip_binary_manifest(binaries, 'stripped',
                                                  examined)

    # Collate groups; entries with no group are dropped from the primary
    # manifests (they may still appear in standalone manifests below).
    outputs = [output_manifest(file, []) for file in args.output]
    for entry in itertools.chain((binary.entry for binary in binaries),
                                 nonbinaries):
        if entry.group is not None:
            outputs[entry.group].manifest.append(entry._replace(group=None))

    # Index what we've processed by build ID so standalone manifests can
    # reuse the stripping/debug-file work already done.
    all_binaries = {binary.info.build_id: binary.entry for binary in binaries}
    all_debug_files = {info.build_id: info for info in debug_files}

    # TODO(US-390): As a stopgap until there is a smarter loader service,
    # we'll toss every shared library used by any package into the system
    # manifest. Drop this behavior when it's no longer needed.
    global_soname = set(binary.info.soname for binary in binaries
                        if binary.info.soname)

    # Now handle the standalone outputs. These reuse the same aux_binaries,
    # but ignore all the work done for the system image manifests. For some
    # shared libraries it will be repeating the work already done, but doing
    # so lets it get different results for variants, which can be fine in
    # different standalone manifests, whereas everything in the system image
    # has to agree about the shared library variants to install.
    # NOTE: .items() (not Python-2-only .iteritems()) so this also runs
    # under Python 3; behavior is identical.
    for standalone_file, standalone_selected in standalone_output.items():
        binaries, nonbinaries = collect_binaries(standalone_selected, [],
                                                 aux_binaries, examined)

        # Partition into binaries that have already been used in other
        # output manifests and new binaries. For the reused binaries,
        # we can reuse the debug file discovery/stripping already done.
        reused_binaries = []
        new_binaries = []
        for binary in binaries:
            reused = all_binaries.get(binary.info.build_id, None)
            if reused is None:
                new_binaries.append(binary)
            else:
                reused_binaries.append(reused)

        # Find (or make) debug files for new binaries and update
        # the sets of binaries and debug files already processed.
        binaries, debug_files = strip_binary_manifest(new_binaries,
                                                      'stripped', examined)
        all_binaries.update(
            {binary.info.build_id: binary.entry for binary in binaries})
        all_debug_files.update(
            {info.build_id: info for info in debug_files})

        # TODO(US-390): Remove this later; see comment above.
        for binary in binaries:
            if binary.info.soname and binary.info.soname not in global_soname:
                outputs[-1].manifest.append(binary.entry._replace(group=None))
                global_soname.add(binary.info.soname)

        # Finally, emit the standalone manifest.
        # Sort so that functionally identical output is textually identical.
        update_file(standalone_file, manifest.format_manifest_file(sorted(
            (entry._replace(group=None)
             for entry in itertools.chain(
                 reused_binaries,
                 (binary.entry for binary in binaries),
                 nonbinaries)),
            key=lambda entry: entry.target)))

    # Emit each primary manifest.
    # Sort so that functionally identical output is textually identical.
    for output in outputs:
        output.manifest.sort(key=lambda entry: entry.target)
        update_file(output.file,
                    manifest.format_manifest_file(output.manifest))

    # Emit the build ID list.
    # Sort so that functionally identical output is textually identical.
    # NOTE: .values() (not Python-2-only .itervalues()) for Python 3 compat.
    debug_files = sorted(all_debug_files.values(),
                         key=lambda info: info.build_id)
    update_file(args.build_id_file, ''.join(
        info.build_id + ' ' + os.path.abspath(info.filename) + '\n'
        for info in debug_files))

    # Emit the depfile, naming the first primary manifest as the target.
    if args.depfile:
        with open(args.depfile, 'w') as f:
            f.write(outputs[0].file + ':')
            for file in sorted(examined):
                f.write(' ' + file)
            f.write('\n')