def PrepareOutputDir(dirname, preserve=False):
    """Select an output directory, ensuring it exists.

    Either uses the caller-supplied directory (creating it if needed) or
    creates a fresh temporary directory for intermediate and output files.
    A temporary directory is noted for later removal unless preserved.

    Args:
        dirname: Name of the output directory to use to store intermediate
            and output files, or None to create a temporary directory.
        preserve: If dirname is None and this is False, the created
            temporary directory will be destroyed on exit.

    Raises:
        CmdError: If the output directory cannot be created.
    """
    global outdir, preserve_outdir

    preserve_outdir = dirname or preserve
    if not dirname:
        # No directory given: make a throw-away one
        outdir = tempfile.mkdtemp(prefix='binman.')
        tout.Debug("Using temporary directory '%s'" % outdir)
        return
    outdir = dirname
    if not os.path.isdir(outdir):
        try:
            os.makedirs(outdir)
        except OSError as err:
            raise CmdError("Cannot make output directory '%s': '%s'" %
                           (outdir, err.strerror))
    tout.Debug("Using output directory '%s'" % outdir)
def ReadChildData(self, child, decomp=True):
    """Return the data for one child entry, sliced out of this section

    Reads this section's (uncompressed) contents, slices out the region
    belonging to the child and optionally decompresses it.

    Args:
        child: Child entry whose data is wanted
        decomp: True to decompress the child's data if it is compressed

    Returns:
        bytes: Data for the child entry
    """
    tout.Debug("ReadChildData for child '%s'" % child.GetPath())
    whole = self.ReadData(True)
    start = child.offset - self._skip_at_start
    tout.Debug("Extract for child '%s': offset %#x, skip_at_start %#x, result %#x" %
               (child.GetPath(), child.offset, self._skip_at_start, start))
    data = whole[start:start + child.size]
    if not decomp:
        return data
    uncomp = tools.Decompress(data, child.compress)
    if child.uncomp_size:
        tout.Info("%s: Decompressing data size %#x with algo '%s' to data size %#x" %
                  (child.GetPath(), len(data), child.compress, len(uncomp)))
    return uncomp
def run_cmd_result(self, *args, binary=False, raise_on_error=True):
    """Run the bintool using command-line arguments

    Args:
        args (list of str): Arguments to provide, in addition to the bintool
            name
        binary (bool): True to return output as bytes instead of str
        raise_on_error (bool): True to raise a ValueError exception if the
            tool returns a non-zero return code

    Returns:
        CommandResult: Resulting output from the bintool, or None if the
            tool is not present
    """
    if self.name in self.missing_list:
        return None
    name = os.path.expanduser(self.name)  # Expand paths containing ~
    all_args = (name, ) + args
    env = tools.get_env_with_path()
    tout.Detail(f"bintool: {' '.join(all_args)}")
    result = command.RunPipe([all_args], capture=True, capture_stderr=True,
                             env=env, raise_on_error=False, binary=binary)
    if result.return_code:
        # Return None if the tool was not found. In this case there is no
        # output from the tool and it does not appear on the path. We still
        # try to run it (as above) since RunPipe() allows faking the tool's
        # output
        tool_present = tools.tool_find(name)
        if not (result.stdout or result.stderr or tool_present):
            tout.Info(f"bintool '{name}' not found")
            return None
        if raise_on_error:
            tout.Info(f"bintool '{name}' failed")
            raise ValueError("Error %d running '%s': %s" %
                             (result.return_code, ' '.join(all_args),
                              result.stderr or result.stdout))
    # Record any output from the tool for debugging
    for stream in (result.stdout, result.stderr):
        if stream:
            tout.Debug(stream)
    return result
def ReadChildData(self, child, decomp=True, alt_format=None):
    """Read the data for a child entry out of this section's data

    Args:
        child: Child entry whose data is wanted
        decomp: True to decompress the child's data if it is compressed
        alt_format: Alternative format to return the data in, or None for
            the default (passed through to the child's GetAltFormat())

    Returns:
        bytes: Data for the child entry, possibly decompressed and/or
            converted to the alternative format
    """
    tout.Debug(f"ReadChildData for child '{child.GetPath()}'")
    # Read the whole parent (uncompressed) and slice out this child's region
    parent_data = self.ReadData(True, alt_format)
    offset = child.offset - self._skip_at_start
    tout.Debug("Extract for child '%s': offset %#x, skip_at_start %#x, result %#x" %
               (child.GetPath(), child.offset, self._skip_at_start, offset))
    data = parent_data[offset:offset + child.size]
    if decomp:
        indata = data
        data = tools.Decompress(indata, child.compress)
        if child.uncomp_size:
            tout.Info("%s: Decompressing data size %#x with algo '%s' to data size %#x" %
                      (child.GetPath(), len(indata), child.compress, len(data)))
    if alt_format:
        # A None result means the child does not support this format, in
        # which case the data is returned unchanged
        new_data = child.GetAltFormat(data, alt_format)
        if new_data is not None:
            data = new_data
    return data
def SetInputDirs(dirname):
    """Record the list of input directories, where input files are kept.

    Args:
        dirname: List of paths to input directories to use for obtaining
            files needed by binman to place in the image.
    """
    global indir

    indir = dirname
    tout.Debug("Using input directories %s" % indir)
def SetEntryArgs(args):
    """Set the value of the entry args

    This sets up the entry_args dict which is used to supply entry arguments
    to entries.

    Args:
        args: List of entry arguments, each in the format "name=value"

    Raises:
        ValueError: If an argument does not contain an '=' separator
    """
    global entry_args

    entry_args = {}
    tout.Debug('Processing entry args:')
    if args:
        for arg in args:
            # Raw string for the regex; name is everything before the
            # first '=', value is the rest (and may itself contain '=')
            m = re.match(r'([^=]*)=(.*)', arg)
            if not m:
                # Fixed typo in the error message ('arguemnt' -> 'argument')
                raise ValueError("Invalid entry argument '%s'" % arg)
            name, value = m.groups()
            tout.Debug(' %20s = %s' % (name, value))
            entry_args[name] = value
    tout.Debug('Processing entry args done')
def ProcessEntryContents(self):
    """Call the ProcessContents() method for each entry

    This is intended to adjust the contents as needed by the entry type.

    Returns:
        True if the new data size is OK, False if expansion is needed
    """
    sizes_ok = True
    for entry in self._entries.values():
        if not entry.ProcessContents():
            sizes_ok = False
            # Log the path of the entry that changed size (the original
            # logged this section's own path, which is misleading since
            # the message reads "Entry '...' size change")
            tout.Debug("Entry '%s' size change" % entry.GetPath())
    return sizes_ok
def _AddEntries(areas, entry):
    """Add a FmapArea for this entry (and its subentries) to 'areas'

    Sections (other than CBFS) are recursed into so that only their leaf
    entries produce areas; CBFS entries and plain entries each produce a
    single area.

    Args:
        areas: List of fmap_util.FmapArea to append to
        entry: Entry to add, recursively
    """
    entries = entry.GetEntries()
    tout.Debug("fmap: Add entry '%s' type '%s' (%s subentries)" %
               (entry.GetPath(), entry.etype, ToHexSize(entries)))
    if entries and entry.etype != 'cbfs':
        # A section: emit areas for each subentry, not the section itself
        for subentry in entries.values():
            _AddEntries(areas, subentry)
    else:
        pos = entry.image_pos
        if pos is not None:
            # Convert the image position into an fmap offset
            pos -= entry.section.GetRootSkipAtStart()
        areas.append(
            fmap_util.FmapArea(pos or 0, entry.size or 0,
                               tools.FromUnicode(entry.name), 0))
def LookupAndWriteSymbols(elf_fname, entry, section):
    """Replace all symbols in an entry with their correct values

    The entry contents is updated so that values for referenced symbols
    will be visible at run time. This is done by finding out the symbols
    offsets in the entry (using the ELF file) and replacing them with
    values from binman's data structures.

    Args:
        elf_fname: Filename of ELF image containing the symbol information
            for entry
        entry: Entry to process
        section: Section which can be used to lookup symbol values

    Raises:
        ValueError: If a symbol lies outside the entry's contents, or has
            a size other than 4 or 8 bytes
    """
    fname = tools.GetInputFilename(elf_fname)
    syms = GetSymbols(fname, ['image', 'binman'])
    if not syms:
        return
    # All binman symbol offsets are computed relative to this base symbol
    base = syms.get('__image_copy_start')
    if not base:
        return
    for name, sym in syms.items():
        if name.startswith('_binman'):
            msg = ("Section '%s': Symbol '%s'\n in entry '%s'" %
                   (section.GetPath(), name, entry.GetPath()))
            offset = sym.address - base.address
            # The symbol must fit entirely within the entry contents
            if offset < 0 or offset + sym.size > entry.contents_size:
                raise ValueError('%s has offset %x (size %x) but the contents '
                                 'size is %x' % (entry.GetPath(), offset,
                                                 sym.size,
                                                 entry.contents_size))
            # Choose a little-endian pack format matching the symbol size
            if sym.size == 4:
                pack_string = '<I'
            elif sym.size == 8:
                pack_string = '<Q'
            else:
                raise ValueError('%s has size %d: only 4 and 8 are supported' %
                                 (msg, sym.size))

            # Look up the symbol in our entry tables.
            value = section.GetImage().LookupImageSymbol(name, sym.weak, msg,
                                                         base.address)
            if value is None:
                # Unknown symbol: store -1, which needs a signed pack format
                value = -1
                pack_string = pack_string.lower()
            value_bytes = struct.pack(pack_string, value)
            tout.Debug('%s:\n insert %s, offset %x, value %x, length %d' %
                       (msg, name, offset, value, len(value_bytes)))
            # Splice the packed value into the entry data at the symbol's
            # offset
            entry.data = (entry.data[:offset] + value_bytes +
                          entry.data[offset + sym.size:])
def ReadData(self, decomp=True):
    """Read the data for an entry from the image

    This is used when the image has been read in and we want to extract
    the data for a particular entry from that image.

    Args:
        decomp: True to decompress any compressed data before returning
            it; False to return the raw, uncompressed data

    Returns:
        Entry data (bytes)
    """
    # Use True here so that we get an uncompressed section to work from,
    # although compressed sections are currently not supported
    # (Debug message previously said 'ReadChildData' — a copy-paste from
    # the sibling method; corrected to name this method)
    tout.Debug("ReadData section '%s', entry '%s'" %
               (self.section.GetPath(), self.GetPath()))
    data = self.section.ReadChildData(self, decomp)
    return data
def ProcessContentsUpdate(self, data):
    """Update the contents of an entry, after the size is fixed

    This checks that the new data is the same size as the old. If the size
    has changed, this triggers a re-run of the packing algorithm.

    Args:
        data: Data to set to the contents (bytes)

    Raises:
        ValueError if the new data size is not the same as the old
    """
    size_ok = True
    new_size = len(data)
    if state.AllowEntryExpansion() and new_size > self.contents_size:
        # self.data will indicate the new size needed
        size_ok = False
    elif state.AllowEntryContraction() and new_size < self.contents_size:
        size_ok = False

    # If not allowed to change, try to deal with it or give up
    if size_ok:
        if new_size > self.contents_size:
            self.Raise('Cannot update entry size from %d to %d' %
                       (self.contents_size, new_size))

    # Don't let the data shrink. Pad it if necessary
    if size_ok and new_size < self.contents_size:
        data += tools.GetBytes(0, self.contents_size - new_size)

    if not size_ok:
        # A repack will be needed; note the size change for debugging
        tout.Debug("Entry '%s' size change from %s to %s" %
                   (self._node.path, ToHex(self.contents_size),
                    ToHex(new_size)))
    self.SetContents(data)
    return size_ok
def _AddEntries(areas, entry):
    """Append FmapArea records for this entry and all of its subentries

    Sections (other than CBFS) get an enclosing area followed by one area
    per subentry; CBFS entries and plain entries get a single area.

    Args:
        areas: List of fmap_util.FmapArea to append to
        entry: Entry to process, recursively
    """
    subentries = entry.GetEntries()
    tout.Debug("fmap: Add entry '%s' type '%s' (%s subentries)" %
               (entry.GetPath(), entry.etype, ToHexSize(subentries)))
    if subentries and entry.etype != 'cbfs':
        # Create an area for the section, which encompasses all entries
        # within it
        image_pos = entry.image_pos
        pos = 0 if image_pos is None else image_pos - entry.GetRootSkipAtStart()
        # Drop @ symbols in name
        areas.append(fmap_util.FmapArea(pos, entry.size or 0,
                                        entry.name.replace('@', ''), 0))
        for item in subentries.values():
            _AddEntries(areas, item)
    else:
        pos = entry.image_pos
        if pos is not None:
            pos -= entry.section.GetRootSkipAtStart()
        areas.append(
            fmap_util.FmapArea(pos or 0, entry.size or 0, entry.name, 0))
def ReadData(self, decomp=True):
    """Return the raw contents of this image

    Args:
        decomp: Unused here; the image data is returned as-is

    Returns:
        bytes: Data for the whole image
    """
    data = self._data
    tout.Debug("Image '%s' ReadData(), size=%#x" %
               (self.GetPath(), len(data)))
    return data
def _RemoveOutputDir():
    """Remove the temporary output directory and forget about it"""
    global outdir

    shutil.rmtree(outdir)
    tout.Debug("Deleted temporary directory '%s'" % outdir)
    # Clear the global so the directory is not used (or removed) again
    outdir = None