Example #1
    def BuildImage(self):
        """Write the image to a file"""
        fname = tools.GetOutputFilename(self._filename)
        tout.Info("Writing image to '%s'" % fname)
        with open(fname, 'wb') as fd:
            data = self.GetPaddedData()
            fd.write(data)
        tout.Info("Wrote %#x bytes" % len(data))
Example #2
    def ReadData(self, decomp=True):
        """Read this entry's data by slicing it from the parent section's data"""
        tout.Info("ReadData path='%s'" % self.GetPath())
        parent_data = self.section.ReadData(True)
        tout.Info(
            '%s: Reading data from offset %#x-%#x, size %#x' %
            (self.GetPath(), self.offset, self.offset + self.size, self.size))
        data = parent_data[self.offset:self.offset + self.size]
        return data
Example #3
    def ReadData(self, decomp=True):
        """Read this entry's data from the parent section

        The slice is adjusted by the section's _skip_at_start offset.
        """
        tout.Info("ReadData path='%s'" % self.GetPath())
        parent_data = self.section.ReadData(True)
        offset = self.offset - self.section._skip_at_start
        data = parent_data[offset:offset + self.size]
        tout.Info(
            '%s: Reading data from offset %#x-%#x (real %#x), size %#x, got %#x' %
            (self.GetPath(), self.offset, self.offset + self.size, offset,
             self.size, len(data)))
        return data
Example #4
def WriteEntry(image_fname, entry_path, data, do_compress=True,
               allow_resize=True, write_map=False):
    """Replace an entry in an image

    This replaces the data in a particular entry in an image. The size of the
    new data must match the size of the old data unless allow_resize is True.

    Args:
        image_fname: Image filename to process
        entry_path: Path to entry to replace
        data: Data to replace with
        do_compress: True to compress the data if needed, False if data is
            already compressed so should be used as is
        allow_resize: True to allow entries to change size (this does a re-pack
            of the entries), False to raise an exception
        write_map: True to write a map file

    Returns:
        Image object that was updated
    """
    tout.Info("Write entry '%s', file '%s'" % (entry_path, image_fname))
    image = Image.FromFile(image_fname)
    entry = image.FindEntryPath(entry_path)
    WriteEntryToImage(image, entry, data, do_compress=do_compress,
                      allow_resize=allow_resize, write_map=write_map)

    return image
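
For reference, a minimal usage sketch of WriteEntry(); the import path assumes binman's control module, and the image filename, entry path and input file are placeholders:

# Hypothetical usage: put the contents of 'new-u-boot.bin' into the
# '/u-boot' entry of 'image.bin'. Module path and filenames are assumptions.
from binman import control

with open('new-u-boot.bin', 'rb') as fd:
    data = fd.read()
image = control.WriteEntry('image.bin', '/u-boot', data,
                           do_compress=True, allow_resize=True)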
Example #5
    def run_cmd_result(self, *args, binary=False, raise_on_error=True):
        """Run the bintool using command-line arguments

        Args:
            args (list of str): Arguments to provide, in addition to the bintool
                name
            binary (bool): True to return output as bytes instead of str
            raise_on_error (bool): True to raise a ValueError exception if the
                tool returns a non-zero return code

        Returns:
            CommandResult: Resulting output from the bintool, or None if the
                tool is not present
        """
        if self.name in self.missing_list:
            return None
        name = os.path.expanduser(self.name)  # Expand paths containing ~
        all_args = (name, ) + args
        env = tools.get_env_with_path()
        tout.Detail(f"bintool: {' '.join(all_args)}")
        result = command.RunPipe([all_args],
                                 capture=True,
                                 capture_stderr=True,
                                 env=env,
                                 raise_on_error=False,
                                 binary=binary)

        if result.return_code:
            # Return None if the tool was not found. In this case there is no
            # output from the tool and it does not appear on the path. We still
            # try to run it (as above) since RunPipe() allows faking the tool's
            # output
            if not any([result.stdout, result.stderr, tools.tool_find(name)]):
                tout.Info(f"bintool '{name}' not found")
                return None
            if raise_on_error:
                tout.Info(f"bintool '{name}' failed")
                raise ValueError("Error %d running '%s': %s" %
                                 (result.return_code, ' '.join(all_args),
                                  result.stderr or result.stdout))
        if result.stdout:
            tout.Debug(result.stdout)
        if result.stderr:
            tout.Debug(result.stderr)
        return result
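
A hedged sketch of how a caller might use run_cmd_result(); it relies only on the fields shown above (return_code, stdout, stderr), and the helper and its argument are illustrative, not binman's actual API:

# Hypothetical helper: report a bintool's version using run_cmd_result().
# 'tool' is assumed to be an instance of a class providing the method above.
def report_version(tool):
    result = tool.run_cmd_result('--version', raise_on_error=False)
    if result is None:
        print('tool is not installed or not on PATH')
    elif result.return_code:
        print('tool failed:', result.stderr or result.stdout)
    else:
        print('version:', result.stdout.strip())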
Example #6
def WriteEntryToImage(image,
                      entry,
                      data,
                      do_compress=True,
                      allow_resize=True,
                      write_map=False):
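    """Replace the data in one entry of an image and write the image back out

    The image must already be loaded (e.g. with Image.FromFile(), as in
    WriteEntry() above). The arguments mirror WriteEntry(): do_compress
    controls compression of the new data, allow_resize permits a re-pack and
    write_map requests a map file.
    """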
    BeforeReplace(image, allow_resize)
    tout.Info('Writing data to %s' % entry.GetPath())
    ReplaceOneEntry(image, entry, data, do_compress, allow_resize)
    AfterReplace(image, allow_resize=allow_resize, write_map=write_map)
Example #7
def PrepareFromLoadedData(image):
    """Get device tree files ready for use with a loaded image

    Loaded images are different from images that are being created by binman,
    since there is generally already an fdtmap and we read the description from
    that. This provides the position and size of every entry in the image with
    no calculation required.

    This function uses the same output_fdt_info[] as Prepare(). It finds the
    device tree files, adds a reference to the fdtmap and sets the FDT path
    prefix to translate from the fdtmap (where the root node is the image node)
    to the normal device tree (where the image node is under a /binman node).

    Args:
        image: Image object that was loaded, providing the fdtmap
    """
    global output_fdt_info, main_dtb, fdt_path_prefix

    tout.Info('Preparing device trees')
    output_fdt_info.clear()
    fdt_path_prefix = ''
    output_fdt_info['fdtmap'] = [image.fdtmap_dtb, 'u-boot.dtb', None]
    main_dtb = None
    tout.Info("   Found device tree type 'fdtmap' '%s'" %
              image.fdtmap_dtb.name)
    for etype, value in image.GetFdts().items():
        entry, fname = value
        out_fname = tools.GetOutputFilename('%s.dtb' % entry.etype)
        tout.Info("   Found device tree type '%s' at '%s' path '%s'" %
                  (etype, out_fname, entry.GetPath()))
        entry._filename = entry.GetDefaultFilename()
        data = entry.ReadData()

        tools.WriteFile(out_fname, data)
        dtb = fdt.Fdt(out_fname)
        dtb.Scan()
        image_node = dtb.GetNode('/binman')
        if 'multiple-images' in image_node.props:
            image_node = dtb.GetNode('/binman/%s' % image.image_node)
        fdt_path_prefix = image_node.path
        output_fdt_info[etype] = [dtb, None, entry]
    tout.Info("   FDT path prefix '%s'" % fdt_path_prefix)
Example #8
def AfterReplace(image, allow_resize, write_map):
    """Handle write out an image after replacing entries in it

    Args:
        image: Image to write
        allow_resize: True to allow entries to change size (this does a re-pack
            of the entries), False to raise an exception
        write_map: True to write a map file
    """
    tout.Info('Processing image')
    ProcessImage(image, update_fdt=True, write_map=write_map,
                 get_contents=False, allow_resize=allow_resize)
Example #9
    def ReadChildData(self, child, decomp=True):
        """Read the data for a child entry, decompressing it if requested

        Args:
            child: Child entry to read data for
            decomp: True to decompress the child's data if it is compressed
        """
        tout.Debug("ReadChildData for child '%s'" % child.GetPath())
        parent_data = self.ReadData(True)
        offset = child.offset - self._skip_at_start
        tout.Debug("Extract for child '%s': offset %#x, skip_at_start %#x, result %#x" %
                   (child.GetPath(), child.offset, self._skip_at_start, offset))
        data = parent_data[offset:offset + child.size]
        if decomp:
            indata = data
            data = tools.Decompress(indata, child.compress)
            if child.uncomp_size:
                tout.Info("%s: Decompressing data size %#x with algo '%s' to data size %#x" %
                          (child.GetPath(), len(indata), child.compress,
                           len(data)))
        return data
Example #10
    def ReadChildData(self, child, decomp=True, alt_format=None):
        """Read the data for a child entry, optionally decompressing/converting it

        Args:
            child: Child entry to read data for
            decomp: True to decompress the child's data if it is compressed
            alt_format: Alternative format to return the data in, or None
        """
        tout.Debug(f"ReadChildData for child '{child.GetPath()}'")
        parent_data = self.ReadData(True, alt_format)
        offset = child.offset - self._skip_at_start
        tout.Debug("Extract for child '%s': offset %#x, skip_at_start %#x, result %#x" %
                   (child.GetPath(), child.offset, self._skip_at_start, offset))
        data = parent_data[offset:offset + child.size]
        if decomp:
            indata = data
            data = tools.Decompress(indata, child.compress)
            if child.uncomp_size:
                tout.Info("%s: Decompressing data size %#x with algo '%s' to data size %#x" %
                          (child.GetPath(), len(indata), child.compress,
                           len(data)))
        if alt_format:
            new_data = child.GetAltFormat(data, alt_format)
            if new_data is not None:
                data = new_data
        return data
Example #11
    def UseExpanded(cls, node, etype, new_etype):
        """Check whether to use an expanded entry type

        This is called by Entry.Create() when it finds an expanded version of
        an entry type (e.g. 'u-boot-expanded'). If this method returns True then
        it will be used (e.g. in place of 'u-boot'). If it returns False, it is
        ignored.

        Args:
            node:     Node object containing information about the entry to
                      create
            etype:    Original entry type being used
            new_etype: New entry type proposed

        Returns:
            True to use this entry type, False to use the original one
        """
        tout.Info("Node '%s': etype '%s': %s selected" %
                  (node.path, etype, new_etype))
        return True
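
Since Entry.Create() consults this hook, a hedged sketch of an entry type that declines the expanded variant might look like the following; the class name is illustrative and the import paths are assumptions about where binman keeps its Entry base class and logger:

# Hypothetical entry type (name and import paths are assumptions).
from binman.entry import Entry
from patman import tout

class Entry_my_blob(Entry):
    @classmethod
    def UseExpanded(cls, node, etype, new_etype):
        # Always decline the expanded variant so the original type is used
        tout.Info("Node '%s': etype '%s': expanded type '%s' rejected" %
                  (node.path, etype, new_etype))
        return False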
Example #12
def UpdateFile(infile, outfile, start_sym, end_sym, insert):
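    """Insert data into a file between the offsets of two symbols

    Reads infile, replaces the region between start_sym and end_sym with
    'insert' (zero-padded up to the full region size) and writes the result
    to outfile. Raises ValueError if either symbol is missing or the data
    does not fit in the region.
    """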
    tout.Notice(
        "Creating file '%s' with data length %#x (%d) between symbols '%s' and '%s'"
        % (outfile, len(insert), len(insert), start_sym, end_sym))
    syms = GetSymbolFileOffset(infile, [start_sym, end_sym])
    if len(syms) != 2:
        raise ValueError(
            "Expected two symbols '%s' and '%s': got %d: %s" %
            (start_sym, end_sym, len(syms), ','.join(syms.keys())))

    size = syms[end_sym].offset - syms[start_sym].offset
    if len(insert) > size:
        raise ValueError(
            "Not enough space in '%s' for data length %#x (%d); size is %#x (%d)"
            % (infile, len(insert), len(insert), size, size))

    data = tools.ReadFile(infile)
    newdata = data[:syms[start_sym].offset]
    newdata += insert + tools.GetBytes(0, size - len(insert))
    newdata += data[syms[end_sym].offset:]
    tools.WriteFile(outfile, newdata)
    tout.Info('Written to offset %#x' % syms[start_sym].offset)
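
A minimal usage sketch, assuming UpdateFile() above is in scope; the filenames and symbol names are placeholders:

# Hypothetical call: embed a blob between two marker symbols in a copy of
# an ELF file. Filenames and symbol names are illustrative only.
blob = b'example payload'
UpdateFile('input.elf', 'output.elf', '__blob_start', '__blob_end', blob)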
Example #13
def ProcessImage(image, update_fdt, write_map, get_contents=True,
                 allow_resize=True, allow_missing=False):
    """Perform all steps for this image, including checking and # writing it.

    This means that errors found with a later image will be reported after
    earlier images are already completed and written, but that does not seem
    important.

    Args:
        image: Image to process
        update_fdt: True to update the FDT with entry offsets, etc.
        write_map: True to write a map file
        get_contents: True to get the image contents from files, etc., False if
            the contents are already present
        allow_resize: True to allow entries to change size (this does a re-pack
            of the entries), False to raise an exception
        allow_missing: Allow blob_ext objects to be missing

    Returns:
        True if one or more external blobs are missing, False if all are present
    """
    if get_contents:
        image.SetAllowMissing(allow_missing)
        image.GetEntryContents()
    image.GetEntryOffsets()

    # We need to pack the entries to figure out where everything
    # should be placed. This sets the offset/size of each entry.
    # However, after packing we call ProcessEntryContents() which
    # may result in an entry changing size. In that case we need to
    # do another pass. Since the device tree often contains the
    # final offset/size information we try to make space for this in
    # AddMissingProperties() above. However, if the device tree is
    # compressed we cannot know this compressed size in advance,
    # since changing an offset from 0x100 to 0x104 (for example) can
    # alter the compressed size of the device tree. So we need a
    # third pass for this.
    passes = 5
    for pack_pass in range(passes):
        try:
            image.PackEntries()
            image.CheckSize()
            image.CheckEntries()
        except Exception as e:
            if write_map:
                fname = image.WriteMap()
                print("Wrote map file '%s' to show errors"  % fname)
            raise
        image.SetImagePos()
        if update_fdt:
            image.SetCalculatedProperties()
            for dtb_item in state.GetAllFdts():
                dtb_item.Sync()
                dtb_item.Flush()
        image.WriteSymbols()
        sizes_ok = image.ProcessEntryContents()
        if sizes_ok:
            break
        image.ResetForPack()
    tout.Info('Pack completed after %d pass(es)' % (pack_pass + 1))
    if not sizes_ok:
        image.Raise('Entries changed size after packing (tried %s passes)' %
                    passes)

    image.BuildImage()
    if write_map:
        image.WriteMap()
    missing_list = []
    image.CheckMissing(missing_list)
    if missing_list:
        tout.Warning("Image '%s' is missing external blobs and is non-functional: %s" %
                     (image.name, ' '.join([e.name for e in missing_list])))
    return bool(missing_list)
Example #14
def ReplaceEntries(image_fname, input_fname, indir, entry_paths,
                   do_compress=True, allow_resize=True, write_map=False):
    """Replace the data from one or more entries from input files

    Args:
        image_fname: Image filename to process
        input_fname: Single input filename to use if replacing one file, None
            otherwise
        indir: Input directory to use (for any number of files), else None
        entry_paths: List of entry paths to extract
        do_compress: True if the input data is uncompressed and may need to be
            compressed if the entry requires it, False if the data is already
            compressed.
        allow_resize: True to allow entries to change size (this does a re-pack
            of the entries), False to raise an exception
        write_map: True to write a map file

    Returns:
        Image object that was updated, or None if a single entry was replaced
            from input_fname
    """
    image = Image.FromFile(image_fname)

    # Replace an entry from a single file, as a special case
    if input_fname:
        if not entry_paths:
            raise ValueError('Must specify an entry path to write with -f')
        if len(entry_paths) != 1:
            raise ValueError('Must specify exactly one entry path to write with -f')
        entry = image.FindEntryPath(entry_paths[0])
        data = tools.ReadFile(input_fname)
        tout.Notice("Read %#x bytes from file '%s'" % (len(data), input_fname))
        WriteEntryToImage(image, entry, data, do_compress=do_compress,
                          allow_resize=allow_resize, write_map=write_map)
        return

    # Otherwise read each entry's data from a file under indir whose relative
    # path matches the entry's path. This means that files must appear in
    # subdirectories if they are part of a sub-section.
    einfos = image.GetListEntries(entry_paths)[0]
    tout.Notice("Replacing %d matching entries in image '%s'" %
                (len(einfos), image_fname))

    BeforeReplace(image, allow_resize)

    for einfo in einfos:
        entry = einfo.entry
        if entry.GetEntries():
            tout.Info("Skipping section entry '%s'" % entry.GetPath())
            continue

        path = entry.GetPath()[1:]
        fname = os.path.join(indir, path)

        if os.path.exists(fname):
            tout.Notice("Write entry '%s' from file '%s'" %
                        (entry.GetPath(), fname))
            data = tools.ReadFile(fname)
            ReplaceOneEntry(image, entry, data, do_compress, allow_resize)
        else:
            tout.Warning("Skipping entry '%s' from missing file '%s'" %
                         (entry.GetPath(), fname))

    AfterReplace(image, allow_resize=allow_resize, write_map=write_map)
    return image
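
A hedged sketch of calling ReplaceEntries() in its directory mode, assuming it is importable from binman's control module; the image filename, input directory and entry paths are placeholders:

# Hypothetical usage: replace two entries from files laid out under 'indir'
# so that each file's relative path matches its entry path.
from binman import control

control.ReplaceEntries('image.bin', None, 'indir',
                       ['/u-boot', '/u-boot-dtb'],
                       do_compress=True, allow_resize=True, write_map=False)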