Example #1
    def testNoValue(self):
        """Test the case where we have no value for the symbol

        This should produce -1 values for all three symbols, taking up the
        first 16 bytes of the image.
        """
        entry = FakeEntry(20)
        section = FakeSection(sym_value=None)
        elf_fname = os.path.join(binman_dir, 'test', 'u_boot_binman_syms')
        syms = elf.LookupAndWriteSymbols(elf_fname, entry, section)
        self.assertEqual(
            tools.GetBytes(255, 16) + tools.GetBytes(ord('a'), 4), entry.data)
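All of these call sites rely on tools.GetBytes(byte, size). A minimal sketch of the behaviour they assume (a hypothetical stand-in, not the actual U-Boot implementation):

def get_bytes(byte, size):
    """Hypothetical stand-in for tools.GetBytes: 'size' copies of 'byte'."""
    return bytes([byte]) * size

# The expected data in the test above: 16 bytes of 0xff, then four 'a' bytes
assert get_bytes(255, 16) + get_bytes(ord('a'), 4) == b'\xff' * 16 + b'aaaa'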
Example #2
    def ObtainContents(self):
        fname = tools.GetInputFilename('spl/u-boot-spl')
        bss_size = elf.GetSymbolAddress(fname, '__bss_size')
        if not bss_size:
            self.Raise('Expected __bss_size symbol in spl/u-boot-spl')
        self.SetContents(tools.GetBytes(0, bss_size))
        return True
Example #3
    def AddZeroProp(self, prop_name):
        """Add a new property to the device tree with an integer value of 0.

        Args:
            prop_name: Name of property
        """
        self.props[prop_name] = Prop(self, None, prop_name,
                                     tools.GetBytes(0, 4))
Example #4
    def GetData(self):
        section_data = b''

        for entry in self._entries.values():
            data = entry.GetData()
            base = self.pad_before + (entry.offset or 0) - self._skip_at_start
            pad = base - len(section_data)
            if pad > 0:
                section_data += tools.GetBytes(self._pad_byte, pad)
            section_data += data
        if self.size:
            pad = self.size - len(section_data)
            if pad > 0:
                section_data += tools.GetBytes(self._pad_byte, pad)
        self.Detail('GetData: %d entries, total size %#x' %
                    (len(self._entries), len(section_data)))
        return section_data
Example #5
    def GetData(self):
        section_data = tools.GetBytes(self._pad_byte, self.size)

        for entry in self._entries.values():
            data = entry.GetData()
            base = self.pad_before + entry.offset - self._skip_at_start
            section_data = (section_data[:base] + data +
                            section_data[base + len(data):])
        return section_data
Example #6
    def GetData(self):
        section_data = tools.GetBytes(self._pad_byte, self.size)

        for entry in self._entries.values():
            data = entry.GetData()
            base = self.pad_before + entry.offset - self._skip_at_start
            section_data = (section_data[:base] + data +
                            section_data[base + len(data):])
        self.Detail('GetData: %d entries, total size %#x' %
                    (len(self._entries), len(section_data)))
        return section_data
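Examples #4, #5 and #6 show two padding strategies: appending pad bytes entry by entry (#4) versus pre-filling the whole section with the pad byte and splicing each entry's data in at its offset (#5 and #6). A minimal sketch of the splice pattern, with invented offsets and a 0xff pad byte:

def build_section(size, entries, pad_byte=0xff):
    """Sketch of the pre-fill-and-splice pattern; entries are (offset, data)."""
    section = bytes([pad_byte]) * size
    for offset, data in entries:
        # Overwrite the padding at 'offset' with this entry's data
        section = section[:offset] + data + section[offset + len(data):]
    return section

# 8-byte section with two entries; the gaps stay filled with the pad byte
assert build_section(8, [(0, b'AB'), (4, b'CD')]) == b'AB\xff\xffCD\xff\xff'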
Example #7
    def _skip_to(self, fd, offset):
        """Write out pad bytes until a given offset

        Args:
            fd: File object to write to
            offset: Offset to write to
        """
        if fd.tell() > offset:
            raise ValueError(
                'No space for data before offset %#x (current offset %#x)' %
                (offset, fd.tell()))
        fd.write(tools.GetBytes(self._erase_byte, offset - fd.tell()))
Example #8
def _pack_string(instr):
    """Pack a string to the required aligned size by adding padding

    Args:
        instr: String to process

    Returns:
        String with required padding (at least one 0x00 byte) at the end
    """
    val = tools.ToBytes(instr)
    pad_len = align_int(len(val) + 1, FILENAME_ALIGN)
    return val + tools.GetBytes(0, pad_len - len(val))
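Example #8 pads a CBFS filename so it ends on an aligned boundary and always carries at least one terminating NUL. A self-contained sketch, assuming align_int() rounds up to the next multiple of the alignment and FILENAME_ALIGN is 16:

FILENAME_ALIGN = 16          # assumed CBFS filename alignment

def align_int(val, align):
    """Assumed helper: round 'val' up to the next multiple of 'align'."""
    return (val + align - 1) // align * align

def pack_string(instr):
    val = instr.encode('utf-8')
    pad_len = align_int(len(val) + 1, FILENAME_ALIGN)   # +1 guarantees a NUL
    return val + bytes(pad_len - len(val))              # tools.GetBytes(0, n)

assert pack_string('u-boot') == b'u-boot' + bytes(10)   # padded to 16 bytes
assert len(pack_string('a' * 16)) == 32                 # 16 chars + NUL -> 32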
Example #9
    def AddEmptyProp(self, prop_name, len):
        """Add a property with a fixed data size, for filling in later

        The device tree is marked dirty so that the value will be written to
        the blob on the next sync.

        Args:
            prop_name: Name of property
            len: Length of data in property
        """
        value = tools.GetBytes(0, len)
        self.props[prop_name] = Prop(self, None, prop_name, value)
Example #10
    def _GetFdtmap(self):
        """Build an FDT map from the entries in the current image

        Returns:
            FDT map binary data
        """
        def _AddNode(node):
            """Add a node to the FDT map"""
            for pname, prop in node.props.items():
                fsw.property(pname, prop.bytes)
            for subnode in node.subnodes:
                with fsw.add_node(subnode.name):
                    _AddNode(subnode)

        data = state.GetFdtContents('fdtmap')[1]
        # If we have an fdtmap it means that we are using this as the
        # fdtmap for this image.
        if data is None:
            # Get the FDT data into an Fdt object
            data = state.GetFdtContents()[1]
            infdt = Fdt.FromData(data)
            infdt.Scan()

            # Find the node for the image containing the Fdt-map entry
            path = self.section.GetPath()
            self.Detail("Fdtmap: Using section '%s' (path '%s')" %
                        (self.section.name, path))
            node = infdt.GetNode(path)
            if not node:
                self.Raise("Internal error: Cannot locate node for path '%s'" %
                           path)

            # Build a new tree with all nodes and properties starting from that
            # node
            fsw = libfdt.FdtSw()
            fsw.finish_reservemap()
            with fsw.add_node(''):
                fsw.property_string('image-node', node.name)
                _AddNode(node)
            fdt = fsw.as_fdt()

            # Pack this new FDT and return its contents
            fdt.pack()
            outfdt = Fdt.FromData(fdt.as_bytearray())
            data = outfdt.GetContents()
        data = FDTMAP_MAGIC + tools.GetBytes(0, 8) + data
        return data
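The last lines of example #10 prepend a fixed 16-byte header to the packed FDT: an 8-byte magic value followed by 8 reserved zero bytes. A small illustration with an assumed magic value and placeholder FDT data:

FDTMAP_MAGIC = b'_FDTMAP_'      # assumed 8-byte magic value, for illustration
fdt_data = b'\xd0\x0d\xfe\xed'  # placeholder for the packed FDT contents

fdtmap = FDTMAP_MAGIC + bytes(8) + fdt_data   # tools.GetBytes(0, 8)
assert len(fdtmap) - len(fdt_data) == 16      # fixed header before the FDT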
Example #11
    def ProcessContentsUpdate(self, data):
        """Update the contents of an entry, after the size is fixed

        This checks that the new data is the same size as the old. If the size
        has changed, this triggers a re-run of the packing algorithm.

        Args:
            data: Data to set to the contents (bytes)

        Raises:
            ValueError if the new data size is not the same as the old
        """
        size_ok = True
        new_size = len(data)
        if state.AllowEntryExpansion() and new_size > self.contents_size:
            # self.data will indicate the new size needed
            size_ok = False
        elif state.AllowEntryContraction() and new_size < self.contents_size:
            size_ok = False

        # If not allowed to change, try to deal with it or give up
        if size_ok:
            if new_size > self.contents_size:
                self.Raise('Cannot update entry size from %d to %d' %
                           (self.contents_size, new_size))

            # Don't let the data shrink. Pad it if necessary
            if size_ok and new_size < self.contents_size:
                data += tools.GetBytes(0, self.contents_size - new_size)

        if not size_ok:
            tout.Debug(
                "Entry '%s' size change from %s to %s" %
                (self._node.path, ToHex(self.contents_size), ToHex(new_size)))
        self.SetContents(data)
        return size_ok
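The GetBytes() call in example #11 is the pad-on-shrink branch: when the replacement data is shorter than the fixed contents size and shrinking is not allowed, it is zero-padded back to the original length. A hypothetical standalone illustration:

def pad_to_fixed_size(data, contents_size):
    """Sketch of the pad-on-shrink behaviour, outside the Entry class."""
    if len(data) > contents_size:
        raise ValueError('Cannot update entry size from %d to %d' %
                         (contents_size, len(data)))
    if len(data) < contents_size:
        data += bytes(contents_size - len(data))   # tools.GetBytes(0, n)
    return data

assert pad_to_fixed_size(b'abc', 6) == b'abc\x00\x00\x00'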
Example #12
    def get_data_and_offset(self, offset=None, pad_byte=None):
        """Obtain the contents of the file, in CBFS format and the offset of
        the data within the file

        Returns:
            tuple:
                bytes representing the contents of this file, packed and aligned
                    for directly inserting into the final CBFS output
                offset to the file data from the start of the returned data.
        """
        name = _pack_string(self.name)
        hdr_len = len(name) + FILE_HEADER_LEN
        attr_pos = 0
        content = b''
        attr = b''
        pad = b''
        data = self.data
        if self.ftype == TYPE_STAGE:
            elf_data = elf.DecodeElf(data, self.base_address)
            content = struct.pack(STAGE_FORMAT, self.compress,
                                  elf_data.entry, elf_data.load,
                                  len(elf_data.data), elf_data.memsize)
            data = elf_data.data
        elif self.ftype == TYPE_RAW:
            orig_data = data
            if self.compress == COMPRESS_LZ4:
                data = tools.Compress(orig_data, 'lz4')
            elif self.compress == COMPRESS_LZMA:
                data = tools.Compress(orig_data, 'lzma')
            self.memlen = len(orig_data)
            self.data_len = len(data)
            attr = struct.pack(ATTR_COMPRESSION_FORMAT,
                               FILE_ATTR_TAG_COMPRESSION, ATTR_COMPRESSION_LEN,
                               self.compress, self.memlen)
        elif self.ftype == TYPE_EMPTY:
            data = tools.GetBytes(self.erase_byte, self.size)
        else:
            raise ValueError('Unknown type %#x when writing\n' % self.ftype)
        if attr:
            attr_pos = hdr_len
            hdr_len += len(attr)
        if self.cbfs_offset is not None:
            pad_len = self.cbfs_offset - offset - hdr_len
            if pad_len < 0:  # pragma: no cover
                # Test coverage of this is not available since this should never
                # happen. It indicates that get_header_len() provided an
                # incorrect value (too small) so that we decided that we could
                # put this file at the requested place, but in fact a previous
                # file extends far enough into the CBFS that this is not
                # possible.
                raise ValueError(
                    "Internal error: CBFS file '%s': Requested offset %#x but current output position is %#x"
                    % (self.name, self.cbfs_offset, offset))
            pad = tools.GetBytes(pad_byte, pad_len)
            hdr_len += pad_len

        # Size of the file payload (content plus data) that follows the header
        size = len(content) + len(data)
        hdr = struct.pack(FILE_HEADER_FORMAT, FILE_MAGIC, size, self.ftype,
                          attr_pos, hdr_len)

        # Do a sanity check of the get_header_len() function, to ensure that it
        # stays in lockstep with this function
        expected_len = self.get_header_len()
        actual_len = len(hdr + name + attr)
        if expected_len != actual_len:  # pragma: no cover
            # Test coverage of this is not available since this should never
            # happen. It probably indicates that get_header_len() is broken.
            raise ValueError(
                "Internal error: CBFS file '%s': Expected headers of %#x bytes, got %#d"
                % (self.name, expected_len, actual_len))
        return hdr + name + attr + pad + content + data, hdr_len
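The pad_len calculation in example #12 places the file data at the requested cbfs_offset by inserting pad bytes between the header and the data. A worked example with invented values:

offset = 0x100        # where this file's header starts in the CBFS output
hdr_len = 0x38        # header + packed name + attributes written so far
cbfs_offset = 0x180   # requested offset of the file *data*

pad_len = cbfs_offset - offset - hdr_len   # pad inserted before the data
assert pad_len == 0x48
assert offset + hdr_len + pad_len == cbfs_offset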
Example #13
    def testGetEmpty(self):
        """Tests the GetEmpty() function for the various supported types"""
        self.assertEqual(True, fdt.Prop.GetEmpty(fdt.TYPE_BOOL))
        self.assertEqual(chr(0), fdt.Prop.GetEmpty(fdt.TYPE_BYTE))
        self.assertEqual(tools.GetBytes(0, 4), fdt.Prop.GetEmpty(fdt.TYPE_INT))
        self.assertEqual('', fdt.Prop.GetEmpty(fdt.TYPE_STRING))
Example #14
    def __init__(self, contents_size):
        self.contents_size = contents_size
        self.data = tools.GetBytes(ord('a'), contents_size)
Example #15
    def ObtainContents(self):
        self.SetContents(tools.GetBytes(self.fill_value, self.size))
        return True