def GetPaddedDataForEntry(self, entry, entry_data):
    """Get the data for an entry including any padding

    Gets the entry data and uses the section pad-byte value to add padding
    before and after as defined by the pad-before and pad-after properties.
    This does not consider alignment.

    Args:
        entry: Entry to check
        entry_data: Data for the entry, without padding (bytes)

    Returns:
        Contents of the entry along with any pad bytes before and after it
        (bytes)
    """
    # Sections carry their own pad byte; plain entries use this section's
    pad_byte = (entry._pad_byte if isinstance(entry, Entry_section)
                else self._pad_byte)
    data = bytearray()
    # Handle padding before the entry
    # NOTE(review): pad-before/pad-after use self._pad_byte even when the
    # entry is a section with its own pad byte; only the final fill to
    # entry.size below uses pad_byte. Confirm this asymmetry is intentional.
    if entry.pad_before:
        data += tools.get_bytes(self._pad_byte, entry.pad_before)

    # Add in the actual entry data
    data += entry_data

    # Handle padding after the entry
    if entry.pad_after:
        data += tools.get_bytes(self._pad_byte, entry.pad_after)

    # Fill out to the entry's fixed size, if it has one
    if entry.size:
        data += tools.get_bytes(pad_byte, entry.size - len(data))

    self.Detail('GetPaddedDataForEntry: size %s' % to_hex_size(self.data))

    return data
def testNoValue(self):
    """Test the case where we have no value for the symbol

    This should produce -1 values for all the symbols, taking up the
    first 20 bytes of the image.
    """
    entry = FakeEntry(24)
    section = FakeSection(sym_value=None)
    elf_fname = self.ElfTestFile('u_boot_binman_syms')
    elf.LookupAndWriteSymbols(elf_fname, entry, section)
    # The symbol area is filled with 0xff (-1); the remaining 4 bytes keep
    # the entry's original 'a' fill
    self.assertEqual(
        tools.get_bytes(255, 20) + tools.get_bytes(ord('a'), 4),
        entry.data)
def BuildSectionData(self, required):
    """Build FIT entry contents

    This adds the 'data' properties to the input ITB (Image-tree Binary)
    then runs mkimage to process it.

    Args:
        required (bool): True if the data must be present, False if it is
            OK to return None

    Returns:
        bytes: Contents of the section
    """
    itb_data = self._build_input()
    name = self.GetUniqueName()
    in_fname = tools.get_output_filename(f'{name}.itb')
    out_fname = tools.get_output_filename(f'{name}.fit')
    # The ITB is written to the output filename as well, since mkimage
    # appears to process that file in place — TODO confirm
    tools.write_file(in_fname, itb_data)
    tools.write_file(out_fname, itb_data)

    mkimage_args = {}
    ext_offset = self._fit_props.get('fit,external-offset')
    if ext_offset is not None:
        mkimage_args['external'] = True
        mkimage_args['pad'] = fdt_util.fdt32_to_cpu(ext_offset.value)

    result = self.mkimage.run(reset_timestamp=True, output_fname=out_fname,
                              **mkimage_args)
    if result is None:
        # Bintool is missing; just use empty data as the output
        self.record_missing_bintool(self.mkimage)
        return tools.get_bytes(0, 1024)
    return tools.read_file(out_fname)
def ObtainContents(self):
    """Create the GBB (Google Binary Block) contents with futility

    Returns:
        bool: True, since contents are always set (zeroed data is used
            when the futility bintool is missing)
    """
    out_fname = tools.get_output_filename('gbb.bin')
    if not self.size:
        self.Raise('GBB must have a fixed size')
    total_size = self.size
    # The other regions have fixed sizes; the bitmap FV gets the remainder
    bmpfv_size = total_size - 0x2180
    if bmpfv_size < 0:
        self.Raise('GBB is too small (minimum 0x2180 bytes)')
    key_path = tools.get_input_filename(self.keydir)

    result = self.futility.gbb_create(
        out_fname, [0x100, 0x1000, bmpfv_size, 0x1000])
    if result is not None:
        result = self.futility.gbb_set(
            out_fname,
            hwid=self.hardware_id,
            rootkey='%s/root_key.vbpubk' % key_path,
            recoverykey='%s/recovery_key.vbpubk' % key_path,
            flags=self.gbb_flags,
            bmpfv=tools.get_input_filename(self.bmpblk))

    if result is None:
        # Bintool is missing; just use the required amount of zero data
        self.record_missing_bintool(self.futility)
        self.SetContents(tools.get_bytes(0, total_size))
    else:
        self.SetContents(tools.read_file(out_fname))
    return True
def ObtainContents(self):
    """Set the contents to zeroes covering U-Boot TPL's BSS region

    The size comes from the __bss_size symbol in the TPL ELF file.

    Returns:
        bool: True, on success (otherwise this raises)
    """
    tpl_fname = tools.get_input_filename('tpl/u-boot-tpl')
    bss_size = elf.GetSymbolAddress(tpl_fname, '__bss_size')
    if not bss_size:
        self.Raise('Expected __bss_size symbol in tpl/u-boot-tpl')
    zeroes = tools.get_bytes(0, bss_size)
    self.SetContents(zeroes)
    return True
def AddZeroProp(self, prop_name):
    """Add a new property to the device tree with an integer value of 0.

    Args:
        prop_name: Name of property
    """
    # A zeroed 4-byte cell is the FDT representation of integer 0
    zero_cell = tools.get_bytes(0, 4)
    self.props[prop_name] = Prop(self, None, prop_name, zero_cell)
def AddEmptyProp(self, prop_name, len):
    """Add a property with a fixed data size, for filling in later

    The device tree is marked dirty so that the value will be written to
    the blob on the next sync.

    Args:
        prop_name: Name of property
        len: Length of data in property (note: this parameter shadows the
            builtin len() inside this method; kept for API compatibility)
    """
    placeholder = tools.get_bytes(0, len)
    self.props[prop_name] = Prop(self, None, prop_name, placeholder)
def _pack_string(instr):
    """Pack a string to the required aligned size by adding padding

    Args:
        instr: String to process

    Returns:
        String with required padding (at least one 0x00 byte) at the end
    """
    raw = tools.to_bytes(instr)
    # Reserve room for at least one NUL terminator, then round up to the
    # filename alignment
    total = align_int(len(raw) + 1, FILENAME_ALIGN)
    return raw + tools.get_bytes(0, total - len(raw))
def _skip_to(self, fd, offset):
    """Write out pad bytes until a given offset

    Args:
        fd: File object to write to
        offset: Offset to write to

    Raises:
        ValueError: if the file is already past the requested offset
    """
    pos = fd.tell()
    if pos > offset:
        raise ValueError(
            'No space for data before offset %#x (current offset %#x)' %
            (offset, pos))
    fd.write(tools.get_bytes(self._erase_byte, offset - pos))
def _BuildIfwi(self): """Build the contents of the IFWI and write it to the 'data' property""" # Create the IFWI file if needed if self._convert_fit: inname = self._pathname outname = tools.get_output_filename('ifwi.bin') if self.ifwitool.create_ifwi(inname, outname) is None: # Bintool is missing; just create a zeroed ifwi.bin self.record_missing_bintool(self.ifwitool) self.SetContents(tools.get_bytes(0, 1024)) self._filename = 'ifwi.bin' self._pathname = outname else: # Provide a different code path here to ensure we have test coverage outname = self._pathname # Delete OBBP if it is there, then add the required new items if self.ifwitool.delete_subpart(outname, 'OBBP') is None: # Bintool is missing; just use zero data self.record_missing_bintool(self.ifwitool) self.SetContents(tools.get_bytes(0, 1024)) return True for entry in self._ifwi_entries.values(): # First get the input data and put it in a file data = entry.GetPaddedData() uniq = self.GetUniqueName() input_fname = tools.get_output_filename('input.%s' % uniq) tools.write_file(input_fname, data) # At this point we know that ifwitool is present, so we don't need # to check for None here self.ifwitool.add_subpart(outname, entry._ifwi_subpart, entry._ifwi_entry_name, input_fname, entry._ifwi_replace) self.ReadBlobContents() return True
def testAddMore(self):
    """Test various other methods for adding and setting properties"""
    # AddZeroProp() creates a zero-valued integer property
    self.node.AddZeroProp('one')
    self.dtb.Sync(auto_resize=True)
    data = self.fdt.getprop(self.node.Offset(), 'one')
    self.assertEqual(0, fdt32_to_cpu(data))

    # SetInt() updates it in place, so no resize is needed
    self.node.SetInt('one', 1)
    self.dtb.Sync(auto_resize=False)
    data = self.fdt.getprop(self.node.Offset(), 'one')
    self.assertEqual(1, fdt32_to_cpu(data))

    val = 1234
    self.node.AddInt('integer', val)
    self.dtb.Sync(auto_resize=True)
    data = self.fdt.getprop(self.node.Offset(), 'integer')
    self.assertEqual(val, fdt32_to_cpu(data))

    # Strings may contain embedded NUL characters; a trailing NUL is added
    val = '123' + chr(0) + '456'
    self.node.AddString('string', val)
    self.dtb.Sync(auto_resize=True)
    data = self.fdt.getprop(self.node.Offset(), 'string')
    self.assertEqual(tools.to_bytes(val) + b'\0', data)

    self.fdt.pack()
    # Growing the string after pack() would need a resize, so with
    # auto_resize disabled the sync must fail with FDT_ERR_NOSPACE
    self.node.SetString('string', val + 'x')
    with self.assertRaises(libfdt.FdtException) as e:
        self.dtb.Sync(auto_resize=False)
    self.assertIn('FDT_ERR_NOSPACE', str(e.exception))

    # A same-size value syncs fine without resizing
    self.node.SetString('string', val[:-1])
    prop = self.node.props['string']
    prop.SetData(tools.to_bytes(val))
    self.dtb.Sync(auto_resize=False)
    data = self.fdt.getprop(self.node.Offset(), 'string')
    self.assertEqual(tools.to_bytes(val), data)

    # AddEmptyProp() reserves space which SetData() can fill in later
    self.node.AddEmptyProp('empty', 5)
    self.dtb.Sync(auto_resize=True)
    prop = self.node.props['empty']
    prop.SetData(tools.to_bytes(val))
    self.dtb.Sync(auto_resize=False)
    data = self.fdt.getprop(self.node.Offset(), 'empty')
    self.assertEqual(tools.to_bytes(val), data)

    self.node.SetData('empty', b'123')
    self.assertEqual(b'123', prop.bytes)

    # Try adding a lot of data at once
    self.node.AddData('data', tools.get_bytes(65, 20000))
    self.dtb.Sync(auto_resize=True)
def __init__(self, uuid, offset, size, flags):
    """Set up a new FIP entry

    Args:
        uuid (bytes): UUID of the entry
        offset: Offset of the entry
        size: Size of the entry
        flags: Flags for the entry
    """
    self.uuid = uuid
    self.offset = offset
    self.size = size
    self.flags = flags
    self.fip_type = None
    self.data = None
    # An all-zero UUID marks an entry that is not in use
    self.valid = uuid != tools.get_bytes(0, UUID_LEN)
    if self.valid:
        # Look up the friendly name
        found = {val for val in FIP_TYPES.values() if val.uuid == uuid}
        if len(found) == 1:
            self.fip_type = next(iter(found)).name
def ReadBlobContents(self):
    """Convert the environment text file into U-Boot environment format

    Each input line becomes a NUL-terminated string, followed by one extra
    NUL, padding with the fill value up to the entry size, all prefixed by
    a CRC32 and a 0x01 byte.

    Returns:
        bool: True, on success (otherwise this raises)
    """
    raw = tools.read_file(self._pathname)
    env = bytearray()
    for line in raw.splitlines():
        env += line + b'\0'
    # Extra NUL after the last line
    env += b'\0'
    # Reserve 5 bytes for the CRC32 (4) plus the 0x01 byte added below
    pad = self.size - len(env) - 5
    if pad < 0:
        self.Raise(
            "'u-boot-env' entry too small to hold data (need %#x more bytes)"
            % -pad)
    env += tools.get_bytes(self.fill_value, pad)
    crc = zlib.crc32(env)
    # NOTE(review): the 0x01 byte presumably marks flags/redundancy —
    # confirm against the environment-format consumer
    self.SetContents(struct.pack('<I', crc) + b'\x01' + env)
    return True
def _GetFdtmap(self):
    """Build an FDT map from the entries in the current image

    Returns:
        FDT map binary data
    """
    def _AddNode(node):
        """Add a node to the FDT map

        Copies the node's properties and recurses into its subnodes,
        writing into the enclosing 'fsw' writer.
        """
        for pname, prop in node.props.items():
            fsw.property(pname, prop.bytes)
        for subnode in node.subnodes:
            with fsw.add_node(subnode.name):
                _AddNode(subnode)

    data = state.GetFdtContents('fdtmap')[1]
    # If we have an fdtmap it means that we are using this as the
    # fdtmap for this image.
    if data is None:
        # Get the FDT data into an Fdt object
        data = state.GetFdtContents()[1]
        infdt = Fdt.FromData(data)
        infdt.Scan()

        # Find the node for the image containing the Fdt-map entry
        path = self.section.GetPath()
        self.Detail("Fdtmap: Using section '%s' (path '%s')" %
                    (self.section.name, path))
        node = infdt.GetNode(path)
        if not node:
            self.Raise("Internal error: Cannot locate node for path '%s'" %
                       path)

        # Build a new tree with all nodes and properties starting from that
        # node
        fsw = libfdt.FdtSw()
        fsw.finish_reservemap()
        with fsw.add_node(''):
            fsw.property_string('image-node', node.name)
            _AddNode(node)
        fdt = fsw.as_fdt()

        # Pack this new FDT and return its contents
        fdt.pack()
        outfdt = Fdt.FromData(fdt.as_bytearray())
        data = outfdt.GetContents()
    # Prefix the magic string and 8 zero bytes ahead of the FDT data
    data = FDTMAP_MAGIC + tools.get_bytes(0, 8) + data
    return data
def UpdateFile(infile, outfile, start_sym, end_sym, insert):
    """Write a copy of an ELF file with data inserted between two symbols

    The region between start_sym and end_sym is replaced by 'insert',
    zero-padded to fill the available space.

    Args:
        infile: Filename of input file
        outfile: Filename of output file
        start_sym: Name of the symbol marking the start of the region
        end_sym: Name of the symbol marking the end of the region
        insert: Data to insert (bytes)

    Raises:
        ValueError: if the symbols cannot be found or the data is too large
    """
    tout.notice("Creating file '%s' with data length %#x (%d) between symbols '%s' and '%s'" %
                (outfile, len(insert), len(insert), start_sym, end_sym))
    syms = GetSymbolFileOffset(infile, [start_sym, end_sym])
    if len(syms) != 2:
        raise ValueError("Expected two symbols '%s' and '%s': got %d: %s" %
                         (start_sym, end_sym, len(syms),
                          ','.join(syms.keys())))

    start = syms[start_sym].offset
    end = syms[end_sym].offset
    avail = end - start
    if len(insert) > avail:
        raise ValueError("Not enough space in '%s' for data length %#x (%d); size is %#x (%d)" %
                         (infile, len(insert), len(insert), avail, avail))

    contents = tools.read_file(infile)
    padding = tools.get_bytes(0, avail - len(insert))
    tools.write_file(outfile,
                     contents[:start] + insert + padding + contents[end:])
    tout.info('Written to offset %#x' % start)
def BuildSectionData(self, required):
    """Build the contents of a section

    This places all entries at the right place, dealing with padding before
    and after entries. It does not do padding for the section itself (the
    pad-before and pad-after properties in the section items) since that is
    handled by the parent section.

    This should be overridden by subclasses which want to build their own
    data structure for the section.

    Args:
        required: True if the data must be present, False if it is OK to
            return None

    Returns:
        Contents of the section (bytes)
    """
    section_data = bytearray()

    for entry in self._entries.values():
        entry_data = entry.GetData(required)

        # This can happen when this section is referenced from a collection
        # earlier in the image description. See testCollectionSection().
        if not required and entry_data is None:
            return None
        data = self.GetPaddedDataForEntry(entry, entry_data)
        # Handle empty space before the entry
        # (entry offsets appear to be relative to the section start after
        # _skip_at_start — confirm against the packing code)
        pad = (entry.offset or 0) - self._skip_at_start - len(section_data)
        if pad > 0:
            section_data += tools.get_bytes(self._pad_byte, pad)

        # Add in the actual entry data
        section_data += data
    self.Detail('GetData: %d entries, total size %#x' %
                (len(self._entries), len(section_data)))
    return self.CompressData(section_data)
def ProcessContentsUpdate(self, data):
    """Update the contents of an entry, after the size is fixed

    This checks that the new data is the same size as the old. If the size
    has changed, this triggers a re-run of the packing algorithm.

    Args:
        data: Data to set to the contents (bytes)

    Returns:
        True if the data fits (padded up if smaller), False if a size
        change was permitted and a repack is needed

    Raises:
        ValueError if the new data size is not the same as the old
    """
    size_ok = True
    new_size = len(data)
    # A size change is only acceptable when the relevant policy allows it
    if state.AllowEntryExpansion() and new_size > self.contents_size:
        # self.data will indicate the new size needed
        size_ok = False
    elif state.AllowEntryContraction() and new_size < self.contents_size:
        size_ok = False

    # If not allowed to change, try to deal with it or give up
    if size_ok:
        if new_size > self.contents_size:
            self.Raise('Cannot update entry size from %d to %d' %
                       (self.contents_size, new_size))

        # Don't let the data shrink. Pad it if necessary
        # NOTE(review): the 'size_ok and' here is redundant — size_ok is
        # always True inside this branch
        if size_ok and new_size < self.contents_size:
            data += tools.get_bytes(0, self.contents_size - new_size)

    if not size_ok:
        tout.debug("Entry '%s' size change from %s to %s" % (
            self._node.path, to_hex(self.contents_size),
            to_hex(new_size)))
    self.SetContents(data)
    return size_ok
def GetVblock(self, required):
    """Get the contents of this entry

    Args:
        required: True if the data must be present, False if it is OK to
            return None

    Returns:
        bytes content of the entry, which is the signed vblock for the
        provided data
    """
    # Join up the data files to be signed
    input_data = self.GetContents(required)
    if input_data is None:
        return None

    uniq = self.GetUniqueName()
    output_fname = tools.get_output_filename('vblock.%s' % uniq)
    input_fname = tools.get_output_filename('input.%s' % uniq)
    tools.write_file(input_fname, input_data)
    prefix = self.keydir + '/'
    # Fix: the previous f'{self.version,}' formatted a one-element tuple,
    # passing e.g. '(1,)' rather than '1' as the version argument
    stdout = self.futility.sign_firmware(
        vblock=output_fname,
        keyblock=prefix + self.keyblock,
        signprivate=prefix + self.signprivate,
        version=f'{self.version}',
        firmware=input_fname,
        kernelkey=prefix + self.kernelkey,
        flags=f'{self.preamble_flags}')
    if stdout is not None:
        data = tools.read_file(output_fname)
    else:
        # Bintool is missing; just use 4KB of zero data
        self.record_missing_bintool(self.futility)
        data = tools.get_bytes(0, 4096)
    return data
def ObtainContents(self):
    """Set the contents to the fill byte repeated for the entry size

    Returns:
        bool: True, since the contents can always be provided
    """
    fill = tools.get_bytes(self.fill_value, self.size)
    self.SetContents(fill)
    return True
def testGetEmpty(self):
    """Tests the GetEmpty() function for the various supported types"""
    self.assertEqual(True, fdt.Prop.GetEmpty(Type.BOOL))
    # BYTE is a single zero character, INT a zeroed 4-byte cell,
    # STRING an empty string
    self.assertEqual(chr(0), fdt.Prop.GetEmpty(Type.BYTE))
    self.assertEqual(tools.get_bytes(0, 4), fdt.Prop.GetEmpty(Type.INT))
    self.assertEqual('', fdt.Prop.GetEmpty(Type.STRING))
def test_read_segments_bad_data(self):
    """Test for read_loadable_segments() with an invalid ELF file"""
    # NOTE(review): fname is unused — the call below is given junk bytes
    # directly rather than this file's contents; confirm it can be dropped
    fname = self.ElfTestFile('embed_data')
    with self.assertRaises(ValueError) as e:
        # 100 bytes of value 100 cannot start with the ELF magic number
        elf.read_loadable_segments(tools.get_bytes(100, 100))
    self.assertIn('Magic number does not match', str(e.exception))
def __init__(self, contents_size):
    """Set up a fake entry filled with 'a' bytes

    Args:
        contents_size: Number of bytes of contents to create
    """
    fill_char = ord('a')
    self.contents_size = contents_size
    self.data = tools.get_bytes(fill_char, contents_size)
def get_data_and_offset(self, offset=None, pad_byte=None):
    """Get the data for an entry including any padding

    Obtain the contents of the file, in CBFS format and the offset of
    the data within the file.

    Args:
        offset: Current output position, used to compute padding when the
            file has a requested cbfs_offset (may be None otherwise)
        pad_byte: Byte value to use for padding up to cbfs_offset

    Returns:
        tuple: bytes representing the contents of this file, packed and
            aligned for directly inserting into the final CBFS output
            offset to the file data from the start of the returned data.
    """
    name = _pack_string(self.name)
    hdr_len = len(name) + FILE_HEADER_LEN
    attr_pos = 0
    content = b''
    attr = b''
    pad = b''
    data = self.data
    if self.ftype == TYPE_STAGE:
        # Stages carry a header describing the decoded ELF
        elf_data = elf.DecodeElf(data, self.base_address)
        content = struct.pack(STAGE_FORMAT, self.compress, elf_data.entry,
                              elf_data.load, len(elf_data.data),
                              elf_data.memsize)
        data = elf_data.data
    elif self.ftype == TYPE_RAW:
        orig_data = data
        if self.compress == COMPRESS_LZ4:
            data = comp_util.compress(orig_data, 'lz4', with_header=False)
        elif self.compress == COMPRESS_LZMA:
            data = comp_util.compress(orig_data, 'lzma', with_header=False)
        # Record uncompressed and compressed lengths for the attribute
        self.memlen = len(orig_data)
        self.data_len = len(data)
        attr = struct.pack(ATTR_COMPRESSION_FORMAT,
                           FILE_ATTR_TAG_COMPRESSION, ATTR_COMPRESSION_LEN,
                           self.compress, self.memlen)
    elif self.ftype == TYPE_EMPTY:
        data = tools.get_bytes(self.erase_byte, self.size)
    else:
        raise ValueError('Unknown type %#x when writing\n' % self.ftype)
    if attr:
        attr_pos = hdr_len
        hdr_len += len(attr)

    # If a specific CBFS offset was requested, pad up to it
    if self.cbfs_offset is not None:
        pad_len = self.cbfs_offset - offset - hdr_len
        if pad_len < 0:  # pragma: no cover
            # Test coverage of this is not available since this should never
            # happen. It indicates that get_header_len() provided an
            # incorrect value (too small) so that we decided that we could
            # put this file at the requested place, but in fact a previous
            # file extends far enough into the CBFS that this is not
            # possible.
            raise ValueError("Internal error: CBFS file '%s': Requested offset %#x but current output position is %#x" % (self.name, self.cbfs_offset, offset))
        pad = tools.get_bytes(pad_byte, pad_len)
        hdr_len += pad_len

    # This is the offset of the start of the file's data
    size = len(content) + len(data)
    hdr = struct.pack(FILE_HEADER_FORMAT, FILE_MAGIC, size, self.ftype,
                      attr_pos, hdr_len)

    # Do a sanity check of the get_header_len() function, to ensure that it
    # stays in lockstep with this function
    expected_len = self.get_header_len()
    actual_len = len(hdr + name + attr)
    if expected_len != actual_len:  # pragma: no cover
        # Test coverage of this is not available since this should never
        # happen. It probably indicates that get_header_len() is broken.
        raise ValueError("Internal error: CBFS file '%s': Expected headers of %#x bytes, got %#d" % (self.name, expected_len, actual_len))
    return hdr + name + attr + pad + content + data, hdr_len