def parse_atf_source(srcdir, dstfile, oldfile):
    """Parse the ATF source tree and update this file

    Args:
        srcdir (str): Path to 'arm-trusted-firmware' directory. Get this from:
            https://github.com/ARM-software/arm-trusted-firmware.git
        dstfile (str): File to write new code to, if an update is needed
        oldfile (str): Python source file to compare against

    Raises:
        ValueError: srcdir readme.rst is missing or the first line does not
            match what is expected
    """
    # Sanity-check that srcdir really is an ATF tree via its readme
    readme_fname = os.path.join(srcdir, 'readme.rst')
    if not os.path.exists(readme_fname):
        raise ValueError(
            f"Expected file '{readme_fname}' - try using -s to specify the "
            'arm-trusted-firmware directory')
    readme = tools.read_file(readme_fname, binary=False)
    first_line = 'Trusted Firmware-A'
    if readme.splitlines()[0] != first_line:
        raise ValueError(f"'{readme_fname}' does not start with '{first_line}'")

    # Generate the replacement code from the ATF headers
    output = create_code_output(parse_macros(srcdir), parse_names(srcdir))

    # Splice the generated section into the existing file, if the markers
    # are present; otherwise use the generated code on its own
    orig = tools.read_file(oldfile, binary=False)
    re_fip_list = re.compile(r'(.*FIP_TYPE_LIST = \[).*?( ] # end.*)', re.S)
    mat = re_fip_list.match(orig)
    if mat:
        new_code = mat.group(1) + '\n' + output + mat.group(2)
    else:
        new_code = output

    if new_code == orig:
        print(f"Existing code in '{oldfile}' is up-to-date")
    else:
        tools.write_file(dstfile, new_code, binary=False)
        print(f'Needs update, try:\n\tmeld {dstfile} {oldfile}')
def GetFdtContents(etype='u-boot-dtb'):
    """Looks up the FDT pathname and contents

    This is used to obtain the Fdt pathname and contents when needed by an
    entry. It supports a 'fake' dtb, allowing tests to substitute test data
    for the real dtb.

    Args:
        etype: Entry type to look up (e.g. 'u-boot.dtb').

    Returns:
        tuple:
            pathname to Fdt
            Fdt data (as bytes), or (None, None) if the etype is unknown
    """
    if etype not in output_fdt_info:
        return None, None
    if use_fake_dtb:
        # Tests substitute an input file for the real device tree
        fname = output_fdt_info[etype][1]
        pathname = tools.get_input_filename(fname)
        data = tools.read_file(pathname)
    else:
        pathname = GetFdtPath(etype)
        data = GetFdtForEtype(etype).GetContents()
    return pathname, data
def test_all_bintools(self):
    """Test that all bintools can handle all available fetch types"""
    def fake_download(_):
        """Stand-in for tools.download() which just writes a file"""
        tools.write_file(dl_fname, payload)
        return dl_fname, dl_dir

    def fake_run(*cmd):
        if cmd[0] == 'make':
            # See Bintool.build_from_git()
            tmpdir = cmd[2]
            self.fname = os.path.join(tmpdir, 'pathname')
            tools.write_file(self.fname, b'hello')

    payload = b'this is a test'
    dl_dir = os.path.join(self._indir, 'download_dir')
    os.mkdir(dl_dir)
    dl_fname = os.path.join(dl_dir, 'downloaded')

    with unittest.mock.patch.object(tools, 'run', side_effect=fake_run), \
            unittest.mock.patch.object(tools, 'download',
                                       side_effect=fake_download), \
            test_util.capture_sys_output() as _:
        for name in Bintool.get_tool_list():
            btool = Bintool.create(name)
            # Try every fetch method on every tool
            for method in range(bintool.FETCH_COUNT):
                result = btool.fetch(method)
                self.assertTrue(result is not False)
                if result is not True and result is not None:
                    result_fname, _ = result
                    self.assertTrue(os.path.exists(result_fname))
                    self.assertEqual(payload, tools.read_file(result_fname))
                    os.remove(result_fname)
def ReadFileContents(self, pathname):
    """Read blob contents into memory

    This function compresses the data before returning if needed.

    We assume the data is small enough to fit into memory. If this is used
    for large filesystem image that might not be true. In that case,
    Image.BuildImage() could be adjusted to use a new Entry method which can
    read in chunks. Then we could copy the data in chunks and avoid reading
    it all at once. For now this seems like an unnecessary complication.

    Args:
        pathname (str): Pathname to read from

    Returns:
        bytes: Data read (possibly compressed)
    """
    # Time the read and the compression separately for reporting
    state.TimingStart('read')
    raw = tools.read_file(pathname)
    state.TimingAccum('read')

    state.TimingStart('compress')
    compressed = self.CompressData(raw)
    state.TimingAccum('compress')
    return compressed
def test_cbfs_stage(self):
    """Tests handling of a Coreboot Filesystem (CBFS)"""
    if not elf.ELF_TOOLS:
        self.skipTest('Python elftools not available')
    # Build a small ELF to act as the stage
    stage_elf = os.path.join(self._indir, 'cbfs-stage.elf')
    elf.MakeElf(stage_elf, U_BOOT_DATA, U_BOOT_DTB_DATA)

    cbfs_size = 0xb0
    writer = CbfsWriter(cbfs_size)
    writer.add_file_stage('u-boot', tools.read_file(stage_elf))

    data = writer.get_data()
    cbfs = self._check_hdr(data, cbfs_size)
    load_addr = 0xfef20000
    entry_addr = load_addr + 2
    cfile = self._check_uboot(cbfs, cbfs_util.TYPE_STAGE, offset=0x28,
                              data=U_BOOT_DATA + U_BOOT_DTB_DATA)
    self.assertEqual(entry_addr, cfile.entry)
    self.assertEqual(load_addr, cfile.load)
    self.assertEqual(len(U_BOOT_DATA) + len(U_BOOT_DTB_DATA),
                     cfile.data_len)

    # Compare against what cbfstool creates
    if self.have_cbfstool:
        cbfs_fname = os.path.join(self._indir, 'test.cbfs')
        self.cbfstool.create_new(cbfs_fname, cbfs_size)
        self.cbfstool.add_stage(cbfs_fname, 'u-boot', stage_elf)
        self._compare_expected_cbfs(data, cbfs_fname)
def ObtainContents(self):
    """Create the GBB contents, or zero data if futility is missing

    Returns:
        bool: True, since contents are always set
    """
    fname = tools.get_output_filename('gbb.bin')
    if not self.size:
        self.Raise('GBB must have a fixed size')
    gbb_size = self.size
    # The GBB header/keys occupy 0x2180 bytes; the rest holds the bitmaps
    bmpfv_size = gbb_size - 0x2180
    if bmpfv_size < 0:
        self.Raise('GBB is too small (minimum 0x2180 bytes)')
    keydir = tools.get_input_filename(self.keydir)

    stdout = self.futility.gbb_create(
        fname, [0x100, 0x1000, bmpfv_size, 0x1000])
    if stdout is not None:
        stdout = self.futility.gbb_set(
            fname,
            hwid=self.hardware_id,
            rootkey='%s/root_key.vbpubk' % keydir,
            recoverykey='%s/recovery_key.vbpubk' % keydir,
            flags=self.gbb_flags,
            bmpfv=tools.get_input_filename(self.bmpblk))

    if stdout is None:
        # Bintool is missing; just use the required amount of zero data
        self.record_missing_bintool(self.futility)
        self.SetContents(tools.get_bytes(0, gbb_size))
    else:
        self.SetContents(tools.read_file(fname))
    return True
def BuildSectionData(self, required):
    """Build FIT entry contents

    This adds the 'data' properties to the input ITB (Image-tree Binary)
    then runs mkimage to process it.

    Args:
        required (bool): True if the data must be present, False if it is OK
            to return None

    Returns:
        bytes: Contents of the section
    """
    itb_data = self._build_input()
    uniq = self.GetUniqueName()
    input_fname = tools.get_output_filename(f'{uniq}.itb')
    output_fname = tools.get_output_filename(f'{uniq}.fit')
    # mkimage updates the output file in place, so seed both files
    for path in (input_fname, output_fname):
        tools.write_file(path, itb_data)

    args = {}
    ext_offset = self._fit_props.get('fit,external-offset')
    if ext_offset is not None:
        args = {
            'external': True,
            'pad': fdt_util.fdt32_to_cpu(ext_offset.value),
        }
    if self.mkimage.run(reset_timestamp=True, output_fname=output_fname,
                        **args) is None:
        # Bintool is missing; just use empty data as the output
        self.record_missing_bintool(self.mkimage)
        return tools.get_bytes(0, 1024)
    return tools.read_file(output_fname)
def test_fetch_pass_fail(self):
    """Test fetching multiple tools with some passing and some failing"""
    def fake_download(_):
        """Stand-in for tools.download(): succeed first time, then fail"""
        if self.seq:
            raise urllib.error.URLError('not found')
        self.seq += 1
        tools.write_file(dl_fname, payload)
        return dl_fname, dl_dir

    payload = b'this is a test'
    dl_dir = os.path.join(self._indir, 'download_dir')
    os.mkdir(dl_dir)
    dl_fname = os.path.join(dl_dir, 'downloaded')
    destdir = os.path.join(self._indir, 'dest_dir')
    os.mkdir(destdir)
    dest_fname = os.path.join(destdir, '_testing')
    self.seq = 0

    with unittest.mock.patch.object(bintool, 'DOWNLOAD_DESTDIR', destdir), \
            unittest.mock.patch.object(tools, 'download',
                                       side_effect=fake_download), \
            test_util.capture_sys_output() as (stdout, _):
        Bintool.fetch_tools(bintool.FETCH_ANY, ['_testing'] * 2)
    # The first fetch succeeds and installs the tool; the second fails
    self.assertTrue(os.path.exists(dest_fname))
    self.assertEqual(payload, tools.read_file(dest_fname))

    lines = stdout.getvalue().splitlines()
    self.assertTrue(len(lines) > 2)
    self.assertEqual('Tools fetched: 1: _testing', lines[-2])
    self.assertEqual('Failures: 1: _testing', lines[-1])
def check_cfg_file(fname, adjust_cfg):
    """Check that a config file has been adjusted according to adjust_cfg

    Args:
        fname (str): Filename of .config file to change
        adjust_cfg (dict of str): Changes to make to .config file before
                building:
             key: str config to change, without the CONFIG_ prefix, e.g.
                 FRED
             value: str change to make (C is config option without prefix):
                C to enable C
                ~C to disable C
                C=val to set the value of C (val must have quotes if C is
                    a string Kconfig

    Returns:
        str: None if OK, else an error string listing the problems
    """
    lines = tools.read_file(fname, binary=False).splitlines()
    bad_cfgs = check_cfg_lines(lines, adjust_cfg)
    if bad_cfgs:
        out = [f'{cfg:20} {line}' for cfg, line in bad_cfgs]
        # Bug fix: use a real newline, not the two-character sequence '\n',
        # so each failed adjustment appears on its own line in the message
        content = '\n'.join(out)
        return f'''
Some CONFIG adjustments did not take effect. This may be because the request
CONFIGs do not exist or conflict with others.

Failed adjustments:

{content}
'''
    return None
def parse_names(srcdir):
    """parse_names: Parse the tbbr_config.c file

    Args:
        srcdir (str): 'arm-trusted-firmware' source directory

    Returns:
        dict of entries:
            key: UUID macro, e.g. 'UUID_NON_TRUSTED_FIRMWARE_BL33'
            value: tuple of entry information:
                Description of entry, e.g. 'Non-Trusted Firmware BL33'
                UUID macro, e.g. 'UUID_NON_TRUSTED_FIRMWARE_BL33'
                Name of entry, e.g. 'nt-fw'

    Raises:
        ValueError: the file cannot be parsed
    """
    # Extract the .name, .uuid and .cmdline_name values, e.g. from:
    #   {
    #       .name = "Secure Payload BL32 Extra2 (Trusted OS Extra2)",
    #       .uuid = UUID_SECURE_PAYLOAD_BL32_EXTRA2,
    #       .cmdline_name = "tos-fw-extra2"
    #   },
    pattern = re.compile(
        r'\.name = "([^"]*)",\s*\.uuid = (UUID_\w*),\s*\.cmdline_name = "([^"]+)"',
        re.S)
    fname = os.path.join(srcdir, 'tools/fiptool/tbbr_config.c')
    content = tools.read_file(fname, binary=False)
    entries = pattern.findall(content)
    if not entries:
        raise ValueError(f'{fname}: Cannot parse file')
    return {uuid: (desc, uuid, name) for desc, uuid, name in entries}
def ReadBlobContents(self):
    """Read the blob, first stripping symbols if requested

    Returns:
        bool: True, since contents are always read
    """
    if self._strip:
        # Strip a copy of the file, so the original is untouched
        uniq = self.GetUniqueName()
        stripped_fname = tools.get_output_filename('%s.stripped' % uniq)
        tools.write_file(stripped_fname, tools.read_file(self._pathname))
        tools.run('strip', stripped_fname)
        self._pathname = stripped_fname
    super().ReadBlobContents()
    return True
def test_read_segments_fail(self):
    """Test for read_segments() without elftools"""
    # Temporarily pretend elftools is not installed
    saved = elf.ELF_TOOLS
    elf.ELF_TOOLS = False
    try:
        fname = self.ElfTestFile('embed_data')
        with self.assertRaises(ValueError) as e:
            elf.read_segments(tools.read_file(fname))
        self.assertIn('Python elftools package is not available',
                      str(e.exception))
    finally:
        elf.ELF_TOOLS = saved
def _gen_fdt_nodes(base_node, node, depth, in_images):
    """Generate FDT nodes

    This creates one node for each member of self._fdts using the provided
    template. If a property value contains 'NAME' it is replaced with the
    filename of the FDT. If a property value contains SEQ it is replaced
    with the node sequence number, where 1 is the first.

    NOTE(review): this is a nested function — `self`, `fsw` and `_add_node`
    come from the enclosing scope, which is not visible here. `base_node`
    is accepted but not used in this body.

    Args:
        base_node: Base node of the template (unused here)
        node: Generator node to process (its name supplies the template,
            e.g. '@fdt-SEQ')
        depth: Current node depth (0 is the base 'fit' node)
        in_images: True if this is inside the 'images' node, so that 'data'
            properties should be generated
    """
    if self._fdts:
        # Generate nodes for each FDT
        for seq, fdt_fname in enumerate(self._fdts):
            # Strip the leading '@' and substitute the 1-based sequence
            node_name = node.name[1:].replace('SEQ', str(seq + 1))
            fname = tools.get_input_filename(fdt_fname + '.dtb')
            with fsw.add_node(node_name):
                for pname, prop in node.props.items():
                    if pname == 'fit,loadables':
                        # Expand to a NUL-separated string list
                        val = '\0'.join(self._loadables) + '\0'
                        fsw.property('loadables', val.encode('utf-8'))
                    elif pname == 'fit,operation':
                        # Handled elsewhere; nothing to emit here
                        pass
                    elif pname.startswith('fit,'):
                        # Any other 'fit,' directive is unknown
                        self._raise_subnode(
                            node, f"Unknown directive '{pname}'")
                    else:
                        # Copy the property, substituting NAME and SEQ
                        val = prop.bytes.replace(
                            b'NAME', tools.to_bytes(fdt_fname))
                        val = val.replace(
                            b'SEQ', tools.to_bytes(str(seq + 1)))
                        fsw.property(pname, val)

                # Add data for 'images' nodes (but not 'config')
                if depth == 1 and in_images:
                    fsw.property('data', tools.read_file(fname))

                for subnode in node.subnodes:
                    with fsw.add_node(subnode.name):
                        _add_node(node, depth + 1, subnode)
    else:
        # No FDT list was provided; explain which mechanism was missing
        if self._fdts is None:
            if self._fit_list_prop:
                self.Raise(
                    'Generator node requires '
                    f"'{self._fit_list_prop.value}' entry argument")
            else:
                self.Raise(
                    "Generator node requires 'fit,fdt-list' property")
def test_get_file_offset(self):
    """Test GetFileOffset() gives the correct file offset for a symbol"""
    fname = self.ElfTestFile('embed_data')
    addr = elf.GetSymbols(fname, ['embed'])['embed'].address
    offset = elf.GetFileOffset(fname, addr)
    contents = tools.read_file(fname)

    # Just use the first 4 bytes and assume it is little endian
    value, = struct.unpack('<I', contents[offset:offset + 4])
    self.assertEqual(0x1234, value)
def test_read_segments_fail(self):
    """Test for read_loadable_segments() without elftools"""
    # Temporarily pretend elftools is not installed
    saved = elf.ELF_TOOLS
    elf.ELF_TOOLS = False
    try:
        fname = self.ElfTestFile('embed_data')
        with self.assertRaises(ValueError) as e:
            elf.read_loadable_segments(tools.read_file(fname))
        self.assertIn("Python: No module named 'elftools'",
                      str(e.exception))
    finally:
        elf.ELF_TOOLS = saved
def testEmbedData(self):
    """Test for the GetSymbolFileOffset() function"""
    if not elf.ELF_TOOLS:
        self.skipTest('Python elftools not available')
    fname = self.ElfTestFile('embed_data')
    offsets = elf.GetSymbolFileOffset(fname, ['embed_start', 'embed_end'])
    contents = tools.read_file(fname)

    # The region between the two symbols should hold three 32-bit values
    region = contents[offsets['embed_start'].offset:
                      offsets['embed_end'].offset]
    self.assertEqual(struct.pack('<III', 0x1234, 0x5678, 0), region)
def FromFile(cls, fname):
    """Convert an image file into an Image for use in binman

    Args:
        fname: Filename of image file to read

    Returns:
        Image object on success

    Raises:
        ValueError if something goes wrong
    """
    data = tools.read_file(fname)

    # First look for an image header, then fall back to the FDT map
    map_pos = image_header.LocateHeaderOffset(data)
    if map_pos is None:
        map_pos = fdtmap.LocateFdtmap(data)
        if map_pos is None:
            raise ValueError('Cannot find FDT map in image')

    # We don't know the FDT size, so check its header first
    probe_dtb = fdt.Fdt.FromData(
        data[map_pos + fdtmap.FDTMAP_HDR_LEN:map_pos + 256])
    dtb_size = probe_dtb.GetFdtObj().totalsize()
    fdtmap_data = data[map_pos:map_pos + dtb_size + fdtmap.FDTMAP_HDR_LEN]
    fdt_data = fdtmap_data[fdtmap.FDTMAP_HDR_LEN:]

    # Write the FDT out so it can be scanned
    out_fname = tools.get_output_filename('fdtmap.in.dtb')
    tools.write_file(out_fname, fdt_data)
    dtb = fdt.Fdt(out_fname)
    dtb.Scan()

    # Return an Image with the associated nodes
    root = dtb.GetRoot()
    image = Image('image', root, copy_to_orig=False, ignore_missing=True,
                  missing_etype=True, generate=False)
    image.image_node = fdt_util.GetString(root, 'image-node', 'image')
    image.fdtmap_dtb = dtb
    image.fdtmap_data = fdtmap_data
    image._data = data
    image._filename = fname
    image.image_name, _ = os.path.splitext(fname)
    return image
def ObtainContents(self):
    """Collect the input data and run mkimage on it

    Returns:
        bool: True if contents were set, False if they are not yet available
    """
    data, input_fname, uniq = self.collect_contents_to_file(
        self._mkimage_entries.values(), 'mkimage')
    if data is False:
        return False
    output_fname = tools.get_output_filename('mkimage-out.%s' % uniq)
    result = self.mkimage.run_cmd('-d', input_fname, *self._args,
                                  output_fname)
    if result is None:
        # Bintool is missing; just use the input data as the output
        self.record_missing_bintool(self.mkimage)
        self.SetContents(data)
    else:
        self.SetContents(tools.read_file(output_fname))
    return True
def show_event_spy_list(fname, endian):
    """Show the event-spy list from a U-Boot image

    Args:
        fname (str): Filename of ELF file
        endian (str): Endianness to use ('little', 'big', 'auto')
    """
    syms = elf.GetSymbolFileOffset(fname, [PREFIX])
    data = tools.read_file(fname)

    # Print a heading, then one row per event-spy symbol
    print('%-20s %-30s %s' % ('Event type', 'Id', 'Source location'))
    print('%-20s %-30s %s' % ('-' * 20, '-' * 30, '-' * 30))
    for name, sym in syms.items():
        evtype = RE_EVTYPE.search(name).group(1)
        show_sym(fname, data, endian, evtype, sym)
def GetVersion(path=OUR_PATH):
    """Get the version string for binman

    Args:
        path: Path to 'version' file

    Returns:
        str: String version, e.g. 'v2021.10'
    """
    version_fname = os.path.join(path, 'version')
    if not os.path.exists(version_fname):
        # No release file; this is a development tree
        return '(unreleased)'
    return tools.read_file(version_fname, binary=False)
def ReadBlobContents(self):
    """Pack the environment text file into U-Boot env-blob format

    Each input line becomes a NUL-terminated string, followed by a final
    NUL; the blob is padded with the fill value and prefixed with a CRC32
    and a 0x01 flags byte.

    Returns:
        bool: True, since contents are always set
    """
    indata = tools.read_file(self._pathname)
    parts = [line + b'\0' for line in indata.splitlines()]
    parts.append(b'\0')
    env = b''.join(parts)

    # Reserve 5 bytes for the CRC (4) and flags byte (1)
    pad = self.size - len(env) - 5
    if pad < 0:
        self.Raise(
            "'u-boot-env' entry too small to hold data (need %#x more bytes)"
            % -pad)
    env += tools.get_bytes(self.fill_value, pad)
    self.SetContents(struct.pack('<I', zlib.crc32(env)) + b'\x01' + env)
    return True
def decompress(self, indata):
    """Decompress data with lzma_alone

    Args:
        indata (bytes): Data to decompress

    Returns:
        bytes: Decompressed data
    """
    # Use temporary files since the tool works on files, not streams
    with tempfile.NamedTemporaryFile(prefix='decomp.tmp',
                                     dir=tools.get_output_dir()) as inf, \
            tempfile.NamedTemporaryFile(prefix='compo.otmp',
                                        dir=tools.get_output_dir()) as outf:
        tools.write_file(inf.name, indata)
        self.run_cmd('d', inf.name, outf.name, binary=True)
        return tools.read_file(outf.name, binary=True)
def test_scan(self):
    """Test scanning of a driver"""
    fname = os.path.join(OUR_PATH, '..', '..', 'drivers/i2c/tegra_i2c.c')
    scan = src_scan.Scanner(None, None)
    scan._parse_driver(fname, tools.read_file(fname, False))

    self.assertIn('i2c_tegra', scan._drivers)
    drv = scan._drivers['i2c_tegra']
    self.assertEqual('i2c_tegra', drv.name)
    self.assertEqual('UCLASS_I2C', drv.uclass_id)
    expected_compat = {
        'nvidia,tegra114-i2c': 'TYPE_114',
        'nvidia,tegra124-i2c': 'TYPE_114',
        'nvidia,tegra20-i2c': 'TYPE_STD',
        'nvidia,tegra20-i2c-dvc': 'TYPE_DVC',
    }
    self.assertEqual(expected_compat, drv.compat)
    self.assertEqual('i2c_bus', drv.priv)
    self.assertEqual(1, len(scan._drivers))
    self.assertEqual({}, scan._warnings)
def adjust_cfg_file(fname, adjust_cfg):
    """Make adjustments to a .config file

    Args:
        fname (str): Filename of .config file to change
        adjust_cfg (dict of str): Changes to make to .config file before
                building:
             key: str config to change, without the CONFIG_ prefix, e.g.
                 FRED
             value: str change to make (C is config option without prefix):
                C to enable C
                ~C to disable C
                C=val to set the value of C (val must have quotes if C is
                    a string Kconfig
    """
    cfg_lines = tools.read_file(fname, binary=False).splitlines()
    adjusted = adjust_cfg_lines(cfg_lines, adjust_cfg)
    # Rewrite the file with a trailing newline
    tools.write_file(fname, '\n'.join(adjusted) + '\n', binary=False)
def parse_macros(srcdir):
    """parse_macros: Parse the firmware_image_package.h file

    Args:
        srcdir (str): 'arm-trusted-firmware' source directory

    Returns:
        dict:
            key: UUID macro name, e.g. 'UUID_TRUSTED_FWU_CERT'
            value: list:
                file comment, e.g. 'ToC Entry UUIDs'
                macro name, e.g. 'UUID_TRUSTED_FWU_CERT'
                uuid as bytes(16)

    Raises:
        ValueError: a line cannot be parsed
    """
    # Matches one hex byte, e.g. '0xab'; a UUID line yields 16 of these
    re_uuid = re.compile('0x[0-9a-fA-F]{2}')
    # Matches a single-line C comment, capturing its text
    re_comment = re.compile(r'^/\* (.*) \*/$')
    fname = os.path.join(srcdir,
                         'include/tools_share/firmware_image_package.h')
    data = tools.read_file(fname, binary=False)
    macros = collections.OrderedDict()
    comment = None
    # NOTE(review): this parser is stateful — the most recent comment and
    # the most recent 'UUID' #define line are carried forward and attached
    # to the next '{{' (byte-list) line. It assumes the header always puts
    # a UUID #define before its byte list; a '{{' line appearing first
    # would hit an unassigned 'macro' — TODO confirm against the header.
    for linenum, line in enumerate(data.splitlines()):
        if line.startswith('/*'):
            mat = re_comment.match(line)
            if mat:
                # Remember this comment for subsequent macros
                comment = mat.group(1)
        else:
            # Example: #define UUID_TOS_FW_CONFIG \
            if 'UUID' in line:
                # Remember the macro name for the following byte-list line
                macro = line.split()[1]
            elif '{{' in line:
                # This line holds the 16 UUID bytes
                mat = re_uuid.findall(line)
                if not mat or len(mat) != 16:
                    raise ValueError(
                        f'{fname}: Cannot parse UUID line {linenum + 1}: Got matches: {mat}')
                uuid = bytes([int(val, 16) for val in mat])
                macros[macro] = comment, macro, uuid
    if not macros:
        raise ValueError(f'{fname}: Cannot parse file')
    return macros
def UpdateFile(infile, outfile, start_sym, end_sym, insert):
    """Write data into the region of a file between two symbols

    The bytes between start_sym and end_sym are replaced with the inserted
    data, zero-padded to fill the region, and the result is written out.

    Args:
        infile: Filename of input file
        outfile: Filename of output file
        start_sym: Name of symbol marking the start of the region
        end_sym: Name of symbol marking the end of the region
        insert: Data to insert (bytes)

    Raises:
        ValueError: symbols are missing or the data does not fit
    """
    tout.notice("Creating file '%s' with data length %#x (%d) between symbols '%s' and '%s'" %
                (outfile, len(insert), len(insert), start_sym, end_sym))
    syms = GetSymbolFileOffset(infile, [start_sym, end_sym])
    if len(syms) != 2:
        raise ValueError("Expected two symbols '%s' and '%s': got %d: %s" %
                         (start_sym, end_sym, len(syms),
                          ','.join(syms.keys())))

    start = syms[start_sym].offset
    end = syms[end_sym].offset
    size = end - start
    if len(insert) > size:
        raise ValueError("Not enough space in '%s' for data length %#x (%d); size is %#x (%d)" %
                         (infile, len(insert), len(insert), size, size))

    data = tools.read_file(infile)
    newdata = b''.join([
        data[:start],
        insert,
        tools.get_bytes(0, size - len(insert)),  # zero-fill the remainder
        data[end:],
    ])
    tools.write_file(outfile, newdata)
    tout.info('Written to offset %#x' % start)
def testDecodeElf(self):
    """Test for the MakeElf function"""
    if not elf.ELF_TOOLS:
        self.skipTest('Python elftools not available')
    outdir = tempfile.mkdtemp(prefix='elf.')
    text = b'1234'
    rodata = b'wxyz'
    elf_fname = os.path.join(outdir, 'elf')
    elf.MakeElf(elf_fname, text, rodata)
    data = tools.read_file(elf_fname)

    load = 0xfef20000
    entry = load + 2
    expected = text + rodata
    self.assertEqual(elf.ElfInfo(expected, load, entry, len(expected)),
                     elf.DecodeElf(data, 0))
    # Decoding from an offset should blank out the skipped bytes
    self.assertEqual(
        elf.ElfInfo(b'\0\0' + expected[2:], load, entry, len(expected)),
        elf.DecodeElf(data, load + 2))
    shutil.rmtree(outdir)
def create_fiptool_image(self):
    """Create an image with fiptool which we can use for testing

    Returns:
        FipReader: reader for the image
    """
    # Write out the three input blobs
    paths = {}
    for name, blob in (('fwu', self.fwu_data),
                       ('tb_fw', self.tb_fw_data),
                       ('other_fw', self.other_fw_data)):
        paths[name] = os.path.join(self._indir, name)
        tools.write_file(paths[name], blob)

    fname = tools.get_output_filename('data.fip')
    uuid = 'e3b78d9e-4a64-11ec-b45c-fba2b9b49788'
    FIPTOOL.create_new(fname, 8, 0x123, paths['fwu'], paths['tb_fw'],
                       uuid, paths['other_fw'])

    return fip_util.FipReader(tools.read_file(fname))
def _compare_expected_cbfs(self, data, cbfstool_fname):
    """Compare against what cbfstool creates

    This compares what binman creates with what cbfstool creates for what
    is purportedly the same thing.

    Args:
        data: CBFS created by binman
        cbfstool_fname: CBFS created by cbfstool
    """
    if not self.have_cbfstool or not self.have_lz4:
        return
    expect = tools.read_file(cbfstool_fname)
    if expect == data:
        return
    # Leave the two images around for manual inspection and print a
    # ready-made command to diff them
    tools.write_file('/tmp/expect', expect)
    tools.write_file('/tmp/actual', data)
    print('diff -y <(xxd -g1 /tmp/expect) <(xxd -g1 /tmp/actual) | colordiff')
    self.fail('cbfstool produced a different result')
def ObtainContents(self):
    """Collect the input data, write it to a file and run mkimage

    Returns:
        bool: True if contents were set, False if an input entry's data is
            not yet available
    """
    # First get the input data and put it in a file. If not available,
    # try later.
    parts = []
    for entry in self._mkimage_entries.values():
        if not entry.ObtainContents():
            return False
        parts.append(entry.GetData())
    data = b''.join(parts)

    uniq = self.GetUniqueName()
    input_fname = tools.get_output_filename('mkimage.%s' % uniq)
    tools.write_file(input_fname, data)
    output_fname = tools.get_output_filename('mkimage-out.%s' % uniq)

    result = self.mkimage.run_cmd('-d', input_fname, *self._args,
                                  output_fname)
    if result is None:
        # Bintool is missing; just use the input data as the output
        self.record_missing_bintool(self.mkimage)
        self.SetContents(data)
    else:
        self.SetContents(tools.read_file(output_fname))
    return True