def parse_atf_source(srcdir, dstfile, oldfile):
    """Parse the ATF source tree and update this file

    Args:
        srcdir (str): Path to 'arm-trusted-firmware' directory. Get this from:
            https://github.com/ARM-software/arm-trusted-firmware.git
        dstfile (str): File to write new code to, if an update is needed
        oldfile (str): Python source file to compare against

    Raises:
        ValueError: srcdir readme.rst is missing or the first line does not
            match what is expected
    """
    # Sanity-check that srcdir really looks like an ATF tree before parsing
    readme_fname = os.path.join(srcdir, 'readme.rst')
    if not os.path.exists(readme_fname):
        raise ValueError(
            f"Expected file '{readme_fname}' - try using -s to specify the "
            'arm-trusted-firmware directory')
    readme = tools.ReadFile(readme_fname, binary=False)
    first_line = 'Trusted Firmware-A'
    if readme.splitlines()[0] != first_line:
        raise ValueError(f"'{readme_fname}' does not start with '{first_line}'")

    # Generate the new code from the parsed macros and entry names
    output = create_code_output(parse_macros(srcdir), parse_names(srcdir))
    orig = tools.ReadFile(oldfile, binary=False)

    # Splice the generated list between the existing markers, if present
    re_fip_list = re.compile(r'(.*FIP_TYPE_LIST = \[).*?( ] # end.*)', re.S)
    mat = re_fip_list.match(orig)
    new_code = mat.group(1) + '\n' + output + mat.group(2) if mat else output

    if new_code == orig:
        print(f"Existing code in '{oldfile}' is up-to-date")
    else:
        tools.WriteFile(dstfile, new_code, binary=False)
        print(f'Needs update, try:\n\tmeld {dstfile} {oldfile}')
def ObtainContents(self):
    """Build the GBB with the futility bintool

    If the bintool is missing, zero-filled data of the required size is
    used instead and the missing tool is recorded.
    """
    fname = tools.GetOutputFilename('gbb.bin')
    if not self.size:
        self.Raise('GBB must have a fixed size')
    gbb_size = self.size
    # The GBB header and keys take up 0x2180 bytes; the rest is the bitmap
    bmpfv_size = gbb_size - 0x2180
    if bmpfv_size < 0:
        self.Raise('GBB is too small (minimum 0x2180 bytes)')
    keydir = tools.GetInputFilename(self.keydir)

    ok = self.futility.gbb_create(
        fname, [0x100, 0x1000, bmpfv_size, 0x1000]) is not None
    if ok:
        ok = self.futility.gbb_set(
            fname,
            hwid=self.hardware_id,
            rootkey='%s/root_key.vbpubk' % keydir,
            recoverykey='%s/recovery_key.vbpubk' % keydir,
            flags=self.gbb_flags,
            bmpfv=tools.GetInputFilename(self.bmpblk)) is not None

    if ok:
        self.SetContents(tools.ReadFile(fname))
    else:
        # Bintool is missing; just use the required amount of zero data
        self.record_missing_bintool(self.futility)
        self.SetContents(tools.GetBytes(0, gbb_size))
    return True
def ObtainContents(self): """Obtain the contents of the FIT This adds the 'data' properties to the input ITB (Image-tree Binary) then runs mkimage to process it. """ # self._BuildInput() either returns bytes or raises an exception. data = self._BuildInput(self._fdt) uniq = self.GetUniqueName() input_fname = tools.GetOutputFilename('%s.itb' % uniq) output_fname = tools.GetOutputFilename('%s.fit' % uniq) tools.WriteFile(input_fname, data) tools.WriteFile(output_fname, data) args = {} ext_offset = self._fit_props.get('fit,external-offset') if ext_offset is not None: args = { 'external': True, 'pad': fdt_util.fdt32_to_cpu(ext_offset.value) } if self.mkimage.run(reset_timestamp=True, output_fname=output_fname, **args) is not None: self.SetContents(tools.ReadFile(output_fname)) else: # Bintool is missing; just use empty data as the output self.record_missing_bintool(self.mkimage) self.SetContents(tools.GetBytes(0, 1024)) return True
def check_cfg_file(fname, adjust_cfg):
    """Check that a config file has been adjusted according to adjust_cfg

    Args:
        fname (str): Filename of .config file to change
        adjust_cfg (dict of str): Changes to make to .config file before
                building:
             key: str config to change, without the CONFIG_ prefix, e.g.
                 FRED
             value: str change to make (C is config option without prefix):
                C to enable C
                ~C to disable C
                C=val to set the value of C (val must have quotes if C is
                    a string Kconfig)

    Returns:
        str: None if OK, else an error string listing the problems
    """
    lines = tools.ReadFile(fname, binary=False).splitlines()
    bad_cfgs = check_cfg_lines(lines, adjust_cfg)
    if bad_cfgs:
        out = [f'{cfg:20}  {line}' for cfg, line in bad_cfgs]
        # Join with a real newline so each failed adjustment appears on its
        # own line (previously '\\n' emitted a literal backslash-n)
        content = '\n'.join(out)
        return f'''
Some CONFIG adjustments did not take effect. This may be because the request
CONFIGs do not exist or conflict with others.

Failed adjustments:

{content}
'''
    return None
def GetVblock(self):
    """Sign the content entries and return the resulting vblock

    Returns:
        bytes: signed vblock, or False if the content data is not yet
            available
    """
    # Join up the data files to be signed
    parts = []
    for entry_phandle in self.content:
        data = self.section.GetContentsByPhandle(entry_phandle, self)
        if data is None:
            # Data not available yet
            return False
        parts.append(data)
    input_data = b''.join(parts)

    uniq = self.GetUniqueName()
    output_fname = tools.GetOutputFilename('vblock.%s' % uniq)
    input_fname = tools.GetOutputFilename('input.%s' % uniq)
    tools.WriteFile(input_fname, input_data)
    prefix = self.keydir + '/'
    args = [
        'vbutil_firmware',
        '--vblock', output_fname,
        '--keyblock', prefix + self.keyblock,
        '--signprivate', prefix + self.signprivate,
        '--version', '%d' % self.version,
        '--fv', input_fname,
        '--kernelkey', prefix + self.kernelkey,
        '--flags', '%d' % self.preamble_flags,
    ]
    tools.Run('futility', *args)
    return tools.ReadFile(output_fname)
def test_multi_to_file(self): """Test output of multiple pieces to a single file""" dtb_file = get_dtb_file('dtoc_test_simple.dts') output = tools.GetOutputFilename('output') self.run_test(['all'], dtb_file, output) data = tools.ReadFile(output, binary=False) self._check_strings(self.platdata_text + self.struct_text, data)
def ReadFileContents(self, pathname): """Read blob contents into memory This function compresses the data before returning if needed. We assume the data is small enough to fit into memory. If this is used for large filesystem image that might not be true. In that case, Image.BuildImage() could be adjusted to use a new Entry method which can read in chunks. Then we could copy the data in chunks and avoid reading it all at once. For now this seems like an unnecessary complication. Args: pathname (str): Pathname to read from Returns: bytes: Data read """ state.TimingStart('read') indata = tools.ReadFile(pathname) state.TimingAccum('read') state.TimingStart('compress') data = self.CompressData(indata) state.TimingAccum('compress') return data
def test_fetch_pass_fail(self): """Test fetching multiple tools with some passing and some failing""" def handle_download(_): """Take the tools.Download() function by writing a file""" if self.seq: raise urllib.error.URLError('not found') self.seq += 1 tools.WriteFile(fname, expected) return fname, dirname expected = b'this is a test' dirname = os.path.join(self._indir, 'download_dir') os.mkdir(dirname) fname = os.path.join(dirname, 'downloaded') destdir = os.path.join(self._indir, 'dest_dir') os.mkdir(destdir) dest_fname = os.path.join(destdir, '_testing') self.seq = 0 with unittest.mock.patch.object(bintool, 'DOWNLOAD_DESTDIR', destdir): with unittest.mock.patch.object(tools, 'Download', side_effect=handle_download): with test_util.capture_sys_output() as (stdout, _): Bintool.fetch_tools(bintool.FETCH_ANY, ['_testing'] * 2) self.assertTrue(os.path.exists(dest_fname)) data = tools.ReadFile(dest_fname) self.assertEqual(expected, data) lines = stdout.getvalue().splitlines() self.assertTrue(len(lines) > 2) self.assertEqual('Tools fetched: 1: _testing', lines[-2]) self.assertEqual('Failures: 1: _testing', lines[-1])
def test_all_bintools(self): """Test that all bintools can handle all available fetch types""" def handle_download(_): """Take the tools.Download() function by writing a file""" tools.WriteFile(fname, expected) return fname, dirname def fake_run(*cmd): if cmd[0] == 'make': # See Bintool.build_from_git() tmpdir = cmd[2] self.fname = os.path.join(tmpdir, 'pathname') tools.WriteFile(self.fname, b'hello') expected = b'this is a test' dirname = os.path.join(self._indir, 'download_dir') os.mkdir(dirname) fname = os.path.join(dirname, 'downloaded') with unittest.mock.patch.object(tools, 'Run', side_effect=fake_run): with unittest.mock.patch.object(tools, 'Download', side_effect=handle_download): with test_util.capture_sys_output() as _: for name in Bintool.get_tool_list(): btool = Bintool.create(name) for method in range(bintool.FETCH_COUNT): result = btool.fetch(method) self.assertTrue(result is not False) if result is not True and result is not None: result_fname, _ = result self.assertTrue(os.path.exists(result_fname)) data = tools.ReadFile(result_fname) self.assertEqual(expected, data) os.remove(result_fname)
def GetFdtContents(etype='u-boot-dtb'):
    """Looks up the FDT pathname and contents

    This is used to obtain the Fdt pathname and contents when needed by an
    entry. It supports a 'fake' dtb, allowing tests to substitute test data
    for the real dtb.

    Args:
        etype: Entry type to look up (e.g. 'u-boot.dtb').

    Returns:
        tuple:
            pathname to Fdt
            Fdt data (as bytes)
    """
    if etype not in output_fdt_info:
        return None, None
    if use_fake_dtb:
        # Tests substitute an input file for the real device tree
        fname = output_fdt_info[etype][1]
        pathname = tools.GetInputFilename(fname)
        data = tools.ReadFile(pathname)
    else:
        pathname = GetFdtPath(etype)
        data = GetFdtForEtype(etype).GetContents()
    return pathname, data
def test_cbfs_stage(self): """Tests handling of a Coreboot Filesystem (CBFS)""" if not elf.ELF_TOOLS: self.skipTest('Python elftools not available') elf_fname = os.path.join(self._indir, 'cbfs-stage.elf') elf.MakeElf(elf_fname, U_BOOT_DATA, U_BOOT_DTB_DATA) size = 0xb0 cbw = CbfsWriter(size) cbw.add_file_stage('u-boot', tools.ReadFile(elf_fname)) data = cbw.get_data() cbfs = self._check_hdr(data, size) load = 0xfef20000 entry = load + 2 cfile = self._check_uboot(cbfs, cbfs_util.TYPE_STAGE, offset=0x28, data=U_BOOT_DATA + U_BOOT_DTB_DATA) self.assertEqual(entry, cfile.entry) self.assertEqual(load, cfile.load) self.assertEqual( len(U_BOOT_DATA) + len(U_BOOT_DTB_DATA), cfile.data_len) # Compare against what cbfstool creates if self.have_cbfstool: cbfs_fname = os.path.join(self._indir, 'test.cbfs') self.cbfstool.create_new(cbfs_fname, size) self.cbfstool.add_stage(cbfs_fname, 'u-boot', elf_fname) self._compare_expected_cbfs(data, cbfs_fname)
def ObtainContents(self):
    """Build the GBB by running futility's gbb_utility directly"""
    fname = tools.GetOutputFilename('gbb.bin')
    if not self.size:
        self.Raise('GBB must have a fixed size')
    gbb_size = self.size
    # The GBB header and keys take up 0x2180 bytes; the rest is the bitmap
    bmpfv_size = gbb_size - 0x2180
    if bmpfv_size < 0:
        self.Raise('GBB is too small (minimum 0x2180 bytes)')
    size_list = ['%#x' % size
                 for size in [0x100, 0x1000, bmpfv_size, 0x1000]]
    keydir = tools.GetInputFilename(self.keydir)
    gbb_set_command = [
        'gbb_utility', '-s',
        '--hwid=%s' % self.hardware_id,
        '--rootkey=%s/root_key.vbpubk' % keydir,
        '--recoverykey=%s/recovery_key.vbpubk' % keydir,
        '--flags=%d' % self.gbb_flags,
        '--bmpfv=%s' % tools.GetInputFilename(self.bmpblk),
        fname,
    ]

    # Create the GBB layout, then fill in the keys/flags/bitmap
    tools.Run('futility', 'gbb_utility', '-c', ','.join(size_list), fname)
    tools.Run('futility', *gbb_set_command)
    self.SetContents(tools.ReadFile(fname))
    return True
def parse_names(srcdir):
    """parse_names: Parse the tbbr_config.c file

    Args:
        srcdir (str): 'arm-trusted-firmware' source directory

    Returns:
        tuple: dict of entries:
            key: UUID macro, e.g. 'UUID_NON_TRUSTED_FIRMWARE_BL33'
            tuple: entry information
                Description of entry, e.g. 'Non-Trusted Firmware BL33'
                UUID macro, e.g. 'UUID_NON_TRUSTED_FIRMWARE_BL33'
                Name of entry, e.g. 'nt-fw'

    Raises:
        ValueError: the file cannot be parsed
    """
    # Extract the .name, .uuid and .cmdline_name values, e.g.:
    #  {
    #      .name = "Secure Payload BL32 Extra2 (Trusted OS Extra2)",
    #      .uuid = UUID_SECURE_PAYLOAD_BL32_EXTRA2,
    #      .cmdline_name = "tos-fw-extra2"
    #  },
    re_data = re.compile(
        r'\.name = "([^"]*)",\s*\.uuid = (UUID_\w*),\s*\.cmdline_name = "([^"]+)"',
        re.S)
    fname = os.path.join(srcdir, 'tools/fiptool/tbbr_config.c')
    contents = tools.ReadFile(fname, binary=False)

    entries = re_data.findall(contents)
    if not entries:
        raise ValueError(f'{fname}: Cannot parse file')
    return {uuid: (desc, uuid, name) for desc, uuid, name in entries}
def ObtainContents(self): """Obtain the contents of the FIT This adds the 'data' properties to the input ITB (Image-tree Binary) then runs mkimage to process it. """ # self._BuildInput() either returns bytes or raises an exception. data = self._BuildInput(self._fdt) uniq = self.GetUniqueName() input_fname = tools.GetOutputFilename('%s.itb' % uniq) output_fname = tools.GetOutputFilename('%s.fit' % uniq) tools.WriteFile(input_fname, data) tools.WriteFile(output_fname, data) args = [] ext_offset = self._fit_props.get('fit,external-offset') if ext_offset is not None: args += [ '-E', '-p', '%x' % fdt_util.fdt32_to_cpu(ext_offset.value) ] tools.Run('mkimage', '-t', '-F', output_fname, *args) self.SetContents(tools.ReadFile(output_fname)) return True
def ReadBlobContents(self):
    """Read the blob, first stripping symbols if requested"""
    if self._strip:
        uniq = self.GetUniqueName()
        stripped_fname = tools.GetOutputFilename('%s.stripped' % uniq)
        # Strip a copy so the original input file is left untouched
        tools.WriteFile(stripped_fname, tools.ReadFile(self._pathname))
        tools.Run('strip', stripped_fname)
        self._pathname = stripped_fname
    super().ReadBlobContents()
    return True
def testEmbedData(self): """Test for the GetSymbolFileOffset() function""" if not elf.ELF_TOOLS: self.skipTest('Python elftools not available') fname = self.ElfTestFile('embed_data') offset = elf.GetSymbolFileOffset(fname, ['embed_start', 'embed_end']) start = offset['embed_start'].offset end = offset['embed_end'].offset data = tools.ReadFile(fname) embed_data = data[start:end] expect = struct.pack('<III', 0x1234, 0x5678, 0) self.assertEqual(expect, embed_data)
def ReadBlobContents(self):
    """Build a U-Boot environment blob from a text file

    Each input line becomes a NUL-terminated string, the block is padded
    with the fill value, and a 4-byte CRC plus a 1-byte flag are prepended.
    """
    indata = tools.ReadFile(self._pathname)
    # NUL-terminate every line, then add the final terminating NUL
    data = b''.join(line + b'\0' for line in indata.splitlines()) + b'\0'
    # Reserve 5 bytes for the header (4-byte CRC + 1-byte flag)
    pad = self.size - len(data) - 5
    if pad < 0:
        self.Raise("'u-boot-env' entry too small to hold data (need %#x more bytes)" % -pad)
    data += tools.GetBytes(self.fill_value, pad)
    crc = zlib.crc32(data)
    self.SetContents(struct.pack('<I', crc) + b'\x01' + data)
    return True
def FromFile(cls, fname): """Convert an image file into an Image for use in binman Args: fname: Filename of image file to read Returns: Image object on success Raises: ValueError if something goes wrong """ data = tools.ReadFile(fname) size = len(data) # First look for an image header pos = image_header.LocateHeaderOffset(data) if pos is None: # Look for the FDT map pos = fdtmap.LocateFdtmap(data) if pos is None: raise ValueError('Cannot find FDT map in image') # We don't know the FDT size, so check its header first probe_dtb = fdt.Fdt.FromData(data[pos + fdtmap.FDTMAP_HDR_LEN:pos + 256]) dtb_size = probe_dtb.GetFdtObj().totalsize() fdtmap_data = data[pos:pos + dtb_size + fdtmap.FDTMAP_HDR_LEN] fdt_data = fdtmap_data[fdtmap.FDTMAP_HDR_LEN:] out_fname = tools.GetOutputFilename('fdtmap.in.dtb') tools.WriteFile(out_fname, fdt_data) dtb = fdt.Fdt(out_fname) dtb.Scan() # Return an Image with the associated nodes root = dtb.GetRoot() image = Image('image', root, copy_to_orig=False, ignore_missing=True, missing_etype=True) image.image_node = fdt_util.GetString(root, 'image-node', 'image') image.fdtmap_dtb = dtb image.fdtmap_data = fdtmap_data image._data = data image._filename = fname image.image_name, _ = os.path.splitext(fname) return image
def ObtainContents(self):
    """Collect sub-entry data and run it through mkimage"""
    parts = []
    for entry in self._mkimage_entries.values():
        # First get the input data and put it in a file. If not available,
        # try later.
        if not entry.ObtainContents():
            return False
        parts.append(entry.GetData())
    data = b''.join(parts)

    uniq = self.GetUniqueName()
    input_fname = tools.GetOutputFilename('mkimage.%s' % uniq)
    tools.WriteFile(input_fname, data)
    output_fname = tools.GetOutputFilename('mkimage-out.%s' % uniq)
    tools.Run('mkimage', '-d', input_fname, *self._args, output_fname)
    self.SetContents(tools.ReadFile(output_fname))
    return True
def GetVersion(path=OUR_PATH):
    """Get the version string for binman

    Args:
        path: Path to 'version' file

    Returns:
        str: String version, e.g. 'v2021.10'
    """
    version_fname = os.path.join(path, 'version')
    if not os.path.exists(version_fname):
        # No version file means this is not a released tree
        return '(unreleased)'
    return tools.ReadFile(version_fname, binary=False)
def ReadBlobContents(self): """Read blob contents into memory This function compresses the data before storing if needed. We assume the data is small enough to fit into memory. If this is used for large filesystem image that might not be true. In that case, Image.BuildImage() could be adjusted to use a new Entry method which can read in chunks. Then we could copy the data in chunks and avoid reading it all at once. For now this seems like an unnecessary complication. """ indata = tools.ReadFile(self._pathname) data = self.CompressData(indata) self.SetContents(data) return True
def decompress(self, indata):
    """Decompress data with lzma_alone

    Args:
        indata (bytes): Data to decompress

    Returns:
        bytes: Decompressed data
    """
    outdir = tools.GetOutputDir()
    # Use temporary files for both sides since the tool works on files
    with tempfile.NamedTemporaryFile(prefix='decomp.tmp',
                                     dir=outdir) as inf, \
            tempfile.NamedTemporaryFile(prefix='compo.otmp',
                                        dir=outdir) as outf:
        tools.WriteFile(inf.name, indata)
        self.run_cmd('d', inf.name, outf.name, binary=True)
        return tools.ReadFile(outf.name, binary=True)
def parse_macros(srcdir):
    """parse_macros: Parse the firmware_image_package.h file

    Args:
        srcdir (str): 'arm-trusted-firmware' source directory

    Returns:
        dict:
            key: UUID macro name, e.g. 'UUID_TRUSTED_FWU_CERT'
            value: list:
                file comment, e.g. 'ToC Entry UUIDs'
                macro name, e.g. 'UUID_TRUSTED_FWU_CERT'
                uuid as bytes(16)

    Raises:
        ValueError: a line cannot be parsed
    """
    # Matches one hex byte of a UUID, e.g. '0x65'
    re_uuid = re.compile('0x[0-9a-fA-F]{2}')
    # Matches a single-line C comment, e.g. '/* ToC Entry UUIDs */'
    re_comment = re.compile(r'^/\* (.*) \*/$')
    fname = os.path.join(srcdir,
                         'include/tools_share/firmware_image_package.h')
    data = tools.ReadFile(fname, binary=False)
    macros = collections.OrderedDict()
    comment = None
    # Walk the file line by line, remembering the most recent comment and
    # macro name so they can be attached to the UUID bytes when found
    for linenum, line in enumerate(data.splitlines()):
        if line.startswith('/*'):
            mat = re_comment.match(line)
            if mat:
                comment = mat.group(1)
        else:
            # Example: #define UUID_TOS_FW_CONFIG \
            if 'UUID' in line:
                macro = line.split()[1]
            elif '{{' in line:
                # The 16 UUID bytes, e.g. {{0x65, 0x92, ...}}
                # NOTE(review): assumes a 'UUID' line always precedes this
                # one, so 'macro' is bound — confirm against the header file
                mat = re_uuid.findall(line)
                if not mat or len(mat) != 16:
                    raise ValueError(
                        f'{fname}: Cannot parse UUID line {linenum + 1}: Got matches: {mat}'
                    )
                uuid = bytes([int(val, 16) for val in mat])
                macros[macro] = comment, macro, uuid
    if not macros:
        raise ValueError(f'{fname}: Cannot parse file')
    return macros
def test_scan(self): """Test scanning of a driver""" fname = os.path.join(OUR_PATH, '..', '..', 'drivers/i2c/tegra_i2c.c') buff = tools.ReadFile(fname, False) scan = src_scan.Scanner(None, None) scan._parse_driver(fname, buff) self.assertIn('i2c_tegra', scan._drivers) drv = scan._drivers['i2c_tegra'] self.assertEqual('i2c_tegra', drv.name) self.assertEqual('UCLASS_I2C', drv.uclass_id) self.assertEqual( { 'nvidia,tegra114-i2c': 'TYPE_114', 'nvidia,tegra20-i2c': 'TYPE_STD', 'nvidia,tegra20-i2c-dvc': 'TYPE_DVC' }, drv.compat) self.assertEqual('i2c_bus', drv.priv) self.assertEqual(1, len(scan._drivers))
def adjust_cfg_file(fname, adjust_cfg):
    """Make adjustments to a .config file

    Args:
        fname (str): Filename of .config file to change
        adjust_cfg (dict of str): Changes to make to .config file before
                building:
             key: str config to change, without the CONFIG_ prefix, e.g.
                 FRED
             value: str change to make (C is config option without prefix):
                C to enable C
                ~C to disable C
                C=val to set the value of C (val must have quotes if C is
                    a string Kconfig)
    """
    cfg_lines = tools.ReadFile(fname, binary=False).splitlines()
    new_lines = adjust_cfg_lines(cfg_lines, adjust_cfg)
    # Write the adjusted file back, with a trailing newline
    tools.WriteFile(fname, '\n'.join(new_lines) + '\n', binary=False)
def testDecodeElf(self): """Test for the MakeElf function""" if not elf.ELF_TOOLS: self.skipTest('Python elftools not available') outdir = tempfile.mkdtemp(prefix='elf.') expected_text = b'1234' expected_data = b'wxyz' elf_fname = os.path.join(outdir, 'elf') elf.MakeElf(elf_fname, expected_text, expected_data) data = tools.ReadFile(elf_fname) load = 0xfef20000 entry = load + 2 expected = expected_text + expected_data self.assertEqual(elf.ElfInfo(expected, load, entry, len(expected)), elf.DecodeElf(data, 0)) self.assertEqual( elf.ElfInfo(b'\0\0' + expected[2:], load, entry, len(expected)), elf.DecodeElf(data, load + 2)) shutil.rmtree(outdir)
def test_simple(self): """Test output from some simple nodes with various types of data""" dtb_file = get_dtb_file('dtoc_test_simple.dts') output = tools.GetOutputFilename('output') self.run_test(['struct'], dtb_file, output) with open(output) as infile: data = infile.read() self._check_strings(self.struct_text, data) self.run_test(['platdata'], dtb_file, output) with open(output) as infile: data = infile.read() self._check_strings(self.platdata_text, data) # Try the 'all' command self.run_test(['all'], dtb_file, output) data = tools.ReadFile(output, binary=False) self._check_strings(self.platdata_text + self.struct_text, data)
def _compare_expected_cbfs(self, data, cbfstool_fname): """Compare against what cbfstool creates This compares what binman creates with what cbfstool creates for what is proportedly the same thing. Args: data: CBFS created by binman cbfstool_fname: CBFS created by cbfstool """ if not self.have_cbfstool or not self.have_lz4: return expect = tools.ReadFile(cbfstool_fname) if expect != data: tools.WriteFile('/tmp/expect', expect) tools.WriteFile('/tmp/actual', data) print( 'diff -y <(xxd -g1 /tmp/expect) <(xxd -g1 /tmp/actual) | colordiff' ) self.fail('cbfstool produced a different result')
def create_fiptool_image(self): """Create an image with fiptool which we can use for testing Returns: FipReader: reader for the image """ fwu = os.path.join(self._indir, 'fwu') tools.WriteFile(fwu, self.fwu_data) tb_fw = os.path.join(self._indir, 'tb_fw') tools.WriteFile(tb_fw, self.tb_fw_data) other_fw = os.path.join(self._indir, 'other_fw') tools.WriteFile(other_fw, self.other_fw_data) fname = tools.GetOutputFilename('data.fip') uuid = 'e3b78d9e-4a64-11ec-b45c-fba2b9b49788' FIPTOOL.create_new(fname, 8, 0x123, fwu, tb_fw, uuid, other_fw) return fip_util.FipReader(tools.ReadFile(fname))
def GetVblock(self, required): """Get the contents of this entry Args: required: True if the data must be present, False if it is OK to return None Returns: bytes content of the entry, which is the signed vblock for the provided data """ # Join up the data files to be signed input_data = self.GetContents(required) if input_data is None: return None uniq = self.GetUniqueName() output_fname = tools.GetOutputFilename('vblock.%s' % uniq) input_fname = tools.GetOutputFilename('input.%s' % uniq) tools.WriteFile(input_fname, input_data) prefix = self.keydir + '/' args = [ 'vbutil_firmware', '--vblock', output_fname, '--keyblock', prefix + self.keyblock, '--signprivate', prefix + self.signprivate, '--version', '%d' % self.version, '--fv', input_fname, '--kernelkey', prefix + self.kernelkey, '--flags', '%d' % self.preamble_flags, ] #out.Notice("Sign '%s' into %s" % (', '.join(self.value), self.label)) stdout = tools.Run('futility', *args) return tools.ReadFile(output_fname)