def _gen_fdt_nodes(base_node, node, depth, in_images):
    """Generate FDT nodes

    This creates one node for each member of self._fdts using the provided
    template. If a property value contains 'NAME' it is replaced with the
    filename of the FDT. If a property value contains SEQ it is replaced
    with the node sequence number, where 1 is the first.

    Args:
        base_node: Base Node of the FIT (with 'description' property)
        node: Generator node to process
        depth: Current node depth (0 is the base 'fit' node)
        in_images: True if this is inside the 'images' node, so that
            'data' properties should be generated
    """
    if self._fdts:
        # Generate nodes for each FDT
        for seq, fdt_fname in enumerate(self._fdts):
            node_name = node.name[1:].replace('SEQ', str(seq + 1))
            fname = tools.get_input_filename(fdt_fname + '.dtb')
            with fsw.add_node(node_name):
                for pname, prop in node.props.items():
                    if pname == 'fit,loadables':
                        val = '\0'.join(self._loadables) + '\0'
                        fsw.property('loadables', val.encode('utf-8'))
                    elif pname == 'fit,operation':
                        pass
                    elif pname.startswith('fit,'):
                        self._raise_subnode(
                            node, f"Unknown directive '{pname}'")
                    else:
                        val = prop.bytes.replace(
                            b'NAME', tools.to_bytes(fdt_fname))
                        val = val.replace(
                            b'SEQ', tools.to_bytes(str(seq + 1)))
                        fsw.property(pname, val)

                # Add data for 'images' nodes (but not 'config')
                if depth == 1 and in_images:
                    fsw.property('data', tools.read_file(fname))

                for subnode in node.subnodes:
                    with fsw.add_node(subnode.name):
                        _add_node(node, depth + 1, subnode)
    else:
        if self._fdts is None:
            if self._fit_list_prop:
                self.Raise(
                    'Generator node requires '
                    f"'{self._fit_list_prop.value}' entry argument")
            else:
                self.Raise(
                    "Generator node requires 'fit,fdt-list' property")
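# A minimal, self-contained sketch of the NAME/SEQ substitution performed
# above. The helper below is hypothetical (not part of binman) and stands in
# for the prop.bytes handling done through fsw; the FDT name 'imx8mp-evk' is
# just an assumed example value.
def _subst_name_seq_example(prop_bytes, fdt_fname, seq):
    """Replace the NAME and SEQ placeholders in a template property value."""
    val = prop_bytes.replace(b'NAME', fdt_fname.encode('utf-8'))
    return val.replace(b'SEQ', str(seq + 1).encode('utf-8'))

# The first FDT (seq 0) produces sequence number 1:
assert _subst_name_seq_example(b'config-SEQ for NAME', 'imx8mp-evk', 0) == \
    b'config-1 for imx8mp-evk'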
def __init__(self, section, etype, node):
    super().__init__(section, etype, node)
    value = fdt_util.GetString(self._node, 'text')
    if value:
        value = tools.to_bytes(value)
    else:
        # No 'text' property: fall back to the entry argument named by
        # 'text-label' (or by the 'text-label' property)
        label, = self.GetEntryArgsOrProps([EntryArg('text-label', str)])
        self.text_label = label
        if self.text_label:
            value, = self.GetEntryArgsOrProps(
                [EntryArg(self.text_label, str)])
            value = tools.to_bytes(value) if value is not None else value
    self.value = value
def testAddMore(self):
    """Test various other methods for adding and setting properties"""
    self.node.AddZeroProp('one')
    self.dtb.Sync(auto_resize=True)
    data = self.fdt.getprop(self.node.Offset(), 'one')
    self.assertEqual(0, fdt32_to_cpu(data))

    self.node.SetInt('one', 1)
    self.dtb.Sync(auto_resize=False)
    data = self.fdt.getprop(self.node.Offset(), 'one')
    self.assertEqual(1, fdt32_to_cpu(data))

    val = 1234
    self.node.AddInt('integer', val)
    self.dtb.Sync(auto_resize=True)
    data = self.fdt.getprop(self.node.Offset(), 'integer')
    self.assertEqual(val, fdt32_to_cpu(data))

    val = '123' + chr(0) + '456'
    self.node.AddString('string', val)
    self.dtb.Sync(auto_resize=True)
    data = self.fdt.getprop(self.node.Offset(), 'string')
    self.assertEqual(tools.to_bytes(val) + b'\0', data)

    self.fdt.pack()
    self.node.SetString('string', val + 'x')
    with self.assertRaises(libfdt.FdtException) as e:
        self.dtb.Sync(auto_resize=False)
    self.assertIn('FDT_ERR_NOSPACE', str(e.exception))

    self.node.SetString('string', val[:-1])
    prop = self.node.props['string']
    prop.SetData(tools.to_bytes(val))
    self.dtb.Sync(auto_resize=False)
    data = self.fdt.getprop(self.node.Offset(), 'string')
    self.assertEqual(tools.to_bytes(val), data)

    self.node.AddEmptyProp('empty', 5)
    self.dtb.Sync(auto_resize=True)
    prop = self.node.props['empty']
    prop.SetData(tools.to_bytes(val))
    self.dtb.Sync(auto_resize=False)
    data = self.fdt.getprop(self.node.Offset(), 'empty')
    self.assertEqual(tools.to_bytes(val), data)

    self.node.SetData('empty', b'123')
    self.assertEqual(b'123', prop.bytes)

    # Try adding a lot of data at once
    self.node.AddData('data', tools.get_bytes(65, 20000))
    self.dtb.Sync(auto_resize=True)
def _pack_string(instr):
    """Pack a string to the required aligned size by adding padding

    Args:
        instr: String to process

    Returns:
        String with required padding (at least one 0x00 byte) at the end
    """
    val = tools.to_bytes(instr)
    pad_len = align_int(len(val) + 1, FILENAME_ALIGN)
    return val + tools.get_bytes(0, pad_len - len(val))
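# A worked example of the padding rule above, assuming FILENAME_ALIGN is 16
# and using hypothetical stand-ins for align_int()/tools.get_bytes(): the
# 6-byte name 'u-boot' needs at least one terminating NUL, so it is padded
# up to the next 16-byte boundary with ten 0x00 bytes.
FILENAME_ALIGN_EXAMPLE = 16

def _align_int_example(val, align):
    """Round val up to the next multiple of align."""
    return ((val + align - 1) // align) * align

name = b'u-boot'
pad_len = _align_int_example(len(name) + 1, FILENAME_ALIGN_EXAMPLE)
assert name + bytes(pad_len - len(name)) == b'u-boot' + bytes(10)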
def _scan_gen_fdt_nodes(subnode, depth, in_images):
    """Generate FDT nodes

    This creates one node for each member of self._fdts using the provided
    template. If a property value contains 'NAME' it is replaced with the
    filename of the FDT. If a property value contains SEQ it is replaced
    with the node sequence number, where 1 is the first.

    Args:
        subnode: Generator node to process
        depth: Current node depth (0 is the base 'fit' node)
        in_images: True if this is inside the 'images' node, so that
            'data' properties should be generated
    """
    if self._fdts:
        # Generate nodes for each FDT
        for seq, fdt_fname in enumerate(self._fdts):
            node_name = subnode.name[1:].replace('SEQ', str(seq + 1))
            fname = tools.get_input_filename(fdt_fname + '.dtb')
            with fsw.add_node(node_name):
                for pname, prop in subnode.props.items():
                    val = prop.bytes.replace(
                        b'NAME', tools.to_bytes(fdt_fname))
                    val = val.replace(b'SEQ', tools.to_bytes(str(seq + 1)))
                    fsw.property(pname, val)

                # Add data for 'images' nodes (but not 'config')
                if depth == 1 and in_images:
                    fsw.property('data', tools.read_file(fname))
    else:
        if self._fdts is None:
            if self._fit_list_prop:
                self.Raise(
                    "Generator node requires '%s' entry argument" %
                    self._fit_list_prop.value)
            else:
                self.Raise(
                    "Generator node requires 'fit,fdt-list' property")
def MakeEnvironment(self, full_path):
    """Returns an environment for using the toolchain.

    This takes the current environment and adds CROSS_COMPILE so that
    the tool chain will operate correctly. This also disables localized
    output and possibly unicode encoded output of all build tools by
    adding LC_ALL=C.

    Note that os.environb is used to obtain the environment, since in some
    cases the environment may contain non-ASCII characters and we see
    errors like:

      UnicodeEncodeError: 'utf-8' codec can't encode characters in
         position 569-570: surrogates not allowed

    Args:
        full_path: Return the full path in CROSS_COMPILE and don't set
            PATH

    Returns:
        Dict containing the (bytes) environment to use. This is based on
        the current environment, with changes as needed to CROSS_COMPILE,
        PATH and LC_ALL.
    """
    env = dict(os.environb)
    wrapper = self.GetWrapper()

    if self.override_toolchain:
        # We'll use MakeArgs() to provide this
        pass
    elif full_path:
        env[b'CROSS_COMPILE'] = tools.to_bytes(
            wrapper + os.path.join(self.path, self.cross))
    else:
        env[b'CROSS_COMPILE'] = tools.to_bytes(wrapper + self.cross)
        env[b'PATH'] = tools.to_bytes(self.path) + b':' + env[b'PATH']

    env[b'LC_ALL'] = b'C'

    return env
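# A minimal sketch of consuming a bytes environment like the one built above,
# assuming a POSIX host (where subprocess accepts bytes keys and values) and
# an assumed 'aarch64-linux-gnu-' toolchain prefix; the shell command is
# illustrative only.
import os
import subprocess

env = dict(os.environb)
env[b'CROSS_COMPILE'] = b'aarch64-linux-gnu-'
env[b'LC_ALL'] = b'C'
subprocess.run(['/bin/sh', '-c', 'echo "$CROSS_COMPILE"'], env=env,
               check=False)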
def ConvertName(field_names, fields):
    """Convert a name to something flashrom likes

    Flashrom requires upper case and underscores instead of hyphens. We
    remove any null characters as well. This updates the 'name' value in
    fields.

    Args:
        field_names: List of field names for this struct
        fields: List of field values, in the same order as field_names
            (string for the ones we support)
    """
    name_index = field_names.index('name')
    fields[name_index] = tools.to_bytes(NameToFmap(fields[name_index]))
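# A minimal sketch of the conversion this applies, assuming NameToFmap()
# upper-cases the name, turns '-' into '_' and strips NUL padding; the local
# helper and the 'ro-boot' example value are hypothetical.
def _name_to_fmap_example(name):
    """Assumed equivalent of NameToFmap() for illustration."""
    return name.replace('\0', '').replace('-', '_').upper()

field_names_example = ('name', 'offset', 'size')
fields_example = ['ro-boot\0\0', 0, 0x1000]
name_index = field_names_example.index('name')
fields_example[name_index] = _name_to_fmap_example(
    fields_example[name_index]).encode('utf-8')
assert fields_example[name_index] == b'RO_BOOT'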
def _AddNode(base_node, depth, node):
    """Add a node to the FIT

    Args:
        base_node: Base Node of the FIT (with 'description' property)
        depth: Current node depth (0 is the base node)
        node: Current node to process

    There are two cases to deal with:
        - hash and signature nodes which become part of the FIT
        - binman entries which are used to define the 'data' for each
          image
    """
    for pname, prop in node.props.items():
        if not pname.startswith('fit,'):
            if pname == 'default':
                val = prop.value
                # Handle the 'default' property
                if val.startswith('@'):
                    if not self._fdts:
                        continue
                    if not self._fit_default_dt:
                        self.Raise("Generated 'default' node requires "
                                   'default-dt entry argument')
                    if self._fit_default_dt not in self._fdts:
                        self.Raise(
                            "default-dt entry argument '%s' not found in "
                            'fdt list: %s' %
                            (self._fit_default_dt, ', '.join(self._fdts)))
                    seq = self._fdts.index(self._fit_default_dt)
                    val = val[1:].replace('DEFAULT-SEQ', str(seq + 1))
                    fsw.property_string(pname, val)
                    continue
            fsw.property(pname, prop.bytes)

    rel_path = node.path[len(base_node.path):]
    in_images = rel_path.startswith('/images')

    has_images = depth == 2 and in_images
    if has_images:
        # This node is a FIT subimage node (e.g. "/images/kernel")
        # containing content nodes. We collect the subimage nodes and
        # section entries for them here to merge the content subnodes
        # together and put the merged contents in the subimage node's
        # 'data' property later.
        entry = Entry.Create(self.section, node, etype='section')
        entry.ReadNode()
        self._entries[rel_path] = entry

    for subnode in node.subnodes:
        if has_images and not (subnode.name.startswith('hash') or
                               subnode.name.startswith('signature')):
            # This subnode is a content node not meant to appear in
            # the FIT (e.g. "/images/kernel/u-boot"), so don't call
            # fsw.add_node() or _AddNode() for it.
            pass
        elif self.GetImage().generate and subnode.name.startswith('@'):
            if self._fdts:
                # Generate nodes for each FDT
                for seq, fdt_fname in enumerate(self._fdts):
                    node_name = subnode.name[1:].replace(
                        'SEQ', str(seq + 1))
                    fname = tools.get_input_filename(fdt_fname + '.dtb')
                    with fsw.add_node(node_name):
                        for pname, prop in subnode.props.items():
                            val = prop.bytes.replace(
                                b'NAME', tools.to_bytes(fdt_fname))
                            val = val.replace(
                                b'SEQ', tools.to_bytes(str(seq + 1)))
                            fsw.property(pname, val)

                        # Add data for 'fdt' nodes (but not 'config')
                        if depth == 1 and in_images:
                            fsw.property('data', tools.read_file(fname))
            else:
                if self._fdts is None:
                    if self._fit_list_prop:
                        self.Raise(
                            "Generator node requires '%s' entry argument" %
                            self._fit_list_prop.value)
                    else:
                        self.Raise("Generator node requires "
                                   "'fit,fdt-list' property")
        else:
            with fsw.add_node(subnode.name):
                _AddNode(base_node, depth + 1, subnode)
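# A short, runnable sketch of the DEFAULT-SEQ handling above, using an
# assumed FDT list and default-dt value: the '@' prefix marks a generated
# 'default' value, and DEFAULT-SEQ becomes the 1-based index of default-dt
# within the fdt list.
fdts_example = ['imx8mp-evk', 'imx8mp-dhcom']
fit_default_dt_example = 'imx8mp-dhcom'
seq_example = fdts_example.index(fit_default_dt_example)
default_val = '@config-DEFAULT-SEQ'[1:].replace('DEFAULT-SEQ',
                                                str(seq_example + 1))
assert default_val == 'config-2'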