def __init__(self,
             data,
             debug_dir=None,
             debug_prefix=None,
             debug_suffix=None):
    """Parse a raw ELF image into its header, program headers and segments.

    :param data: raw ELF file contents (bytes).
    :param debug_dir: optional directory for dumping debug artifacts.
    :param debug_prefix: optional filename prefix for debug artifacts.
    :param debug_suffix: optional filename suffix for debug artifacts.
    """
    # Public properties
    self.debug_dir = debug_dir
    self.debug_prefix = debug_prefix
    self.debug_suffix = debug_suffix

    # Store the original image
    self.store_debug_data(df.FILE_DATA_IN, data)

    # Extract the various segments/sections of the data:
    #   1. Elf header
    #   2. Prog headers
    #   3. Bin
    # (Was a stray triple-quoted string — a no-op statement — now a comment.)

    # Extract the header
    self.ehdr = extract_ehdr(data)
    self.store_debug_data(df.FILE_HDR_IN, self.ehdr.pack())
    self.store_debug_data(df.FILE_HDR_IN_REPR, repr(self.ehdr),
                          suffix=df.FILE_HDR_IN_REPR_SUFFIX)

    # Extract the program headers
    self.phdrs = extract_phdrs(data, self.ehdr)
    self.store_debug_data(df.FILE_PHDR_IN, pack_phdrs(self.phdrs))
    self.store_debug_data(df.FILE_PHDR_IN_REPR, repr_phdrs(self.phdrs),
                          suffix=df.FILE_PHDR_IN_REPR_SUFFIX)

    # Dump the individual segments
    self.segments = extract_segments(data, self.phdrs)
    for idx, phdr in enumerate(self.phdrs):
        length = len(self.segments[phdr])
        is_load = self._segment_to_put(phdr)
        # len() is never negative, so only the upper bound needs checking.
        if length <= 16:
            logger.debug(('' if is_load else 'Non-') + 'Loadable segment - ' +
                         str(idx + 1) + ' is of size: ' + str(length))
        # Suspiciously small *loadable* segments additionally get a warning.
        # is_load is True in this branch, so the 'Non-' prefix was dead code.
        if is_load and 0 < length <= 16:
            logger.warning('Loadable segment - ' + str(idx + 1) +
                           ' is of size: ' + str(length))
        self.store_debug_data(df.FILE_SEGMENT_IN.format(idx),
                              self.segments[phdr])

    # Zero out the sections for now
    zero_out_sections(self.ehdr)
def validate(self):
    """Run the configured address-overlap and encapsulation policies."""
    # Physical address overlap check (policy-gated)
    if not elf_vars[POLICY_OVERLAPPING_SEGMENTS_PHYSICAL].is_ignore():
        if self.validate_ph_addrs:
            self.validate_addresses(PHY_ADDR_VALIDATION_DICT)

    # Virtual address overlap check (policy-gated)
    if not elf_vars[POLICY_OVERLAPPING_SEGMENTS_VIRTUAL].is_ignore():
        if self.validate_vir_addrs:
            self.validate_addresses(VIR_ADDR_VALIDATION_DICT)

    # Non-loadable segments must live inside some loadable segment
    if not elf_vars[POLICY_NON_LOAD_OUTSIDE_LOAD].is_ignore():
        load_ranges = []
        non_load_ranges = []

        # Partition the program headers into load / relevant non-load ranges
        for idx, phdr in enumerate(self.parsegen.phdrs):
            span = (phdr.p_offset, phdr.p_offset + phdr.p_filesz)
            if phdr.p_type == PT_LOAD:
                load_ranges.append(span)
            elif (phdr.p_type != PT_NULL and phdr.p_type != PT_PHDR
                  and phdr.p_filesz != 0
                  and not self.segment_to_put(phdr)):
                # idx + 3 matches the serial numbering used by repr_phdrs
                non_load_ranges.append(span + (phdr, idx + 3))

        # Sort both lists by starting file offset
        load_ranges.sort(key=lambda rng: rng[0])
        non_load_ranges.sort(key=lambda rng: rng[0])

        # A non-load range is acceptable only when some load range fully
        # contains it; collect the ones that no load range encapsulates.
        non_encap = []
        non_encap_sno = []
        for start, end, phdr, serial in non_load_ranges:
            encapsulated = False
            for lo, hi in load_ranges:
                if lo <= start and end <= hi:
                    encapsulated = True
                    break
            if not encapsulated:
                non_encap.append(phdr)
                non_encap_sno.append(serial)

        if non_encap:
            # Create the error string
            elf_vars[POLICY_NON_LOAD_OUTSIDE_LOAD].run(
                "Following non-loadable segments are not completely encapsulated by a load segment: "
                + '\n' + repr_phdrs(non_encap, non_encap_sno))
def __init__(self,
             data,
             debug_dir=None,
             debug_prefix=None,
             debug_suffix=None):
    """Break an ELF image apart: ELF header, program headers, segment data.

    Dumps each extracted piece through store_debug_data for inspection.
    """
    # Public properties
    self.debug_dir = debug_dir
    self.debug_prefix = debug_prefix
    self.debug_suffix = debug_suffix

    # Keep a copy of the untouched input image
    self.store_debug_data(df.FILE_DATA_IN, data)

    # Pull out the ELF header and dump it (packed + human-readable)
    self.ehdr = extract_ehdr(data)
    self.store_debug_data(df.FILE_HDR_IN, self.ehdr.pack())
    self.store_debug_data(df.FILE_HDR_IN_REPR, repr(self.ehdr),
                          suffix=df.FILE_HDR_IN_REPR_SUFFIX)

    # Pull out the program headers and dump them likewise
    self.phdrs = extract_phdrs(data, self.ehdr)
    self.store_debug_data(df.FILE_PHDR_IN, pack_phdrs(self.phdrs))
    self.store_debug_data(df.FILE_PHDR_IN_REPR, repr_phdrs(self.phdrs),
                          suffix=df.FILE_PHDR_IN_REPR_SUFFIX)

    # Extract every segment's bytes and dump each one, logging tiny segments
    self.segments = extract_segments(data, self.phdrs)
    for seg_no, phdr in enumerate(self.phdrs):
        seg_len = len(self.segments[phdr])
        loadable = self._segment_to_put(phdr)
        prefix = '' if loadable else 'Non-'
        if 0 <= seg_len <= 16:
            logger.debug(prefix + 'Loadable segment - ' + str(seg_no + 1) +
                         ' is of size: ' + str(seg_len))
        if loadable and 0 < seg_len <= 16:
            logger.warning(prefix + 'Loadable segment - ' + str(seg_no + 1) +
                           ' is of size: ' + str(seg_len))
        self.store_debug_data(df.FILE_SEGMENT_IN.format(seg_no),
                              self.segments[phdr])

    # Zero out the sections for now
    zero_out_sections(self.ehdr)
def __repr__(self):
    """Render the ELF header and program headers as labeled text."""
    sections = [
        'Elf Header: ',
        repr(self.ehdr),
        'Elf Program Headers: ',
        repr_phdrs(self.phdrs),
    ]
    return '\n'.join(sections)
def _validate_segments(self):
    """Validate segment layout of the underlying ELF parsegen.

    Runs two independent policy checks:
      1. loadable segments must not overlap in physical address space;
      2. non-loadable segments must sit inside some loadable segment's
         file-offset range.
    """
    self._check_overlapping_segments()
    self._check_non_load_outside_load()

def _check_overlapping_segments(self):
    # Check that no two loadable segments overlap in physical memory.
    if POLICY_OVERLAPPING_SEGMENTS.is_ignore():
        return

    # Collect (paddr_start, paddr_end, memsz) for every loadable segment
    seg_ranges = []
    for phdr in self._elf_parsegen.phdrs:
        if self._elf_parsegen._segment_to_put(phdr):
            seg_ranges.append(
                (phdr.p_paddr, phdr.p_paddr + phdr.p_memsz, phdr.p_memsz))

    # Sort ranges by start address; adjacent pairs then suffice for overlap
    seg_ranges.sort(key=lambda x: x[0])
    overlapping = [(seg_ranges[idx], seg_ranges[idx + 1])
                   for idx in range(len(seg_ranges) - 1)
                   if seg_ranges[idx + 1][0] < seg_ranges[idx][1]]
    if not overlapping:
        return

    # Build a readable table of the offending pairs
    table = TablePrinter([1])
    table.insert_data(0, 0, 'S.No.')
    table.insert_data(0, 1, 'Segment A', column_end=2)
    table.insert_data(0, 3, 'Segment B', column_end=4)
    # Sub header
    table.insert_data(1, 1, 'Phys')
    table.insert_data(1, 2, 'Size')
    table.insert_data(1, 3, 'Phys')
    table.insert_data(1, 4, 'Size')
    for idx, overlap in enumerate(overlapping):
        table.insert_data(idx + 2, 1, hex(overlap[0][0]))
        table.insert_data(idx + 2, 2, hex(overlap[0][2]))
        table.insert_data(idx + 2, 3, hex(overlap[1][0]))
        table.insert_data(idx + 2, 4, hex(overlap[1][2]))

    # Create the error string
    POLICY_OVERLAPPING_SEGMENTS.run(
        'Following overlapping segments were found: ' + '\n' +
        '\n'.join([' ' + l for l in table.get_data().split('\n')]))

def _check_non_load_outside_load(self):
    # Check that every non-loadable segment's file range is fully contained
    # within some loadable segment's file range.
    if POLICY_NON_LOAD_OUTSIDE_LOAD.is_ignore():
        return

    load_ranges = []
    non_load_ranges = []
    for idx, phdr in enumerate(self._elf_parsegen.phdrs):
        if self._elf_parsegen._segment_to_put(phdr):
            load_ranges.append(
                (phdr.p_offset, phdr.p_offset + phdr.p_filesz))
        elif phdr.p_type != PT_NULL and phdr.p_filesz != 0:
            # idx + 3 matches the serial numbering used by repr_phdrs
            non_load_ranges.append(
                (phdr.p_offset, phdr.p_offset + phdr.p_filesz, phdr,
                 idx + 3))

    # Sort ranges by start address
    load_ranges.sort(key=lambda x: x[0])
    non_load_ranges.sort(key=lambda x: x[0])

    # any() short-circuits on the first containing load range, replacing
    # the original's needless tuple construction and exhaustive inner loop.
    non_encap = []
    non_encap_sno = []
    for start, end, phdr, serial in non_load_ranges:
        if not any(lo <= start and end <= hi for lo, hi in load_ranges):
            non_encap.append(phdr)
            non_encap_sno.append(serial)

    if non_encap:
        # Create the error string
        POLICY_NON_LOAD_OUTSIDE_LOAD.run(
            "Following non-loadable segments found outside load segments: "
            + '\n' + repr_phdrs(non_encap, non_encap_sno))
def __init__(self,
             data=None,
             debug_dir=None,
             debug_prefix=None,
             debug_suffix=None,
             _class=ELFCLASS32,
             delegate=None,
             elf_properties=None):
    """Parse (or create) an ELF image: header, program/section headers, data.

    When data is None an empty ELF of the given _class is synthesized.
    Section headers are extracted too; sections not covered by a segment
    are dumped individually.
    """
    # Create an empty elf file if no data was supplied
    if data is None:
        data = create_empty_elf(_class)

    # Fall back to the default delegate when none is provided
    self.delegate = (ParseGenElfDelegate(self)
                     if delegate is None else delegate)

    # Public properties
    self.debug_dir = debug_dir
    self.debug_prefix = debug_prefix
    self.debug_suffix = debug_suffix

    # Keep a copy of the untouched input image
    self.store_debug_data(df.FILE_DATA_IN, data)

    # ELF header: extract and dump (packed + human-readable)
    self.ehdr = extract_ehdr(data)
    self.store_debug_data(df.FILE_HDR_IN, self.ehdr.pack())
    self.store_debug_data(df.FILE_HDR_IN_REPR, repr(self.ehdr),
                          suffix=df.FILE_HDR_IN_REPR_SUFFIX)

    # Program headers: extract and dump likewise
    self.phdrs = extract_phdrs(data, self.ehdr)
    self.store_debug_data(df.FILE_PHDR_IN, pack_phdrs(self.phdrs))
    self.store_debug_data(df.FILE_PHDR_IN_REPR, repr_phdrs(self.phdrs),
                          suffix=df.FILE_PHDR_IN_REPR_SUFFIX)

    # Section headers: wipe them first if the properties say there are none
    if elf_properties and not elf_properties.has_sections:
        zero_out_sections(self.ehdr)
    self.shdrs = extract_shdrs(data, self.ehdr)

    # Split sections into segment-encapsulated vs free-standing
    self.encapsulated_sections_map, self.non_encapsulated_sections = \
        self.sort_sections()

    # Dump the section data that lives outside any segment
    self.sections = extract_sections(data, self.non_encapsulated_sections)
    for sec_no, shdr in enumerate(self.shdrs):
        if shdr in self.non_encapsulated_sections:
            self.store_debug_data(df.FILE_SECTION_IN.format(sec_no),
                                  self.sections[shdr])

    # Dump each segment's bytes, flagging suspiciously small ones
    self.segments = extract_segments(data, self.phdrs)
    for seg_no, phdr in enumerate(self.phdrs):
        seg_len = len(self.segments[phdr])
        loadable = self.delegate.segment_to_put(phdr)
        prefix = '' if loadable else 'Non-'
        if 0 <= seg_len <= 16:
            logger.debug(prefix + 'Loadable segment - ' + str(seg_no + 1) +
                         ' is of size: ' + str(seg_len))
        if loadable and 0 < seg_len <= 16:
            logger.warning(prefix + 'Loadable segment - ' + str(seg_no + 1) +
                           ' is of size: ' + str(seg_len))
        self.store_debug_data(df.FILE_SEGMENT_IN.format(seg_no),
                              self.segments[phdr])
def __repr__(self):
    """Render header, program headers, and (if present) section headers."""
    pieces = [
        'Header: ',
        repr(self.ehdr),
        'Program Headers: ',
        repr_phdrs(self.phdrs),
    ]
    if self.shdrs:
        pieces.append('Section Headers: ')
        pieces.append(repr_shdrs(self.shdrs))
    return '\n'.join(pieces)
def __init__(self,
             data=None,
             debug_dir=None,
             debug_prefix=None,
             debug_suffix=None,
             _class=None,
             delegate=None):
    """Parse (or create) an ELF image into header, program headers, segments.

    :param data: raw ELF file contents; when None an empty ELF is created.
    :param debug_dir: optional directory for dumping debug artifacts.
    :param debug_prefix: optional filename prefix for debug artifacts.
    :param debug_suffix: optional filename suffix for debug artifacts.
    :param _class: ELF class used only when synthesizing an empty ELF.
    :param delegate: optional ParseGen delegate; defaults to
        ParseGenElfDelegate(self).
    :raises RuntimeError: if both data and _class are None.
    """
    # Create an empty elf file if data is None
    if data is None:
        if _class is None:
            raise RuntimeError('Either data or class must be given.')
        data = create_empty_elf(_class)

    # Set the delegate
    if delegate is None:
        self.delegate = ParseGenElfDelegate(self)
    else:
        self.delegate = delegate

    # Public properties
    self.debug_dir = debug_dir
    self.debug_prefix = debug_prefix
    self.debug_suffix = debug_suffix

    # Store the original image
    self.store_debug_data(df.FILE_DATA_IN, data)

    # Extract the various segments/sections of the data:
    #   1. Elf header
    #   2. Prog headers
    #   3. Bin
    # (Was a stray triple-quoted string — a no-op statement — now a comment.)

    # Extract the header
    self.ehdr = extract_ehdr(data)
    self.store_debug_data(df.FILE_HDR_IN, self.ehdr.pack())
    self.store_debug_data(df.FILE_HDR_IN_REPR, repr(self.ehdr),
                          suffix=df.FILE_HDR_IN_REPR_SUFFIX)

    # Extract the program headers
    self.phdrs = extract_phdrs(data, self.ehdr)
    self.store_debug_data(df.FILE_PHDR_IN, pack_phdrs(self.phdrs))
    self.store_debug_data(df.FILE_PHDR_IN_REPR, repr_phdrs(self.phdrs),
                          suffix=df.FILE_PHDR_IN_REPR_SUFFIX)

    # Dump the individual segments
    self.segments = extract_segments(data, self.phdrs)
    for idx, phdr in enumerate(self.phdrs):
        length = len(self.segments[phdr])
        is_load = self.delegate.segment_to_put(phdr)
        # len() is never negative, so only the upper bound needs checking.
        if length <= 16:
            logger.debug(('' if is_load else 'Non-') + 'Loadable segment - ' +
                         str(idx + 1) + ' is of size: ' + str(length))
        # Suspiciously small *loadable* segments additionally get a warning.
        # is_load is True in this branch, so the 'Non-' prefix was dead code.
        if is_load and 0 < length <= 16:
            logger.warning('Loadable segment - ' + str(idx + 1) +
                           ' is of size: ' + str(length))
        self.store_debug_data(df.FILE_SEGMENT_IN.format(idx),
                              self.segments[phdr])

    # Zero out the sections for now
    zero_out_sections(self.ehdr)
def __repr__(self):
    """Render the ELF header and program headers as labeled text."""
    template = ('Elf Header: \n{0}\n'
                'Elf Program Headers: \n{1}')
    return template.format(repr(self.ehdr), repr_phdrs(self.phdrs))