Example #1
 def __init__(self, key):
     self.block_size, self.num_rounds = 0x10, 10  # 128-bit AES
     if len(key) != self.block_size:
         raise ValueError('Key must be of size 0x%X!' % self.block_size)
     self.keys = [list(up('>IIII', key))]
     for i in range(1, self.num_rounds + 1):
         new_key = [
             self.key_schedule_core(self.keys[i - 1][3], i)
             ^ self.keys[i - 1][0]
         ]
         for j in range(1, 4):
             new_key.append(self.keys[i - 1][j] ^ new_key[j - 1])
         self.keys.append(new_key)
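All of these snippets abbreviate struct.unpack and struct.pack as up and pk. The imports are not shown in any of the examples; a minimal alias block, assuming that is all they do:

from struct import unpack as up
from struct import pack as pk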
Example #2
def parse_fields(full, table):
    fields = []
    ofs = 0
    while True:
        val = up('<Q', full[table + ofs:table + ofs + 8])[0]
        if (val & 0xFFFFFFFF00000000) != LOAD_BASE:
            break
        s = read_string(full, val - LOAD_BASE)
        if not is_valid_field_name(s):
            break
        fields.append(s)
        ofs += 8
    return fields
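read_string and is_valid_field_name are helpers this snippet assumes but does not define. A plausible sketch of the former, under the assumption that it simply reads a NUL-terminated string at the given offset:

def read_string(full, ofs):
    # Hypothetical helper: return the NUL-terminated string starting at ofs.
    end = full.index('\x00', ofs)
    return full[ofs:end]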
Example #3
 def encrypt_block_ecb(self, block):
     words = list(up('>IIII', self.pad_block(block)))
     for i in range(len(words)):
         words[i] ^= self.keys[0][i]
     for rnd in range(1, self.num_rounds + 1):
         for i in range(len(words)):
             words[i] = self.send_through_sbox(words[i], self.sbox_enc)
         words = self.shift_columns(words)
         if rnd != self.num_rounds:
             words = self.mix_columns(words)
         for i in range(len(words)):
             words[i] ^= self.keys[rnd][i]
     return pk('>IIII', words[0], words[1], words[2], words[3])
Example #4
def main(argc, argv):
    if argc != 3:
        print 'Usage: %s in_file out_dir' % argv[0]
        return
    global archive, dirs_off, files_off, fdata_off
    try:
        archive = open(argv[1], 'rb')
        if read_at(archive, 0, 4) != 'HFS0':
            print 'Error: Invalid archive.'
            return
    except:
        print 'Failed to open %s.' % argv[1]
        return

    global out_dir
    out_dir = argv[2]

    magic, num_files, name_table_size, reserved = up('<IIII',
                                                     read_at(archive, 0, 0x10))

    name_table = read_at(archive, 0x10 + 0x40 * num_files, name_table_size)

    archive.seek(0x10)

    files = []
    for i in range(num_files):
        f_meta = archive.read(0x40)
        offset, size, string_ofs, hsh_sz, unk2, unk3, hsh = up(
            '<QQIIII32s', f_meta)
        name = name_table[string_ofs:]
        if '\x00' in name:
            name = name[:name.index('\x00')]
        files.append((0x10 + 0x40 * num_files + name_table_size + offset, size,
                      name, hsh_sz, hsh))

    for file in files:
        offset, size, name, hsh_sz, hsh = file
        dump_file(name, offset, size, hsh_sz, hsh)
Example #5
 def decrypt_block_ecb(self, block):
     assert (len(block) == self.block_size)
     words = list(up('>IIII', block))
     for rnd in range(self.num_rounds, 0, -1):
         for i in range(len(words)):
             words[i] ^= self.keys[rnd][i]
         if rnd != self.num_rounds:
             words = self.unmix_columns(words)
         words = self.unshift_columns(words)
         for i in range(len(words)):
             words[i] = self.send_through_sbox(words[i], self.sbox_dec)
     for i in range(len(words)):
         words[i] ^= self.keys[0][i]
     return pk('>IIII', words[0], words[1], words[2], words[3])
Example #6
def split_binary(data):
    A, B, START, BOOT_CODE_START, BOOT_CODE_END, PROGRAM_START, C, D = up(
        '<QQQQQQQQ', data[:0x40])
    assert A == 0xAAAAAAAAAAAAAAAA
    assert B == 0xBBBBBBBBBBBBBBBB
    assert C == 0xCCCCCCCCCCCCCCCC
    assert D == 0xDDDDDDDDDDDDDDDD
    data = data[0x40:]

    #print ('%X %X %X %X' % (START, BOOT_CODE_START, BOOT_CODE_END, PROGRAM_START))
    boot_code = data[BOOT_CODE_START - START:BOOT_CODE_END - BOOT_CODE_START]
    program = data[PROGRAM_START - START:]
    return [('boot_code.lz4', lz4_compress(boot_code)),
            ('program.lz4', lz4_compress(program))]
Example #7
def uncompress_to_file(f, data):
    def align(ofs):
        return (ofs + 0x7F) & ~0x7F

    split_size, num_entries, total_size = up('<III', data[:0xC])
    splits = [
        up('<I', entry)[0] for entry in iterate(data, 0xC, num_entries, 4)
    ]
    ofs = align(0xC + 0x4 * num_entries)
    out_data = ''
    for i, split in enumerate(splits):
        #print '%X, %X' % (ofs, split)
        cur_comp = up('<I', data[ofs:ofs + 4])[0]
        if i == num_entries - 1:
            if cur_comp != split - 4:
                assert split + (split_size * (num_entries - 1)) == total_size
                f.write(data[ofs:ofs + split])
            else:
                f.write(zlib.decompress(data[ofs + 4:ofs + 4 + cur_comp]))
        else:
            assert cur_comp == split - 4
            f.write(zlib.decompress(data[ofs + 4:ofs + 4 + cur_comp]))
        ofs = align(ofs + split)
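The iterate() helper used above (and again in a later example) is not shown. A minimal sketch, assuming it yields count fixed-size entries starting at offset:

def iterate(data, offset, count, entry_size):
    # Assumed helper: yield successive entry_size-byte slices of data.
    for i in range(count):
        yield data[offset + i * entry_size:offset + (i + 1) * entry_size]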
Example #8
def make_standard(exp):
    std = exp[:]
    _, metadata_offset, is_exp = up('<III', exp[:12])
    assert is_exp == 1

    # Patch the experimental flag to zero.
    std = std[:8] + pk('<I', 0) + std[12:]

    # Locate the mesosphere content header, patch to be experimental.
    magic, size, code_ofs, content_ofs, num_contents, ver, sup_ver, rev = up(
        '<IIIIIIII', exp[metadata_offset:metadata_offset + 0x20])
    for i in range(num_contents):
        start, size, cnt_type, flag0, flag1, flag2, pad = up(
            '<IIBBBBI',
            exp[content_ofs + 0x20 * i:content_ofs + 0x20 * i + 0x10])
        if cnt_type == 10:  # CONTENT_TYPE_KRN
            assert exp[content_ofs + 0x20 * i + 0x10:content_ofs + 0x20 * i +
                       0x10 + len(b'mesosphere') + 1] == (b'mesosphere\x00')
            assert flag0 == 0 and flag1 == 0 and flag2 == 0
            std = std[:content_ofs + 0x20 * i] + pk(
                '<IIBBBBI', start, size, cnt_type, flag0 | 0x1, flag1, flag2,
                pad) + std[content_ofs + 0x20 * i + 0x10:]

    return std
Example #9
def parse_dir(off, path = ''):
    global dirs, files
    dirs.seek(off)
    (sibling, child, file, hsh, namelen) = up('<IIIII', dirs.read(0x14))
    name = read_filename(dirs, off + 0x14, namelen)
    if path:
        newpath = '%s%s/' % (path, name)
    else:
        newpath = '%s/' % name
    if file != 0xFFFFFFFF:
        parse_file(file, newpath)
    if sibling != 0xFFFFFFFF:
        parse_dir(sibling, path)
    if child != 0xFFFFFFFF:
        parse_dir(child, newpath)
Example #10
def nextBlock(f):
    rb = f.read(1)  #read current byte from file
    b = up('B', rb)[0]

    while b != 0x3B:  #while the byte is not the file end [trailer(0x3b)]
        buf = ''
        buf += rb
        if b == 0x2c:  #image block
            nbuf = f.read(2 * 4)  #read the image block header.
            eb = f.read(1)  #block flags
            assert (up('B', eb)[0]
                    & 0x03) == 0, ''  #local color table, interlace flag
            nbuf += eb  #save the flags byte

            nbuf += f.read(1)  #LZW minimum code size
            nbuf += getBlockData(f)  #concat image data
            t = T.image
        elif b == 0x21:  #non image block
            rb = f.read(1)  #graphics control label
            buf += rb
            b = up('B', rb)[0]

            #checking block type
            if b == 0xF9:  #graphic control extension block
                nbuf = f.read(1)  #block size
                blksize = up('B', nbuf)[0]
                nbuf += f.read(blksize)  #reading the rest of the block
                nbuf += f.read(1)  #read block terminator
                assert nbuf[-1] == '\x00', ''  #verifying the size was OK
                t = T.graphicsControl
            elif b in [0xFF, 0x01]:  #plain text or application extension block
                nbuf = f.read(1)  #block size
                blksize = up('B', nbuf)[0]
                nbuf += f.read(blksize)  #read the block
                nbuf += getBlockData(f)  #read the block data

                t = (b + 3) & 0x0F  #block type is applicationBlock or plaintext
            elif b == 0xFE:  #comment extension block
                nbuf = getBlockData(f)

                t = T.commentBlock
            else:  #unsupported graphic control label
                raise Exception("unsupported thing @{}".format(f.tell()))

        else:
            print(f.tell(), b)

        buf += nbuf

        yield t, buf  #yield the block type and content
        rb = f.read(1)  #read type again
        b = up('B', rb)[0]

    yield None, '\x3B'  #EOF
Example #11
def find_entrypoint(data):
    if data[0x4000:0x4004] == 'PK11':
        ofs = 0x4000
    elif data[0x7000:0x7004] == 'PK11':
        ofs = 0x7000
    elif data[0x7170:0x7174] == 'PK11':
        ofs = 0x7170
    else:
        raise ValueError('No PK11 Header?')
    for i in xrange(4, 0x20, 4):
        semo_maybe = ofs + 0x20 + up('<I', data[ofs + i:ofs + i + 4])[0]
        if 4 <= semo_maybe and semo_maybe <= len(data) - 4:
            if data[semo_maybe - 4:semo_maybe +
                    4] == '\x00\x00\x00\x00\xDF\x4F\x03\xD5':
                return semo_maybe
    raise ValueError('Failed to find SecMon')
Example #12
def main(argc, argv):
    if argc != 4:
        print('Usage: %s kernel_ldr.bin kernel.bin output.bin' % argv[0])
        return 1
    with open(argv[1], 'rb') as f:
        kernel_ldr = f.read()
    with open(argv[2], 'rb') as f:
        kernel = f.read()
    kernel_metadata_offset = 4
    assert (kernel_metadata_offset <= len(kernel) - 0x40)
    assert (kernel[kernel_metadata_offset:kernel_metadata_offset + 4] == b'MSS0')

    bss_start, bss_end, kernel_end = up('<III', kernel[kernel_metadata_offset + 0x30:kernel_metadata_offset + 0x3C])
    assert (bss_end >= bss_start)
    assert (bss_end == kernel_end)

    assert (len(kernel) <= kernel_end)
    if len(kernel) < kernel_end:
        kernel += b'\x00' * (kernel_end - len(kernel))
    assert (kernel_end == len(kernel))

    embedded_ini = b''
    try:
        with open('ini.bin', 'rb') as f:
            embedded_ini = f.read()
    except:
        pass
    embedded_ini_offset = align_up(kernel_end, 0x1000)
    embedded_ini_end = embedded_ini_offset + len(embedded_ini) # TODO: Create and embed an INI, eventually.

    kernel_ldr_offset = align_up(embedded_ini_end, 0x1000) + (0x1000 if len(embedded_ini) == 0 else 0)
    kernel_ldr_end    = kernel_ldr_offset + len(kernel_ldr)
    mesosphere_end    = align_up(kernel_ldr_end, 0x1000)

    with open(argv[3], 'wb') as f:
        f.write(kernel[:kernel_metadata_offset + 4])
        f.write(pk('<QQI', embedded_ini_offset, kernel_ldr_offset, atmosphere_target_firmware(13, 0, 0)))
        f.write(kernel[kernel_metadata_offset + 0x18:])
        f.seek(embedded_ini_offset)
        f.write(embedded_ini)
        f.seek(embedded_ini_end)
        f.seek(kernel_ldr_offset)
        f.write(kernel_ldr)
        f.seek(mesosphere_end)
        f.write(b'\x00'*0x1000)
    return 0
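align_up() is used here and in the next example but never defined. A plausible implementation, assuming the alignment is a power of two:

def align_up(value, align):
    # Assumed helper: round value up to the next multiple of align (a power of two).
    return (value + align - 1) & ~(align - 1)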
Example #13
def main(argc, argv):
    if argc != 1:
        print('Usage: %s' % argv[0])
        return 1
    with open('kernel_ldr/kernel_ldr.bin', 'rb') as f:
        kernel_ldr = f.read()
    with open('kernel/kernel.bin', 'rb') as f:
        kernel = f.read()
    kernel_metadata_offset = 4
    assert (kernel_metadata_offset <= len(kernel) - 0x40)
    assert (kernel[kernel_metadata_offset:kernel_metadata_offset +
                   4] == b'MSS0')
    kernel_end = up(
        '<I',
        kernel[kernel_metadata_offset + 0x38:kernel_metadata_offset + 0x3C])[0]
    assert (kernel_end >= len(kernel))

    embedded_ini = b''
    try:
        with open('ini.bin', 'rb') as f:
            embedded_ini = f.read()
    except:
        pass
    embedded_ini_offset = align_up(kernel_end, 0x1000) + 0x1000
    embedded_ini_end = embedded_ini_offset + len(
        embedded_ini)  # TODO: Create and embed an INI, eventually.

    kernel_ldr_offset = align_up(embedded_ini_end, 0x1000) + 0x1000
    kernel_ldr_end = kernel_ldr_offset + len(kernel_ldr)
    mesosphere_end = align_up(kernel_ldr_end, 0x1000)

    with open('mesosphere.bin', 'wb') as f:
        f.write(kernel[:kernel_metadata_offset + 4])
        f.write(
            pk('<QQI', embedded_ini_offset, kernel_ldr_offset,
               atmosphere_target_firmware(10, 1, 0)))
        f.write(kernel[kernel_metadata_offset + 0x18:])
        f.seek(embedded_ini_offset)
        f.write(embedded_ini)
        f.seek(embedded_ini_end)
        f.seek(kernel_ldr_offset)
        f.write(kernel_ldr)
        f.seek(mesosphere_end)
        f.write(b'\x00' * 0x1000)
    return 0
Example #14
 def hook_mem_rw(uc, access, address, size, value, self):
     if access == UC_MEM_READ and self.smc_lists == []:
         sp = uc.reg_read(UC_ARM64_REG_SP)
         if sp <= address and address <= (sp + 0xFFF) & ~0xFFF:
             return
         value = uc.mem_read(address, size)
         value = int(str(value)[::-1].encode('hex'), 16)
         self.tracked_reads.append((address, value))
         if value == 0x84000002:
             expected = (address - 0x20)
             smc_list_reads = filter(lambda x: x[1] == expected,
                                     self.tracked_reads)
             assert len(smc_list_reads) == 1
             self.smc_lists_addr = smc_list_reads[0][0] - 0x10
             for i in xrange(2):
                 self.smc_lists.append(
                     up('<QII',
                        uc.mem_read(self.smc_lists_addr + 0x10 * i, 0x10)))
Example #15
def parse_dir(off, path=''):
    global archive, dirs_off, files_off, fdata_off, out_dir
    archive.seek(dirs_off + off)
    (sibling, child, file, hsh, namelen) = up('<IIIII', archive.read(0x14))
    name = read_filename(archive, dirs_off + off + 0x14, namelen)
    if path:
        newpath = '%s%s/' % (path, name)
    else:
        newpath = '%s/' % name
    dirp = os.path.dirname(os.path.join(out_dir, newpath[1:])).replace(
        '/', os.path.sep)
    if not os.path.exists(dirp):
        os.mkdir(dirp)
    if file != 0xFFFFFFFF:
        parse_file(file, newpath)
    if sibling != 0xFFFFFFFF:
        parse_dir(sibling, path)
    if child != 0xFFFFFFFF:
        parse_dir(child, newpath)
Example #16
def extract(meta, data):
    fileIndex = -1
    meta_data = meta.read()
    entries = [
        up('<QQQ?', entry[:0x19])
        for entry in iterate(meta_data, 0,
                             len(meta_data) / 0x20, 0x20)
    ]
    for (offset, uncompressed_size, compressed_size, compressed) in entries:
        fileIndex = fileIndex + 1
        if compressed_size == 0:
            continue
        print 'Saving data from %d' % fileIndex
        data.seek(offset)
        cur_data = data.read(compressed_size)
        with open('out/%d.bin' % (fileIndex, ), 'wb') as f:
            if compressed:
                uncompress_to_file(f, cur_data)
            else:
                f.write(cur_data)
Example #17
 def hook_code(uc, address, size, self):
     assert size == 4
     insn = up('<I', uc.mem_read(address, 4))[0]
     self.restore_instructions(uc)
     if insn in self.invalid_instructions:
         self.patch_instruction(uc, address,
                                pk('<I', self.invalid_instructions[insn]))
     if self.calling_function:
         self.add_function_call(uc, address)
         self.calling_function = False
     if (insn & 0xFC000000) in [0x94000000]:
         self.calling_function = True
     if (insn & 0xFFFFFFE0) == 0xD51E1000 and len(self.virt_to_phys) == 0:
         mmu_funcs = [
             func for func in self.function_calls if self.is_mmu_func(func)
         ]
         assert len(mmu_funcs) == 1
         mmu_func = mmu_funcs[0]
         for call in self.function_calls[mmu_func]:
             l3_tab, vaddr, paddr, size, attr = tuple(call[:5])
             self.l3_table = l3_tab
             if vaddr != paddr:
                 assert vaddr >= 0x80000000
                 uc.mem_map(vaddr, size)
                 mem = uc.mem_read(paddr, size)
                 uc.mem_write(vaddr, str(mem))
                 self.virt_to_phys[vaddr] = (paddr, size, attr)
                 self.phys_to_virt[paddr] = (vaddr, size, attr)
                 self.mappings.append((vaddr, paddr, size, attr))
     if (insn & 0xFFFFFFE0) == 0xD51EC000:
         # VBAR set
         which = insn & 0x1F
         self.vbar = uc.reg_read(Emulator.REGISTER_IDS[which])
     if (insn & 0xFFFFFFE0) == 0xD51E2000:
         # TTBR set
         which = insn & 0x1F
         self.ttbr = uc.reg_read(Emulator.REGISTER_IDS[which])
         assert (self.ttbr & 0x7FF) == 0x7C0
Example #18
def meme_aes_encrypt(key, buf):
    temp = '\x00' * 0x10
    output = ''
    num_blocks = len(buf) / 0x10

    # Phase 1: CBC encrypt
    for i in xrange(num_blocks):
        block = buf[i * 0x10:(i + 1) * 0x10]
        temp = aes_ecb_enc(key, sxor(temp, block))
        output += temp

    # Phase 2: Shitty CMAC
    temp = sxor(temp, output[:0x10])
    subkey = [0] * 0x10
    for i in xrange(0, 0x10, 2):
        b1, b2 = up('<BB', temp[i:i + 2])
        subkey[i + 0] = (2 * b1 + (b2 >> 7)) & 0xFF
        subkey[i + 1] = (2 * b2) & 0xFF
        if (i + 2 < 0x10):
            subkey[i + 1] += ord(temp[i + 2]) >> 7
            subkey[i + 1] &= 0xFF
    if ord(temp[0]) & 0x80:
        subkey[0xF] ^= 0x87
    subkey = ''.join(map(chr, subkey))

    output = sxor(output, subkey * num_blocks)

    # Phase 3: Custom AES mode
    temp = '\x00' * 0x10
    for i in xrange(num_blocks):
        block_ofs = (num_blocks - 1 - i) * 0x10
        block = output[block_ofs:block_ofs + 0x10]
        output = output[:block_ofs] + sxor(aes_ecb_enc(key, block),
                                           temp) + output[block_ofs + 0x10:]
        temp = block

    return output
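sxor() is a byte-wise XOR helper this snippet relies on but does not define. A minimal Python 2 sketch consistent with how it is applied to 16-byte strings above:

def sxor(a, b):
    # Assumed helper: XOR two equal-length byte strings (Python 2 str).
    return ''.join(chr(ord(x) ^ ord(y)) for x, y in zip(a, b))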
Example #19
    def __init__(self, fileobj):
        f = BinFile(fileobj)

        crt0 = bytes(f.read(0x2000))
        kmap = -1
        for mapoff in iter_range(0, len(crt0) - 0x30, 4):
            if is_valid_kernel_map(crt0, mapoff):
                textOffset, textEndOffset, rodataOffset, rodataEndOffset, \
                dataOffset, dataEndOffset, bssOffset, bssEndOffset, ini1Offset, \
                dynamicOffset, initArrayOffset, initArrayEndOffset = up("<12I", crt0[mapoff:mapoff+0x30])
                f.seek(ini1Offset)
                if bytes(f.read(4)) == b'INI1' or (0x100000 <= ini1Offset and ini1Offset <= 0x400000):
                    kmap = mapoff
                    break
            elif mapoff <= len(crt0) - 0x58 and is_valid_kernel_map_5x(crt0, mapoff):
                textOffset, textEndOffset, rodataOffset, rodataEndOffset, \
                dataOffset, dataEndOffset, bssOffset, bssEndOffset, ini1Offset, \
                dynamicOffset, corelocalOffset = up("<11Q", crt0[mapoff:mapoff+0x58])
                kmap = mapoff
                break
        f.seek(0)
        assert kmap != -1

        b = 0x80060000
        self.textoff    = textOffset
        self.textsize   = textEndOffset - textOffset
        self.rodataoff  = rodataOffset
        self.rodatasize = rodataEndOffset - rodataOffset
        self.dataoff    = dataOffset
        self.datasize   = dataEndOffset - dataOffset
        flatsize = self.dataoff + self.datasize


        self.binfile = f

        self.dynamicoff = dynamicOffset
        self.bssoff     = bssOffset
        self.bssend     = bssEndOffset

        self.bsssize = self.bssend - self.bssoff

        self.segment_builder = builder = SegmentBuilder()
        for off,sz,name,kind in [
            (self.textoff, self.textsize, ".text", "CODE"),
            (self.rodataoff, self.rodatasize, ".rodata", "CONST"),
            (self.dataoff, self.datasize, ".data", "DATA"),
            (self.bssoff, self.bsssize, ".bss", "BSS"),
        ]:
            builder.add_segment(off, sz, name, kind)

        # read dynamic
        self.armv7 = False#(f.read_from('Q', self.dynamicoff) > 0xFFFFFFFF or f.read_from('Q', self.dynamicoff+0x10) > 0xFFFFFFFF)
        self.offsize = 4 if self.armv7 else 8

        f.seek(self.dynamicoff)
        self.dynamic = dynamic = {}
        for i in MULTIPLE_DTS:
            dynamic[i] = []
        for i in iter_range((flatsize - self.dynamicoff) // 0x10):
            tag, val = f.read('II' if self.armv7 else 'QQ')
            if tag == DT_NULL:
                break
            if tag in MULTIPLE_DTS:
                dynamic[tag].append(val)
            else:
                dynamic[tag] = val
        dynamicend = f.tell()
        builder.add_section('.dynamic', self.dynamicoff, dynamicend)

        # read .dynstr
        if DT_STRTAB in dynamic and DT_STRSZ in dynamic:
            f.seek(dynamic[DT_STRTAB])
            self.dynstr = f.read(dynamic[DT_STRSZ])
        else:
            self.dynstr = b'\x00'
            print('warning: no dynstr')

        for startkey, szkey, name in [
            (DT_STRTAB, DT_STRSZ, '.dynstr'),
            (DT_INIT_ARRAY, DT_INIT_ARRAYSZ, '.init_array'),
            (DT_FINI_ARRAY, DT_FINI_ARRAYSZ, '.fini_array'),
            (DT_RELA, DT_RELASZ, '.rela.dyn'),
            (DT_REL, DT_RELSZ, '.rel.dyn'),
            (DT_JMPREL, DT_PLTRELSZ, ('.rel.plt' if self.armv7 else '.rela.plt')),
        ]:
            if startkey in dynamic and szkey in dynamic:
                builder.add_section(name, dynamic[startkey], size=dynamic[szkey])

        self.needed = [self.get_dynstr(i) for i in self.dynamic[DT_NEEDED]]

        # load .dynsym
        self.symbols = symbols = []
        f.seek(dynamic[DT_SYMTAB])
        while True:
            if dynamic[DT_SYMTAB] < dynamic[DT_STRTAB] and f.tell() >= dynamic[DT_STRTAB]:
                break
            if self.armv7:
                st_name, st_value, st_size, st_info, st_other, st_shndx = f.read('IIIBBH')
            else:
                st_name, st_info, st_other, st_shndx, st_value, st_size = f.read('IBBHQQ')
            if st_name > len(self.dynstr):
                break
            symbols.append(ElfSym(self.get_dynstr(st_name), st_info, st_other, st_shndx, st_value, st_size))
        builder.add_section('.dynsym', dynamic[DT_SYMTAB], end=f.tell())

        # .hash, .gnu.hash
        if DT_HASH in dynamic and DT_GNU_HASH in dynamic and DT_SYMTAB in dynamic:
            builder.add_section('.hash', dynamic[DT_HASH], end=dynamic[DT_GNU_HASH])
            builder.add_section('.gnu.hash', dynamic[DT_GNU_HASH], end=dynamic[DT_SYMTAB])

        self.plt_entries = []
        self.relocations = []
        locations = set()
        if DT_REL in dynamic:
            locations |= self.process_relocations(f, symbols, dynamic[DT_REL], dynamic[DT_RELSZ])

        if DT_RELA in dynamic:
            locations |= self.process_relocations(f, symbols, dynamic[DT_RELA], dynamic[DT_RELASZ])

        if DT_JMPREL in dynamic:
            # Note: 5.0 kernel doesn't have it
            pltlocations = self.process_relocations(f, symbols, dynamic[DT_JMPREL], dynamic[DT_PLTRELSZ])
            locations |= pltlocations

            plt_got_start = min(pltlocations)
            plt_got_end = max(pltlocations) + self.offsize
            if DT_PLTGOT in dynamic:
                builder.add_section('.got.plt', dynamic[DT_PLTGOT], end=plt_got_end)

            if not self.armv7:
                f.seek(0)
                text = f.read(self.textsize)
                last = 12
                while True:
                    pos = text.find(struct.pack('<I', 0xD61F0220), last)
                    if pos == -1: break
                    last = pos+1
                    if (pos % 4) != 0: continue
                    off = pos - 12
                    a, b, c, d = struct.unpack_from('<IIII', text, off)
                    if d == 0xD61F0220 and (a & 0x9f00001f) == 0x90000010 and (b & 0xffe003ff) == 0xf9400211:
                        base = off & ~0xFFF
                        immhi = (a >> 5) & 0x7ffff
                        immlo = (a >> 29) & 3
                        paddr = base + ((immlo << 12) | (immhi << 14))
                        poff = ((b >> 10) & 0xfff) << 3
                        target = paddr + poff
                        if plt_got_start <= target < plt_got_end:
                            self.plt_entries.append((off, target))
                builder.add_section('.plt', min(self.plt_entries)[0], end=max(self.plt_entries)[0] + 0x10)

            # try to find the ".got" which should follow the ".got.plt"
            good = False
            got_end = plt_got_end + self.offsize
            while got_end in locations and (DT_INIT_ARRAY not in dynamic or got_end < dynamic[DT_INIT_ARRAY]):
                good = True
                got_end += self.offsize

            if good:
                builder.add_section('.got', plt_got_end, end=got_end)
        else:
            # .got is between .dynamic and .init_array on 5.0
            builder.add_section('.got', dynamicend, dynamic[DT_INIT_ARRAY])
        self.sections = []
        for start, end, name, kind in builder.flatten():
            self.sections.append((start, end, name, kind))
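Several names in the example above (BinFile, SegmentBuilder, the DT_* constants, MULTIPLE_DTS) come from elsewhere in its project and are not shown. Two of the smaller assumed helpers can be sketched plausibly: iter_range as a Python 2/3 range shim, and ElfSym as a plain record matching the constructor calls above.

from collections import namedtuple

# Assumed record type; field order matches ElfSym(name, info, other, shndx, value, size) above.
ElfSym = namedtuple('ElfSym', ['name', 'info', 'other', 'shndx', 'value', 'size'])

# Assumed Python 2/3 range shim.
try:
    iter_range = xrange
except NameError:
    iter_range = range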
Example #20
def is_valid_kernel_map_5x(dat, ofs):
    ts, te, rs, re, ds, de, bs, be, i1, dn, cl = up('<QQQQQQQQQQQ', dat[ofs:ofs+0x58])
    return is_valid_kernel_map_impl(ts, te, rs, re, ds, de, bs, be, i1, dn)
Example #21
def is_valid_kernel_map(dat, ofs):
    ts, te, rs, re, ds, de, bs, be, i1, dn, ns, ne = up('<IIIIIIIIIIII', dat[ofs:ofs+0x30])
    return is_valid_kernel_map_impl(ts, te, rs, re, ds, de, bs, be, i1, dn)
Example #22
def find_flags(full, num_fields):
    KNOWN = '\x00' + ('\x01' * 6) + '\x00\x01\x01\x00'
    if num_fields < 443 + len(KNOWN):
        return [0] * num_fields
    ind = full.index(KNOWN) - 443
    return list(up('<' + 'B' * num_fields, full[ind:ind + num_fields]))
Example #23
def find_categories(full, num_fields):
    KNOWN = [0] * 10 + [1] * 2 + [0x3B] * 2
    ind = full.index(''.join(pk('<I', i) for i in KNOWN))
    return list(up('<' + 'I' * num_fields, full[ind:ind + 4 * num_fields]))
Example #24
File: nso.py Project: zwluoqi/Tinfoil
def read_u8(fp, off):
    return up('<B', read_at(fp, off, 1))[0]
Example #25
def read_u64(fp, off):
    return up('<Q', read_at(fp, off, 8))[0]
Example #26
def read_u8(fp, off):
    return up('<B', read_at(fp, off, 1))[0]
Example #27
File: nso.py Project: zwluoqi/Tinfoil
def read_u32(fp, off):
    return up('<I', read_at(fp, off, 4))[0]
Example #28
def read_u16(fp, off):
    return up('<H', read_at(fp, off, 2))[0]
Example #29
File: nso.py Project: zwluoqi/Tinfoil
def read_u64(fp, off):
    return up('<Q', read_at(fp, off, 8))[0]
Example #30
def read_u32(fp, off):
    return up('<I', read_at(fp, off, 4))[0]
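The read_uXX helpers above all go through read_at(), which is not shown in any of these snippets. A minimal sketch, assuming it reads size bytes at an absolute offset of a seekable file object:

def read_at(fp, off, size):
    # Assumed helper: seek to off and read size bytes.
    fp.seek(off)
    return fp.read(size)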
Example #31
 def get_dword(z, target):
     return up('<I', z[target:target + 4])[0]
Example #32
File: nso.py Project: fantleas/tinfoil2
def repack_nso(path_original, path_patch, path_out):
    nso = b''

    patched_text_hash = b''

    # Read the original NSO
    with open(path_original, 'rb') as f:
        nso = bytearray(f.read())

    # Read the patched text
    with open(path_patch, 'rb') as f:
        data = f.read()
        patched_text_hash = sha256(data)
        compressed_patched_text = lz4.block.compress(data, store_size=False)

    text_off = up('<I', nso[0x10:0x14])[0]
    text_compressed_size = len(compressed_patched_text)

    # Retrieve original rodata segment
    rodata_off = up('<I', nso[0x20:0x24])[0]
    rodata_compressed_size = up('<I', nso[0x64:0x68])[0]
    compressed_rodata = nso[rodata_off:rodata_off + rodata_compressed_size]

    # Retrieve original data segment
    data_off = up('<I', nso[0x30:0x34])[0]
    data_compressed_size = up('<I', nso[0x68:0x6C])[0]
    compressed_data = nso[data_off:data_off + data_compressed_size]

    # Set to the offsets of the output nso
    rodata_off = text_off + text_compressed_size
    data_off = rodata_off + rodata_compressed_size

    # Create the output nso
    out_nso = bytearray(data_off + data_compressed_size)

    # Copy over the original header
    out_nso[0x0:0x100] = nso[0x0:text_off]

    # Write the new text hash
    out_nso[0xA0:0xC0] = patched_text_hash

    # Write the new compressed text size
    out_nso[0x60:0x64] = pk('<I', text_compressed_size)

    # Correct the header offsets
    out_nso[0x20:0x24] = pk('<I', rodata_off)  # rodata offset
    out_nso[0x30:0x34] = pk('<I', data_off)  # data offset

    # Write new data
    out_nso[text_off:text_off + text_compressed_size] = compressed_patched_text
    out_nso[rodata_off:rodata_off + rodata_compressed_size] = compressed_rodata
    out_nso[data_off:data_off + data_compressed_size] = compressed_data

    print('text:')

    print('\nrodata:')
    print('Offset                    {}'.format(rodata_off))
    print('len(compressed_rodata)    {}'.format(len(compressed_rodata)))
    print('Original header size      {}'.format(rodata_compressed_size))

    print('\ndata:')
    print('Offset                    {}'.format(data_off))
    print('len(compressed_data)      {}'.format(len(compressed_data)))
    print('Original header size      {}'.format(data_compressed_size))

    with open(path_out, 'wb') as f:
        f.write(out_nso)
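repack_nso() calls a sha256() helper and writes the result into the 0x20-byte hash field at 0xA0..0xC0. A plausible wrapper, assuming it returns the raw digest rather than a hex string:

import hashlib

def sha256(data):
    # Assumed helper: raw SHA-256 digest (32 bytes), matching the header hash field size.
    return hashlib.sha256(data).digest()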
Example #33
File: nso.py Project: zwluoqi/Tinfoil
def read_u16(fp, off):
    return up('<H', read_at(fp, off, 2))[0]
Example #34
    def message_record(self, message, timestamp):
        if self.thread.isRunning():
            dont_print = 0
            error = "0"
            sweep_count = 0
            oil_rh = 0
            s0_temp_post_ref = 0
            s0_temp_post_sample = 0
            s0_phase = 0
            s0_magnitude = 0
            s1_temp_post_ref = 0
            s1_temp_post_sample = 0
            s1_phase = 0
            s1_magnitude = 0
            s2_temp_post_sample = 0
            s2_phase = 0
            s2_magnitude = 0
            s3_temp_post_sample = 0
            s3_phase = 0
            s3_magnitude = 0
            s4_temp_post_sample = 0
            s4_phase = 0
            s4_magnitude = 0

            if len(message) == 77:
                sweep_count = up('<H', message[0:2])[0]
                oil_rh = up('<H', message[2:4])[0] / 100
                s0_temp_post_ref = (up('<f', message[4:8])[0])
                s0_temp_post_sample = (up('<f', message[8:12])[0])
                s0_magnitude = (up('<f', message[12:16])[0])
                s0_phase = (up('<f', message[16:20])[0])
                s1_temp_post_ref = (up('<f', message[20:24])[0])
                s1_temp_post_sample = (up('<f', message[24:28])[0])
                s1_magnitude = (up('<f', message[28:32])[0])
                s1_phase = (up('<f', message[32:36])[0])
                s2_temp_post_sample = (up('<f', message[36:40])[0])
                s2_magnitude = (up('<f', message[40:44])[0])
                s2_phase = (up('<f', message[44:48])[0])
                s3_temp_post_sample = (up('<f', message[48:52])[0])
                s3_magnitude = (up('<f', message[52:56])[0])
                s3_phase = (up('<f', message[56:60])[0])
                s4_temp_post_sample = (up('<f', message[60:64])[0])
                s4_magnitude = (up('<f', message[64:68])[0])
                s4_phase = (up('<f', message[68:72])[0])
                #self.log_values.emit([sweep,oil_rh,s0_temp_post_sample,s0_phase,s0_magnitude])
                self.log_message.emit("count:" + str(sweep_count) + " RH:" +
                                      str(oil_rh) + " temp:" +
                                      str(s0_temp_post_sample) + " phase:" +
                                      str(s0_phase) + " mag:" +
                                      str(s0_magnitude))
            elif len(message) == 4:
                self.log_message.emit("sensor reports error code" +
                                      str(message))
                error = str(message)
            else:
                self.log_message.emit("Canbus RX Thread Error")
                dont_print = 1

            if dont_print == 0:
                ts_formatted = datetime.datetime.fromtimestamp(
                    timestamp).strftime('%Y-%m-%d %H:%M:%S')
                with open(self.file_name, 'a') as f:
                    f.write(
                        "{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{}\n"
                        .format(ts_formatted, sweep_count, oil_rh,
                                s0_temp_post_ref, s0_temp_post_sample,
                                s0_magnitude, s0_phase, s1_temp_post_ref,
                                s1_temp_post_sample, s1_magnitude, s1_phase,
                                s2_temp_post_sample, s2_magnitude, s2_phase,
                                s3_temp_post_sample, s3_magnitude, s3_phase,
                                s4_temp_post_sample, s4_magnitude, s4_phase,
                                error))
Example #35
File: nso.py Project: zwluoqi/Tinfoil
def repack_nso(path_original, path_patch, path_out):
    nso = b''

    patched_text_hash = b''

    # Read the original NSO
    with open(path_original, 'rb') as f:
        nso = bytearray(f.read())

    # Read the patched text
    with open(path_patch, 'rb') as f:
        data = f.read()
        patched_text_hash = sha256(data)
        compressed_patched_text = lz4.block.compress(data, store_size=False)


    text_off = up('<I', nso[0x10:0x14])[0]
    text_compressed_size = len(compressed_patched_text)

    # Retrieve original rodata segment
    rodata_off = up('<I', nso[0x20:0x24])[0]
    rodata_compressed_size = up('<I', nso[0x64:0x68])[0]
    compressed_rodata = nso[rodata_off:rodata_off+rodata_compressed_size]

    # Retrieve original data segment
    data_off = up('<I', nso[0x30:0x34])[0]
    data_compressed_size = up('<I', nso[0x68:0x6C])[0]
    compressed_data = nso[data_off:data_off+data_compressed_size]

    # Set to the offsets of the output nso
    rodata_off = text_off + text_compressed_size
    data_off = rodata_off + rodata_compressed_size

    # Create the output nso
    out_nso = bytearray(data_off + data_compressed_size)

    # Copy over the original header
    out_nso[0x0:0x100] = nso[0x0:text_off]

    # Write the new text hash
    out_nso[0xA0:0xC0] = patched_text_hash

    # Write the new compressed text size
    out_nso[0x60:0x64] = pk('<I', text_compressed_size)

    # Correct the header offsets
    out_nso[0x20:0x24] = pk('<I', rodata_off) # rodata offset
    out_nso[0x30:0x34] = pk('<I', data_off) # data offset

    # Write new data
    out_nso[text_off:text_off+text_compressed_size] = compressed_patched_text
    out_nso[rodata_off:rodata_off+rodata_compressed_size] = compressed_rodata
    out_nso[data_off:data_off+data_compressed_size] = compressed_data

    print('text:')

    print('\nrodata:')
    print('Offset                    {}'.format(rodata_off))
    print('len(compressed_rodata)    {}'.format(len(compressed_rodata)))
    print('Original header size      {}'.format(rodata_compressed_size))

    print('\ndata:')
    print('Offset                    {}'.format(data_off))
    print('len(compressed_data)      {}'.format(len(compressed_data)))
    print('Original header size      {}'.format(data_compressed_size))

    with open(path_out, 'wb') as f:
        f.write(out_nso)