Example 1
# Python 2 variant: plain str buffer and old-style "except" clause.
def _process_reg_file(ubifs, inode, path):
    try:
        buf = ''
        if 'data' in inode:
            compr_type = 0
            sorted_data = sorted(inode['data'], key=lambda x: x.key['khash'])
            last_khash = sorted_data[0].key['khash'] - 1
            for data in sorted_data:

                # If data nodes are missing in sequence, fill in blanks
                # with \x00 * UBIFS_BLOCK_SIZE
                if data.key['khash'] - last_khash != 1:
                    while 1 != (data.key['khash'] - last_khash):
                        buf += '\x00' * UBIFS_BLOCK_SIZE
                        last_khash += 1

                compr_type = data.compr_type
                ubifs.file.seek(data.offset)
                d = ubifs.file.read(data.compr_len)
                buf += decompress(compr_type, data.size, d)
                last_khash = data.key['khash']
                verbose_log(
                    _process_reg_file,
                    'ino num: %s, compression: %s, path: %s' %
                    (inode['ino'].key['ino_num'], compr_type, path))

    except Exception, e:
        error(_process_reg_file, 'Warn',
              'inode num:%s :%s' % (inode['ino'].key['ino_num'], e))

    # Pad end of file with \x00 if needed.
    if inode['ino'].size > len(buf):
        buf += '\x00' * (inode['ino'].size - len(buf))

    return buf
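
The gap-filling loop relies on the UBIFS_BLOCK_SIZE constant; in the UBIFS on-disk format a data node carries at most 4 KiB of uncompressed file content. A minimal definition, assuming it is not already imported from the surrounding module:

# A UBIFS data node covers at most 4 KiB of uncompressed file content;
# a node missing from the khash sequence is a hole of exactly this size.
UBIFS_BLOCK_SIZE = 4096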
Example 2
def _process_reg_file(ubifs, inode, path):
    try:
        buf = b''
        if 'data' in inode:
            compr_type = 0
            sorted_data = sorted(inode['data'], key=lambda x: x.key['khash'])
            last_khash = sorted_data[0].key['khash'] - 1

            for data in sorted_data:
                
                # If data nodes are missing in sequence, fill in blanks
                # with \x00 * UBIFS_BLOCK_SIZE
                if data.key['khash'] - last_khash != 1:
                    while 1 != (data.key['khash'] - last_khash):
                        buf += b'\x00' * UBIFS_BLOCK_SIZE
                        last_khash += 1

                compr_type = data.compr_type
                ubifs.file.seek(data.offset)
                d = ubifs.file.read(data.compr_len)
                buf += decompress(compr_type, data.size, d)
                last_khash = data.key['khash']
                verbose_log(
                    _process_reg_file,
                    'ino num: %s, compression: %s, path: %s' %
                    (inode['ino'].key['ino_num'], compr_type, path))

    except Exception as e:
        error(_process_reg_file, 'Warn',
              'inode num:%s :%s' % (inode['ino'].key['ino_num'], e))
    
    # Pad end of file with \x00 if needed.
    if inode['ino'].size > len(buf):
        buf += b'\x00' * (inode['ino'].size - len(buf))
        
    return buf
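
A minimal caller sketch showing how the returned buffer could be written out. The names extract_reg_file and out_dir are hypothetical, and the ubifs, inode and rel_path arguments are assumed to come from the surrounding extraction walk:

import os

def extract_reg_file(ubifs, inode, rel_path, out_dir):
    # Hypothetical helper: rebuild the file body and write it under out_dir.
    buf = _process_reg_file(ubifs, inode, rel_path)
    dest = os.path.join(out_dir, rel_path.lstrip('/'))
    os.makedirs(os.path.dirname(dest) or '.', exist_ok=True)
    with open(dest, 'wb') as f:
        f.write(buf)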
Example 3
def _process_reg_file(ubifs, inode, path):
    try:
        buf = b''
        if 'data' in inode:
            compr_type = 0
            sorted_data = sorted(inode['data'],
                                 key=lambda x: x.key['khash'])
            last_khash = sorted_data[0].key['khash'] - 1

            for data in sorted_data:

                # If data nodes are missing in sequence, fill in blanks
                # with \x00 * UBIFS_BLOCK_SIZE
                if data.key['khash'] - last_khash != 1:
                    while 1 != (data.key['khash'] - last_khash):
                        buf += b'\x00' * UBIFS_BLOCK_SIZE
                        last_khash += 1

                compr_type = data.compr_type
                ubifs.file.seek(data.offset)
                d = ubifs.file.read(data.compr_len)
                buf += decompress(compr_type, data.size, d)
                last_khash = data.key['khash']
                # verbose_log(_process_reg_file,
                #             'ino num: %s, compression: %s, path: %s' %
                #             (inode['ino'].key['ino_num'], compr_type, path))

    except Exception as e:
        print('_process_reg_file inode num:%s :%s' %
              (inode['ino'].key['ino_num'], e))

    # Pad end of file with \x00 if needed.
    if inode['ino'].size > len(buf):
        buf += b'\x00' * (inode['ino'].size - len(buf))

    return buf
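
All three variants delegate per-node decompression to a decompress() helper that is not shown here. The sketch below is an assumption about what such a helper could look like, using the standard UBIFS compression type values (none = 0, LZO = 1, zlib = 2); the zlib branch expects a raw deflate stream, and the LZO branch assumes the headerless decompress form of the third-party python-lzo package:

import zlib

# UBIFS on-disk compression type values.
UBIFS_COMPR_NONE = 0
UBIFS_COMPR_LZO = 1
UBIFS_COMPR_ZLIB = 2

def decompress(compr_type, unc_len, data):
    """Return the uncompressed payload of a single data node (sketch)."""
    if compr_type == UBIFS_COMPR_NONE:
        return data
    if compr_type == UBIFS_COMPR_ZLIB:
        # UBIFS stores raw deflate data without a zlib header, hence the
        # negative window-bits value.
        return zlib.decompress(data, -zlib.MAX_WBITS)
    if compr_type == UBIFS_COMPR_LZO:
        # Assumption: the third-party python-lzo package is installed and
        # supports the headerless decompress(data, header, buflen) form.
        import lzo
        return lzo.decompress(data, False, unc_len)
    raise ValueError('unsupported compression type %d' % compr_type)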