Ejemplo n.º 1
0
    def get_stream(self, sid, slack=False):
        """Retrieves the contents of a stream.

        :type sid: ``int``
        :param sid: The stream identifier for the directory entry associated
                    with the stream.

        :type slack: ``bool``
        :param slack: If ``True``, the contents of the entire stream are
                      returned.  Otherwise the stream is truncated at the size
                      specified by the associated directory entry.

        :raises IndexError: If :attr:`sid` is out of range.

        :rtype: :class:`lf.dec.IStream`
        :returns: An :class:`lf.dec.IStream` covering the contents of the
                  stream.

        """
        dir_entry = self.get_dir_entry(sid)

        # Per MS-CFB, version 3 files only guarantee the low 32 bits of the
        # stream size; the high dword may contain garbage and must be masked.
        # (Previously the mask was applied only in the multi-sector path, so
        # the single-sector path and the mini-stream cutoff test could see a
        # bogus 64-bit size.)
        stream_size = dir_entry.stream_size
        if self.ver_major == 0x3:
            stream_size = stream_size & 0x00000000FFFFFFFF
        # end if

        # The root entry (sid 0) is the mini stream container and always lives
        # in the regular FAT; other streams below the cutoff live in the mini
        # stream.
        if (sid != 0) and (stream_size < self.mini_stream_cutoff):
            get_chain = self.get_mini_fat_chain
            byte_offset = self.mini_byte_offset
            sect_size = self.mini_sect_size
            stream = self.mini_stream
        else:
            get_chain = self.get_fat_chain
            byte_offset = self.byte_offset
            sect_size = self.sect_size
            stream = self.cfb_stream
        # end if

        try:
            chain = get_chain(dir_entry.stream_sect_offset)
        except IndexError:
            # An invalid starting sector means there is nothing to read.
            return ByteIStream(b"")
        # end try

        # Fast path: a single-sector stream needs no run coalescing.
        if len(chain) == 1:
            start = byte_offset(chain[0])
            if slack:
                return SubsetIStream(stream, start, sect_size)
            # end if
            return SubsetIStream(stream, start, stream_size)
        # end if

        # Coalesce consecutive sectors into (byte offset, byte count) runs so
        # the composite stream has as few segments as possible.
        runs = list()
        start = byte_offset(chain[0])
        prev_entry = chain[0]
        count = 1

        for entry in chain[1:]:
            if (entry - prev_entry) == 1:
                count += 1
            else:
                runs.append((start, count * sect_size))
                start = byte_offset(entry)
                count = 1
            # end if

            prev_entry = entry
        # end for
        runs.append((start, count * sect_size))  # flush the final run

        segments = [(stream, run[0], run[1]) for run in runs]

        if slack:
            return CompositeIStream(segments)
        # end if

        return SubsetIStream(CompositeIStream(segments), 0, stream_size)
Ejemplo n.º 2
0
    def __init__(self, stream, offset=None):
        """Initializes a :class:`CompoundFile` object.

        Parses the header, the double indirect FAT, the FAT, the mini FAT,
        the directory stream, and the mini stream, populating the
        corresponding attributes.

        :type stream: :class:`lf.dec.IStream`
        :param stream: A stream covering the contents of the compound file.

        :type offset: ``int``
        :param offset: The start of the compound file in the stream.

        """
        byte_offset = self.byte_offset
        fat = list()
        mini_fat = list()
        di_fat = list()

        header = Header.from_stream(stream, offset)

        self.header = header
        self.ver_major = header.ver_major
        self.ver_minor = header.ver_minor

        sect_size = (1 << header.sect_shift)
        self.sect_size = sect_size

        self.mini_sect_size = (1 << header.mini_sect_shift)
        self.mini_stream_cutoff = header.mini_stream_cutoff
        self.di_fat = di_fat
        self.fat = fat
        self.mini_fat = mini_fat
        self.cfb_stream = stream
        stream_len = stream.size

        entries_per_sect = sect_size // 4  # FAT entries are 32-bit values
        max_sect = stream_len // sect_size

        # First thing is build the FAT.  To do this we need to build (and
        # parse) the double indirect FAT.

        # Gather all of the double indirect FAT entries into di_fat
        di_fat.extend(header.di_fat)
        if header.di_fat_sect_count and (header.di_fat_sect_offset < max_sect):
            di_fat_entries = di_fat_entry * entries_per_sect
            next_sect = header.di_fat_sect_offset

            # Follow the chain of DI FAT sectors; the last entry of each
            # sector points to the next DI FAT sector.
            while (next_sect <= MAX_REG_SECT) and (next_sect < max_sect):
                offset = (next_sect + 1) * sect_size
                stream.seek(offset, SEEK_SET)

                values = \
                    di_fat_entries.from_buffer_copy(stream.read(sect_size))
                di_fat.extend(values[:-1])  # Don't include next_sect in di_fat

                next_sect = values[-1]
            # end while
        # end if

        # Create a list of sector runs for the FAT (from di_fat)
        runs = list()
        start = byte_offset(di_fat[0])
        prev_entry = di_fat[0]
        count = 1

        for entry in di_fat[1:]:
            if (entry == FAT_EOC) or (entry == FAT_UNALLOC):
                break
            # end if

            if (entry - prev_entry) == 1:
                count += 1
            else:
                runs.append((start, count * sect_size))
                start = byte_offset(entry)
                count = 1
            # end if

            prev_entry = entry
        # end for

        runs.append((start, count * sect_size))

        # Create a composite stream of the FAT (so we can parse it)
        segments = [(stream, run[0], run[1]) for run in runs]
        fat_stream = CompositeIStream(segments)

        # Extract the entries of the FAT
        sect_count = fat_stream.size // sect_size
        fat_entries = fat_entry * entries_per_sect

        for sect_index in range(sect_count):
            fat_stream.seek(sect_index * sect_size, SEEK_SET)
            values = fat_entries.from_buffer_copy(fat_stream.read(sect_size))
            fat.extend(values)
        # end for


        # Create the mini fat
        if header.mini_fat_sect_count != 0:
            # First we need the sector chain
            mini_fat_chain = self.get_fat_chain(header.mini_fat_sect_offset)

            if len(mini_fat_chain) == 1:
                start = byte_offset(header.mini_fat_sect_offset)
                mini_fat_stream = SubsetIStream(stream, start, sect_size)
            else:
                # Create a list of sector runs from the mini fat chain
                runs = list()
                start = byte_offset(mini_fat_chain[0])
                prev_entry = mini_fat_chain[0]
                count = 1

                for entry in mini_fat_chain[1:]:
                    if (entry - prev_entry) == 1:
                        count += 1
                    else:
                        runs.append((start, count * sect_size))
                        start = byte_offset(entry)
                        count = 1
                    # end if

                    prev_entry = entry
                else:
                    runs.append((start, count * sect_size))
                # end for

                # Create a stream of the contents of the mini fat
                segments = [(stream, run[0], run[1]) for run in runs]
                mini_fat_stream = CompositeIStream(segments)
            # end if

            # Extract the contents of the mini fat from the mini fat stream
            sect_count = mini_fat_stream.size // sect_size
            mini_fat_entries = mini_fat_entry * entries_per_sect

            for sect_index in range(sect_count):
                mini_fat_stream.seek(sect_index * sect_size, SEEK_SET)
                data = mini_fat_stream.read(sect_size)
                values = mini_fat_entries.from_buffer_copy(data)
                mini_fat.extend(values)
            # end for
        # end if


        # Create the directory stream.  First we need the sector chain.
        dir_chain = self.get_fat_chain(header.dir_sect_offset)

        # Create a list of sector runs from the directory chain
        runs = list()
        start = byte_offset(dir_chain[0])
        # BUGFIX: prev_entry must be reset here; previously it carried a stale
        # value from the di_fat / mini fat loops above, which could wrongly
        # merge (or split) the first run of directory sectors.
        prev_entry = dir_chain[0]
        count = 1

        for entry in dir_chain[1:]:
            if (entry - prev_entry) == 1:
                count += 1
            else:
                runs.append((start, count * sect_size))
                start = byte_offset(entry)
                count = 1
            # end if

            prev_entry = entry
        else:
            runs.append((start, count * sect_size))
        # end for

        # Create the dir_stream attribute
        segments = [(stream, run[0], run[1]) for run in runs]
        dir_stream = CompositeIStream(segments)
        self.dir_stream = dir_stream


        # Create the root directory
        max_dir_entry = dir_stream.size // 128  # directory entries are 128 bytes

        if max_dir_entry > STREAM_ID_MAX:
            max_dir_entry = STREAM_ID_MAX
        # end if

        # Create the dir_entries attribute by traversing the rb-tree
        dir_entries = dict()
        for sid in range(max_dir_entry):
            dir_entries[sid] = DirEntry.from_stream(dir_stream, sid * 128)
        # end for

        self.root_dir_entry = dir_entries[0]
        self.dir_entries = dir_entries


        # Create the mini stream
        if header.mini_fat_sect_count:
            self.mini_stream = self.get_stream(0, slack=True)
        else:
            self.mini_stream = ByteIStream(b"")
Ejemplo n.º 3
0
    def __init__(self, stream, offset=None):
        """Initializes a :class:`CompoundFile` object.

        Parses the header, the double indirect FAT, the FAT, the mini FAT,
        the directory stream, and the mini stream, populating the
        corresponding attributes.

        :type stream: :class:`lf.dec.IStream`
        :param stream: A stream covering the contents of the compound file.

        :type offset: ``int``
        :param offset: The start of the compound file in the stream.

        """
        byte_offset = self.byte_offset
        fat = list()
        mini_fat = list()
        di_fat = list()

        header = Header.from_stream(stream, offset)

        self.header = header
        self.ver_major = header.ver_major
        self.ver_minor = header.ver_minor

        sect_size = (1 << header.sect_shift)
        self.sect_size = sect_size

        self.mini_sect_size = (1 << header.mini_sect_shift)
        self.mini_stream_cutoff = header.mini_stream_cutoff
        self.di_fat = di_fat
        self.fat = fat
        self.mini_fat = mini_fat
        self.cfb_stream = stream
        stream_len = stream.size

        entries_per_sect = sect_size // 4  # FAT entries are 32-bit values
        max_sect = stream_len // sect_size

        # First thing is build the FAT.  To do this we need to build (and
        # parse) the double indirect FAT.

        # Gather all of the double indirect FAT entries into di_fat
        di_fat.extend(header.di_fat)
        if header.di_fat_sect_count and (header.di_fat_sect_offset < max_sect):
            di_fat_entries = di_fat_entry * entries_per_sect
            next_sect = header.di_fat_sect_offset

            # Follow the chain of DI FAT sectors; the last entry of each
            # sector points to the next DI FAT sector.
            while (next_sect <= MAX_REG_SECT) and (next_sect < max_sect):
                offset = (next_sect + 1) * sect_size
                stream.seek(offset, SEEK_SET)

                values = \
                    di_fat_entries.from_buffer_copy(stream.read(sect_size))
                di_fat.extend(values[:-1])  # Don't include next_sect in di_fat

                next_sect = values[-1]
            # end while
        # end if

        # Create a list of sector runs for the FAT (from di_fat)
        runs = list()
        start = byte_offset(di_fat[0])
        prev_entry = di_fat[0]
        count = 1

        for entry in di_fat[1:]:
            if (entry == FAT_EOC) or (entry == FAT_UNALLOC):
                break
            # end if

            if (entry - prev_entry) == 1:
                count += 1
            else:
                runs.append((start, count * sect_size))
                start = byte_offset(entry)
                count = 1
            # end if

            prev_entry = entry
        # end for

        runs.append((start, count * sect_size))

        # Create a composite stream of the FAT (so we can parse it)
        segments = [(stream, run[0], run[1]) for run in runs]
        fat_stream = CompositeIStream(segments)

        # Extract the entries of the FAT
        sect_count = fat_stream.size // sect_size
        fat_entries = fat_entry * entries_per_sect

        for sect_index in range(sect_count):
            fat_stream.seek(sect_index * sect_size, SEEK_SET)
            values = fat_entries.from_buffer_copy(fat_stream.read(sect_size))
            fat.extend(values)
        # end for

        # Create the mini fat
        if header.mini_fat_sect_count != 0:
            # First we need the sector chain
            mini_fat_chain = self.get_fat_chain(header.mini_fat_sect_offset)

            if len(mini_fat_chain) == 1:
                start = byte_offset(header.mini_fat_sect_offset)
                mini_fat_stream = SubsetIStream(stream, start, sect_size)
            else:
                # Create a list of sector runs from the mini fat chain
                runs = list()
                start = byte_offset(mini_fat_chain[0])
                prev_entry = mini_fat_chain[0]
                count = 1

                for entry in mini_fat_chain[1:]:
                    if (entry - prev_entry) == 1:
                        count += 1
                    else:
                        runs.append((start, count * sect_size))
                        start = byte_offset(entry)
                        count = 1
                    # end if

                    prev_entry = entry
                else:
                    runs.append((start, count * sect_size))
                # end for

                # Create a stream of the contents of the mini fat
                segments = [(stream, run[0], run[1]) for run in runs]
                mini_fat_stream = CompositeIStream(segments)
            # end if

            # Extract the contents of the mini fat from the mini fat stream
            sect_count = mini_fat_stream.size // sect_size
            mini_fat_entries = mini_fat_entry * entries_per_sect

            for sect_index in range(sect_count):
                mini_fat_stream.seek(sect_index * sect_size, SEEK_SET)
                data = mini_fat_stream.read(sect_size)
                values = mini_fat_entries.from_buffer_copy(data)
                mini_fat.extend(values)
            # end for
        # end if

        # Create the directory stream.  First we need the sector chain.
        dir_chain = self.get_fat_chain(header.dir_sect_offset)

        # Create a list of sector runs from the directory chain
        runs = list()
        start = byte_offset(dir_chain[0])
        # BUGFIX: prev_entry must be reset here; previously it carried a stale
        # value from the di_fat / mini fat loops above, which could wrongly
        # merge (or split) the first run of directory sectors.
        prev_entry = dir_chain[0]
        count = 1

        for entry in dir_chain[1:]:
            if (entry - prev_entry) == 1:
                count += 1
            else:
                runs.append((start, count * sect_size))
                start = byte_offset(entry)
                count = 1
            # end if

            prev_entry = entry
        else:
            runs.append((start, count * sect_size))
        # end for

        # Create the dir_stream attribute
        segments = [(stream, run[0], run[1]) for run in runs]
        dir_stream = CompositeIStream(segments)
        self.dir_stream = dir_stream

        # Create the root directory
        max_dir_entry = dir_stream.size // 128  # directory entries are 128 bytes

        if max_dir_entry > STREAM_ID_MAX:
            max_dir_entry = STREAM_ID_MAX
        # end if

        # Create the dir_entries attribute by traversing the rb-tree
        dir_entries = dict()
        for sid in range(max_dir_entry):
            dir_entries[sid] = DirEntry.from_stream(dir_stream, sid * 128)
        # end for

        self.root_dir_entry = dir_entries[0]
        self.dir_entries = dir_entries

        # Create the mini stream
        if header.mini_fat_sect_count:
            self.mini_stream = self.get_stream(0, slack=True)
        else:
            self.mini_stream = ByteIStream(b"")
Ejemplo n.º 4
0
    def test_get_stream(self):
        """Checks get_stream() against streams rebuilt directly from the FAT.

        The same verification used to be duplicated verbatim for the two
        fixture documents; it is factored into one local checker here.
        """
        ae = self.assertEqual

        def check_doc(doc):
            # Rebuild every stream independently of get_stream() and compare
            # both the slack and non-slack variants.
            for sid in doc.dir_entries:
                dir_entry = doc.dir_entries[sid]
                first_sect = dir_entry.stream_sect_offset

                if sid == 0:
                    # Root entry: mini stream container, always in the FAT.
                    chain = doc.get_fat_chain(first_sect)
                    stream = doc.cfb_stream
                    byte_offset = doc.byte_offset
                    sect_size = doc.sect_size
                elif dir_entry.stream_size < doc.mini_stream_cutoff:
                    chain = doc.get_mini_fat_chain(first_sect)
                    stream = doc.mini_stream
                    byte_offset = doc.mini_byte_offset
                    sect_size = doc.mini_sect_size
                else:
                    chain = doc.get_fat_chain(first_sect)
                    stream = doc.cfb_stream
                    byte_offset = doc.byte_offset
                    sect_size = doc.sect_size
                # end if

                # Coalesce the chain into (first sector, sector count) runs
                # of consecutive sectors.
                chains = list()
                start_index = 0
                delta = 1
                counter = 1
                while counter < len(chain):
                    if (chain[counter] - chain[start_index]) == delta:
                        delta += 1
                        counter += 1
                        continue
                    else:
                        chains.append(
                            (chain[start_index], (counter - start_index))
                        )
                        start_index = counter
                        delta = 1
                        counter += 1
                    # end if
                else:
                    # Flush the final (possibly only) run.
                    chains.append((chain[start_index], (counter - start_index)))
                # end while

                segments = [
                    (stream, byte_offset(x[0]), x[1] * sect_size)
                    for x in chains
                ]

                slack = CompositeIStream(segments)
                noslack = SubsetIStream(slack, 0, dir_entry.stream_size)

                slack.seek(0, SEEK_SET)
                noslack.seek(0, SEEK_SET)

                ae(doc.get_stream(sid, slack=True).read(), slack.read())
                ae(doc.get_stream(sid, slack=False).read(), noslack.read())
            # end for
        # end def

        check_doc(self.sample_doc)
        check_doc(self.blair_doc)
Ejemplo n.º 5
0
    def test__init__(self):
        """Verifies every attribute built by CompoundFile.__init__.

        The expected values are computed here by hand from the raw bytes of
        the two fixture documents (sample_doc and blair_doc) and compared
        against what the constructor produced.
        """
        ae = self.assertEqual
        at = self.assertTrue

        sample_doc = self.sample_doc
        blair_doc = self.blair_doc

        # Version fields copied from the header (both fixtures are v3.62).
        ae(sample_doc.ver_major, 0x3)
        ae(blair_doc.ver_major, 0x3)

        ae(sample_doc.ver_minor, 0x3E)
        ae(blair_doc.ver_minor, 0x3E)

        at(hasattr(sample_doc, "header"))
        at(hasattr(blair_doc, "header"))

        # Raw header fields, parsed verbatim from the first 512 bytes.
        ae(sample_doc.header.sig, HEADER_SIG)
        ae(blair_doc.header.sig, HEADER_SIG)

        ae(sample_doc.header.clsid, UUID(int=0))
        ae(blair_doc.header.clsid, UUID(int=0))

        ae(sample_doc.header.ver_minor, 0x3E)
        ae(blair_doc.header.ver_minor, 0x3E)

        ae(sample_doc.header.ver_major, 0x3)
        ae(blair_doc.header.ver_major, 0x3)

        ae(sample_doc.header.byte_order, 0xFFFE)
        ae(blair_doc.header.byte_order, 0xFFFE)

        # sect_shift 9 => 512-byte sectors; mini_sect_shift 6 => 64 bytes.
        ae(sample_doc.header.sect_shift, 9)
        ae(blair_doc.header.sect_shift, 9)

        ae(sample_doc.header.mini_sect_shift, 6)
        ae(blair_doc.header.mini_sect_shift, 6)

        ae(sample_doc.header.rsvd, b"\x00\x00\x00\x00\x00\x00")
        ae(blair_doc.header.rsvd, b"\x00\x00\x00\x00\x00\x00")

        ae(sample_doc.header.dir_sect_count, 0)
        ae(blair_doc.header.dir_sect_count, 0)

        ae(sample_doc.header.fat_sect_count, 0x87)
        ae(blair_doc.header.fat_sect_count, 1)

        ae(sample_doc.header.dir_sect_offset, 0x3433)
        ae(blair_doc.header.dir_sect_offset, 0x7A)

        ae(sample_doc.header.trans_num, 0)
        ae(blair_doc.header.trans_num, 0)

        ae(sample_doc.header.mini_stream_cutoff, 0x1000)
        ae(blair_doc.header.mini_stream_cutoff, 0x1000)

        ae(sample_doc.header.mini_fat_sect_offset, 0x3435)
        ae(blair_doc.header.mini_fat_sect_offset, 0x7C)

        ae(sample_doc.header.mini_fat_sect_count, 3)
        ae(blair_doc.header.mini_fat_sect_count, 1)

        # 0xFFFFFFFE (FAT_EOC) means blair has no extra DI FAT sectors.
        ae(sample_doc.header.di_fat_sect_offset, 0x3657)
        ae(blair_doc.header.di_fat_sect_offset, 0xFFFFFFFE)

        ae(sample_doc.header.di_fat_sect_count, 1)
        ae(blair_doc.header.di_fat_sect_count, 0)

        # Raw double-indirect FAT for sample_doc: the 109 header entries
        # (436 bytes) plus the contents of its one extra DI FAT sector.
        sample_di_fat_data = ( b"\xCA\x33\x00\x00"
            b"\xCB\x33\x00\x00\xCC\x33\x00\x00\xCD\x33\x00\x00\xCE\x33\x00\x00"
            b"\xCF\x33\x00\x00\xD0\x33\x00\x00\xD1\x33\x00\x00\xD2\x33\x00\x00"
            b"\xD3\x33\x00\x00\xD4\x33\x00\x00\xD5\x33\x00\x00\xD6\x33\x00\x00"
            b"\xD7\x33\x00\x00\xD8\x33\x00\x00\xD9\x33\x00\x00\xDA\x33\x00\x00"
            b"\xDB\x33\x00\x00\xDC\x33\x00\x00\xDD\x33\x00\x00\xDE\x33\x00\x00"
            b"\xDF\x33\x00\x00\xE0\x33\x00\x00\xE1\x33\x00\x00\xE2\x33\x00\x00"
            b"\xE3\x33\x00\x00\xE4\x33\x00\x00\xE5\x33\x00\x00\xE6\x33\x00\x00"
            b"\xE7\x33\x00\x00\xE8\x33\x00\x00\xE9\x33\x00\x00\xEA\x33\x00\x00"
            b"\xEB\x33\x00\x00\xEC\x33\x00\x00\xED\x33\x00\x00\xEE\x33\x00\x00"
            b"\xEF\x33\x00\x00\xF0\x33\x00\x00\xF1\x33\x00\x00\xF2\x33\x00\x00"
            b"\xF3\x33\x00\x00\xF4\x33\x00\x00\xF5\x33\x00\x00\xF6\x33\x00\x00"
            b"\xF7\x33\x00\x00\xF8\x33\x00\x00\xF9\x33\x00\x00\xFA\x33\x00\x00"
            b"\xFB\x33\x00\x00\xFC\x33\x00\x00\xFD\x33\x00\x00\xFE\x33\x00\x00"
            b"\xFF\x33\x00\x00\x00\x34\x00\x00\x01\x34\x00\x00\x02\x34\x00\x00"
            b"\x03\x34\x00\x00\x04\x34\x00\x00\x05\x34\x00\x00\x06\x34\x00\x00"
            b"\x07\x34\x00\x00\x08\x34\x00\x00\x09\x34\x00\x00\x0A\x34\x00\x00"
            b"\x0B\x34\x00\x00\x0C\x34\x00\x00\x0D\x34\x00\x00\x0E\x34\x00\x00"
            b"\x0F\x34\x00\x00\x10\x34\x00\x00\x11\x34\x00\x00\x12\x34\x00\x00"
            b"\x13\x34\x00\x00\x14\x34\x00\x00\x15\x34\x00\x00\x16\x34\x00\x00"
            b"\x17\x34\x00\x00\x18\x34\x00\x00\x19\x34\x00\x00\x1A\x34\x00\x00"
            b"\x1B\x34\x00\x00\x1C\x34\x00\x00\x1D\x34\x00\x00\x1E\x34\x00\x00"
            b"\x1F\x34\x00\x00\x20\x34\x00\x00\x21\x34\x00\x00\x22\x34\x00\x00"
            b"\x23\x34\x00\x00\x24\x34\x00\x00\x25\x34\x00\x00\x26\x34\x00\x00"
            b"\x27\x34\x00\x00\x28\x34\x00\x00\x29\x34\x00\x00\x2A\x34\x00\x00"
            b"\x2B\x34\x00\x00\x2C\x34\x00\x00\x2D\x34\x00\x00\x2E\x34\x00\x00"
            b"\x2F\x34\x00\x00\x30\x34\x00\x00\x31\x34\x00\x00\x32\x34\x00\x00"
            b"\x52\x34\x00\x00\xD3\x34\x00\x00\x54\x35\x00\x00\xD5\x35\x00\x00"
            b"\x56\x36\x00\x00\xD8\x36\x00\x00\x59\x37\x00\x00\xDA\x37\x00\x00"
            b"\x5B\x38\x00\x00\xDC\x38\x00\x00\x5D\x39\x00\x00\xDE\x39\x00\x00"
            b"\x5F\x3A\x00\x00\xE0\x3A\x00\x00\x61\x3B\x00\x00\xE2\x3B\x00\x00"
            b"\x63\x3C\x00\x00\xE4\x3C\x00\x00\x65\x3D\x00\x00\xE6\x3D\x00\x00"
            b"\x67\x3E\x00\x00\xE8\x3E\x00\x00\x69\x3F\x00\x00\xEA\x3F\x00\x00"
            b"\x6B\x40\x00\x00\xEC\x40\x00\x00\x6D\x41\x00\x00\xF4\x41\x00\x00"
            b"\x2E\x42\x00\x00\x2F\x42\x00\x00\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
        )

        # header.di_fat holds only the first 109 entries (436 bytes); the
        # CompoundFile.di_fat attribute also includes the extra DI FAT sector
        # (236 entries total).
        header_di_fat_data = list(unpack("109I", sample_di_fat_data[:436]))
        sample_di_fat_data = list(unpack("236I", sample_di_fat_data))
        ae(sample_doc.header.di_fat, header_di_fat_data)
        ae(sample_doc.di_fat, sample_di_fat_data)

        # blair_doc's DI FAT fits entirely in the header: one FAT sector
        # (0x79) and 108 unallocated (0xFFFFFFFF) entries.
        blair_di_fat_data = ( b"\x79\x00\x00\x00"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
        )

        blair_di_fat_data = list(unpack("109I", blair_di_fat_data))
        ae(blair_doc.header.di_fat, blair_di_fat_data)
        ae(blair_doc.di_fat, blair_di_fat_data)

        # Derived sizes: 1 << sect_shift and 1 << mini_sect_shift.
        ae(sample_doc.sect_size, 512)
        ae(blair_doc.sect_size, 512)

        ae(sample_doc.mini_sect_size, 64)
        ae(blair_doc.mini_sect_size, 64)

        ae(sample_doc.mini_stream_cutoff, 4096)
        ae(blair_doc.mini_stream_cutoff, 4096)

        # Rebuild the FAT by reading the sectors named in the DI FAT (sector
        # N starts at byte (N + 1) * 512 because of the 512-byte header).
        stream = sample_doc.cfb_stream
        segments = [
            (stream, (x + 1) * 512, 512) for x in sample_di_fat_data[:135]
        ]
        stream = CompositeIStream(segments)
        stream.seek(0, SEEK_SET)
        sample_fat = list(unpack("17280I", stream.read()))

        stream = SubsetIStream(blair_doc.cfb_stream, (122 * 512), 512)
        stream.seek(0, SEEK_SET)
        blair_fat = list(unpack("128I", stream.read()))

        ae(sample_doc.fat, sample_fat)
        ae(blair_doc.fat, blair_fat)

        # Rebuild the mini FAT from its (hand-located) sectors.
        stream = sample_doc.cfb_stream
        segments = [
            (stream, (0x3436 * 512), 512),
            (stream, (0x4218 * 512), 512),
            (stream, (0x422B * 512), 512)
        ]
        stream = CompositeIStream(segments)
        stream.seek(0, SEEK_SET)
        sample_mini_fat = list(unpack("384I", stream.read()))

        stream = SubsetIStream(blair_doc.cfb_stream, (124 + 1) * 512, 512)
        stream.seek(0, SEEK_SET)
        blair_mini_fat = list(unpack("128I", stream.read()))

        ae(sample_doc.mini_fat, sample_mini_fat)
        ae(blair_doc.mini_fat, blair_mini_fat)

        # Rebuild the mini stream (contents of the root directory entry)
        # from its hand-located FAT sectors and compare byte-for-byte.
        stream = sample_doc.cfb_stream
        sample_mini_stream_sects = [
            0x3436, 0x3451, 0x41ef, 0x41f0, 0x41f1, 0x41f2, 0x41f3, 0x420c,
            0x420d, 0x420e, 0x420f, 0x4210, 0x4211, 0x4213, 0x4214, 0x4216,
            0x4218, 0x4219, 0x421a, 0x421c, 0x421d, 0x421e, 0x421f, 0x4220,
            0x4221, 0x4222, 0x4223, 0x4224, 0x4225, 0x4226, 0x4227, 0x4228,
            0x422b, 0x435c, 0x435d, 0x435e, 0x435f, 0x4360, 0x4361, 0x4363,
        ]
        segments = \
            [(stream, (x + 1) * 512, 512) for x in sample_mini_stream_sects]
        sample_mini_stream = CompositeIStream(segments)

        blair_mini_stream = \
            SubsetIStream(blair_doc.cfb_stream, (125 + 1) * 512, 512)

        sample_doc.mini_stream.seek(0, SEEK_SET)
        sample_mini_stream.seek(0, SEEK_SET)
        blair_doc.mini_stream.seek(0, SEEK_SET)
        blair_mini_stream.seek(0, SEEK_SET)

        ae(sample_doc.mini_stream.read(), sample_mini_stream.read())
        ae(blair_doc.mini_stream.read(), blair_mini_stream.read())

        # Rebuild the directory stream from its hand-located sector chains.
        stream = sample_doc.cfb_stream
        sample_dir_stream_sects = [
            0x3433, 0x3434, 0x3437, 0x41EE, 0x420B, 0x4212, 0x4215, 0x421B,
            0x4229, 0x422C, 0x422D, 0x4362
        ]
        segments = \
            [(stream, (x + 1) * 512, 512) for x in sample_dir_stream_sects]
        sample_dir_stream = CompositeIStream(segments)

        stream = blair_doc.cfb_stream
        blair_dir_stream_sects = [122, 123]
        segments = \
            [(stream, (x + 1) * 512, 512) for x in blair_dir_stream_sects]
        blair_dir_stream = CompositeIStream(segments)

        sample_dir_stream.seek(0, SEEK_SET)
        sample_doc.dir_stream.seek(0, SEEK_SET)
        blair_dir_stream.seek(0, SEEK_SET)
        blair_doc.dir_stream.seek(0, SEEK_SET)

        ae(sample_doc.dir_stream.read(), sample_dir_stream.read())
        ae(blair_doc.dir_stream.read(), blair_dir_stream.read())

        # Re-parse every 128-byte directory entry and compare dictionaries.
        sample_dir_stream.seek(0, SEEK_SET)
        sample_dir_entries = dict()
        for counter in range(48):
            sample_dir_entries[counter] = \
                DirEntry.from_stream(sample_dir_stream, counter * 128)
        # end for

        blair_dir_stream.seek(0, SEEK_SET)
        blair_dir_entries = dict()
        for counter in range(8):
            blair_dir_entries[counter] = \
                DirEntry.from_stream(blair_dir_stream, counter * 128)
        # end for

        ae(sample_doc.dir_entries, sample_dir_entries)
        ae(blair_doc.dir_entries, blair_dir_entries)

        # SID 0 is always the root directory entry.
        ae(sample_doc.root_dir_entry, sample_dir_entries[0])
        ae(blair_doc.root_dir_entry, blair_dir_entries[0])

        ae(sample_doc.cfb_stream, self.sample_doc_stream)
        ae(blair_doc.cfb_stream, self.blair_doc_stream)
Ejemplo n.º 6
0
    def test_get_stream(self):
        """Tests get_stream() for every directory entry of both documents.

        For each stream identifier, the expected contents are rebuilt by
        hand: the sector chain is fetched from the FAT (or mini FAT),
        collapsed into runs of consecutive sectors, and mapped onto the
        backing stream.  The result is compared against get_stream() both
        with slack (full sectors) and without (truncated to stream_size).
        """
        ae = self.assertEqual

        def check_doc(doc):
            # Verifies get_stream() against manually rebuilt streams for
            # every sid in *doc*.
            for sid in doc.dir_entries:
                dir_entry = doc.dir_entries[sid]
                first_sect = dir_entry.stream_sect_offset

                # Mirror get_stream()'s storage selection: the root entry
                # (sid 0) and streams at/above the cutoff live in the main
                # FAT; smaller streams live in the mini stream.
                if (sid == 0) or \
                   (dir_entry.stream_size >= doc.mini_stream_cutoff):
                    chain = doc.get_fat_chain(first_sect)
                    stream = doc.cfb_stream
                    byte_offset = doc.byte_offset
                    sect_size = doc.sect_size
                else:
                    chain = doc.get_mini_fat_chain(first_sect)
                    stream = doc.mini_stream
                    byte_offset = doc.mini_byte_offset
                    sect_size = doc.mini_sect_size
                # end if

                # Collapse the chain into (first sector, run length) pairs
                # of consecutive sectors, so each run becomes one segment.
                runs = list()
                start_index = 0
                delta = 1
                counter = 1
                while counter < len(chain):
                    if (chain[counter] - chain[start_index]) == delta:
                        # Still consecutive; extend the current run.
                        delta += 1
                        counter += 1
                        continue
                    else:
                        # Run broken; record it and start a new one here.
                        runs.append(
                            (chain[start_index], (counter - start_index)))
                        start_index = counter
                        delta = 1
                        counter += 1
                    # end if
                else:
                    # Loop exited normally: record the final run.
                    runs.append((chain[start_index], (counter - start_index)))
                # end while

                segments = [
                    (stream, byte_offset(run[0]), run[1] * sect_size)
                    for run in runs
                ]

                # With slack the stream covers whole sectors; without, it
                # is truncated to the directory entry's stream_size.
                slack = CompositeIStream(segments)
                noslack = SubsetIStream(slack, 0, dir_entry.stream_size)

                slack.seek(0, SEEK_SET)
                noslack.seek(0, SEEK_SET)

                ae(doc.get_stream(sid, slack=True).read(), slack.read())
                ae(doc.get_stream(sid, slack=False).read(), noslack.read())
            # end for
        # end def

        check_doc(self.sample_doc)
        check_doc(self.blair_doc)
Ejemplo n.º 7
0
    def test__init__(self):
        """Tests the attributes populated by CompoundFile.__init__().

        Header fields are checked against known values from the two test
        documents; the DI FAT, FAT, mini FAT, mini stream, directory
        stream, and directory entries are then rebuilt by hand from the
        raw compound files and compared against the parsed attributes.
        """
        ae = self.assertEqual
        at = self.assertTrue

        sample_doc = self.sample_doc
        blair_doc = self.blair_doc

        # Version numbers exposed directly on the document objects.
        ae(sample_doc.ver_major, 0x3)
        ae(blair_doc.ver_major, 0x3)

        ae(sample_doc.ver_minor, 0x3E)
        ae(blair_doc.ver_minor, 0x3E)

        at(hasattr(sample_doc, "header"))
        at(hasattr(blair_doc, "header"))

        # Raw header fields, compared against values read manually from
        # the two test files.
        ae(sample_doc.header.sig, HEADER_SIG)
        ae(blair_doc.header.sig, HEADER_SIG)

        ae(sample_doc.header.clsid, UUID(int=0))
        ae(blair_doc.header.clsid, UUID(int=0))

        ae(sample_doc.header.ver_minor, 0x3E)
        ae(blair_doc.header.ver_minor, 0x3E)

        ae(sample_doc.header.ver_major, 0x3)
        ae(blair_doc.header.ver_major, 0x3)

        # 0xFFFE == little endian byte order marker.
        ae(sample_doc.header.byte_order, 0xFFFE)
        ae(blair_doc.header.byte_order, 0xFFFE)

        # sect_shift 9 -> 512-byte sectors; mini_sect_shift 6 -> 64-byte
        # mini sectors (checked against sect_size/mini_sect_size below).
        ae(sample_doc.header.sect_shift, 9)
        ae(blair_doc.header.sect_shift, 9)

        ae(sample_doc.header.mini_sect_shift, 6)
        ae(blair_doc.header.mini_sect_shift, 6)

        ae(sample_doc.header.rsvd, b"\x00\x00\x00\x00\x00\x00")
        ae(blair_doc.header.rsvd, b"\x00\x00\x00\x00\x00\x00")

        ae(sample_doc.header.dir_sect_count, 0)
        ae(blair_doc.header.dir_sect_count, 0)

        # Sector counts/offsets used by the reconstruction code further
        # down (e.g. 0x87 == 135 FAT sectors for the sample document).
        ae(sample_doc.header.fat_sect_count, 0x87)
        ae(blair_doc.header.fat_sect_count, 1)

        ae(sample_doc.header.dir_sect_offset, 0x3433)
        ae(blair_doc.header.dir_sect_offset, 0x7A)

        ae(sample_doc.header.trans_num, 0)
        ae(blair_doc.header.trans_num, 0)

        # Streams smaller than 0x1000 (4096) bytes live in the mini
        # stream.
        ae(sample_doc.header.mini_stream_cutoff, 0x1000)
        ae(blair_doc.header.mini_stream_cutoff, 0x1000)

        ae(sample_doc.header.mini_fat_sect_offset, 0x3435)
        ae(blair_doc.header.mini_fat_sect_offset, 0x7C)

        ae(sample_doc.header.mini_fat_sect_count, 3)
        ae(blair_doc.header.mini_fat_sect_count, 1)

        # 0xFFFFFFFE (ENDOFCHAIN) == no extra DI FAT sectors for blair.
        ae(sample_doc.header.di_fat_sect_offset, 0x3657)
        ae(blair_doc.header.di_fat_sect_offset, 0xFFFFFFFE)

        ae(sample_doc.header.di_fat_sect_count, 1)
        ae(blair_doc.header.di_fat_sect_count, 0)

        # Raw DI FAT of the sample document: 236 little endian uint32
        # entries -- apparently the 109 entries stored in the header plus
        # those from the single extra DI FAT sector.  0xFFFFFFFF marks
        # unused slots.
        sample_di_fat_data = (
            b"\xCA\x33\x00\x00"
            b"\xCB\x33\x00\x00\xCC\x33\x00\x00\xCD\x33\x00\x00\xCE\x33\x00\x00"
            b"\xCF\x33\x00\x00\xD0\x33\x00\x00\xD1\x33\x00\x00\xD2\x33\x00\x00"
            b"\xD3\x33\x00\x00\xD4\x33\x00\x00\xD5\x33\x00\x00\xD6\x33\x00\x00"
            b"\xD7\x33\x00\x00\xD8\x33\x00\x00\xD9\x33\x00\x00\xDA\x33\x00\x00"
            b"\xDB\x33\x00\x00\xDC\x33\x00\x00\xDD\x33\x00\x00\xDE\x33\x00\x00"
            b"\xDF\x33\x00\x00\xE0\x33\x00\x00\xE1\x33\x00\x00\xE2\x33\x00\x00"
            b"\xE3\x33\x00\x00\xE4\x33\x00\x00\xE5\x33\x00\x00\xE6\x33\x00\x00"
            b"\xE7\x33\x00\x00\xE8\x33\x00\x00\xE9\x33\x00\x00\xEA\x33\x00\x00"
            b"\xEB\x33\x00\x00\xEC\x33\x00\x00\xED\x33\x00\x00\xEE\x33\x00\x00"
            b"\xEF\x33\x00\x00\xF0\x33\x00\x00\xF1\x33\x00\x00\xF2\x33\x00\x00"
            b"\xF3\x33\x00\x00\xF4\x33\x00\x00\xF5\x33\x00\x00\xF6\x33\x00\x00"
            b"\xF7\x33\x00\x00\xF8\x33\x00\x00\xF9\x33\x00\x00\xFA\x33\x00\x00"
            b"\xFB\x33\x00\x00\xFC\x33\x00\x00\xFD\x33\x00\x00\xFE\x33\x00\x00"
            b"\xFF\x33\x00\x00\x00\x34\x00\x00\x01\x34\x00\x00\x02\x34\x00\x00"
            b"\x03\x34\x00\x00\x04\x34\x00\x00\x05\x34\x00\x00\x06\x34\x00\x00"
            b"\x07\x34\x00\x00\x08\x34\x00\x00\x09\x34\x00\x00\x0A\x34\x00\x00"
            b"\x0B\x34\x00\x00\x0C\x34\x00\x00\x0D\x34\x00\x00\x0E\x34\x00\x00"
            b"\x0F\x34\x00\x00\x10\x34\x00\x00\x11\x34\x00\x00\x12\x34\x00\x00"
            b"\x13\x34\x00\x00\x14\x34\x00\x00\x15\x34\x00\x00\x16\x34\x00\x00"
            b"\x17\x34\x00\x00\x18\x34\x00\x00\x19\x34\x00\x00\x1A\x34\x00\x00"
            b"\x1B\x34\x00\x00\x1C\x34\x00\x00\x1D\x34\x00\x00\x1E\x34\x00\x00"
            b"\x1F\x34\x00\x00\x20\x34\x00\x00\x21\x34\x00\x00\x22\x34\x00\x00"
            b"\x23\x34\x00\x00\x24\x34\x00\x00\x25\x34\x00\x00\x26\x34\x00\x00"
            b"\x27\x34\x00\x00\x28\x34\x00\x00\x29\x34\x00\x00\x2A\x34\x00\x00"
            b"\x2B\x34\x00\x00\x2C\x34\x00\x00\x2D\x34\x00\x00\x2E\x34\x00\x00"
            b"\x2F\x34\x00\x00\x30\x34\x00\x00\x31\x34\x00\x00\x32\x34\x00\x00"
            b"\x52\x34\x00\x00\xD3\x34\x00\x00\x54\x35\x00\x00\xD5\x35\x00\x00"
            b"\x56\x36\x00\x00\xD8\x36\x00\x00\x59\x37\x00\x00\xDA\x37\x00\x00"
            b"\x5B\x38\x00\x00\xDC\x38\x00\x00\x5D\x39\x00\x00\xDE\x39\x00\x00"
            b"\x5F\x3A\x00\x00\xE0\x3A\x00\x00\x61\x3B\x00\x00\xE2\x3B\x00\x00"
            b"\x63\x3C\x00\x00\xE4\x3C\x00\x00\x65\x3D\x00\x00\xE6\x3D\x00\x00"
            b"\x67\x3E\x00\x00\xE8\x3E\x00\x00\x69\x3F\x00\x00\xEA\x3F\x00\x00"
            b"\x6B\x40\x00\x00\xEC\x40\x00\x00\x6D\x41\x00\x00\xF4\x41\x00\x00"
            b"\x2E\x42\x00\x00\x2F\x42\x00\x00\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF")

        # The first 436 bytes (109 uint32s) are the portion of the DI FAT
        # stored inside the header itself.
        header_di_fat_data = list(unpack("109I", sample_di_fat_data[:436]))
        sample_di_fat_data = list(unpack("236I", sample_di_fat_data))
        ae(sample_doc.header.di_fat, header_di_fat_data)
        ae(sample_doc.di_fat, sample_di_fat_data)

        # Raw DI FAT of the blair document: only the first header entry
        # (0x79 == FAT sector 121) is used; the rest are free slots.
        blair_di_fat_data = (
            b"\x79\x00\x00\x00"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
            b"\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF"
        )

        blair_di_fat_data = list(unpack("109I", blair_di_fat_data))
        ae(blair_doc.header.di_fat, blair_di_fat_data)
        ae(blair_doc.di_fat, blair_di_fat_data)

        # Sector geometry derived from the shift fields above.
        ae(sample_doc.sect_size, 512)
        ae(blair_doc.sect_size, 512)

        ae(sample_doc.mini_sect_size, 64)
        ae(blair_doc.mini_sect_size, 64)

        ae(sample_doc.mini_stream_cutoff, 4096)
        ae(blair_doc.mini_stream_cutoff, 4096)

        # Rebuild the FAT by hand.  The first 135 DI FAT entries are the
        # FAT sector numbers (fat_sect_count == 0x87 == 135); sector x
        # starts at byte (x + 1) * 512, skipping the 512-byte header.
        stream = sample_doc.cfb_stream
        segments = [(stream, (x + 1) * 512, 512)
                    for x in sample_di_fat_data[:135]]
        stream = CompositeIStream(segments)
        stream.seek(0, SEEK_SET)
        sample_fat = list(unpack("17280I", stream.read()))

        # Blair's single FAT sector is 0x79 (121) -> byte (121 + 1) * 512.
        stream = SubsetIStream(blair_doc.cfb_stream, (122 * 512), 512)
        stream.seek(0, SEEK_SET)
        blair_fat = list(unpack("128I", stream.read()))

        ae(sample_doc.fat, sample_fat)
        ae(blair_doc.fat, blair_fat)

        # Rebuild the mini FAT.  The sample mini FAT chain is sectors
        # 0x3435, 0x4217, 0x422B - 1; the +1 header adjustment is folded
        # into the literals below.
        stream = sample_doc.cfb_stream
        segments = [(stream, (0x3436 * 512), 512),
                    (stream, (0x4218 * 512), 512),
                    (stream, (0x422B * 512), 512)]
        stream = CompositeIStream(segments)
        stream.seek(0, SEEK_SET)
        sample_mini_fat = list(unpack("384I", stream.read()))

        # Blair's mini FAT sector is 0x7C (124).
        stream = SubsetIStream(blair_doc.cfb_stream, (124 + 1) * 512, 512)
        stream.seek(0, SEEK_SET)
        blair_mini_fat = list(unpack("128I", stream.read()))

        ae(sample_doc.mini_fat, sample_mini_fat)
        ae(blair_doc.mini_fat, blair_mini_fat)

        # Rebuild the mini stream from its (non-contiguous) FAT chain.
        stream = sample_doc.cfb_stream
        sample_mini_stream_sects = [
            0x3436,
            0x3451,
            0x41ef,
            0x41f0,
            0x41f1,
            0x41f2,
            0x41f3,
            0x420c,
            0x420d,
            0x420e,
            0x420f,
            0x4210,
            0x4211,
            0x4213,
            0x4214,
            0x4216,
            0x4218,
            0x4219,
            0x421a,
            0x421c,
            0x421d,
            0x421e,
            0x421f,
            0x4220,
            0x4221,
            0x4222,
            0x4223,
            0x4224,
            0x4225,
            0x4226,
            0x4227,
            0x4228,
            0x422b,
            0x435c,
            0x435d,
            0x435e,
            0x435f,
            0x4360,
            0x4361,
            0x4363,
        ]
        segments = \
            [(stream, (x + 1) * 512, 512) for x in sample_mini_stream_sects]
        sample_mini_stream = CompositeIStream(segments)

        # Blair's mini stream is a single sector (125).
        blair_mini_stream = \
            SubsetIStream(blair_doc.cfb_stream, (125 + 1) * 512, 512)

        sample_doc.mini_stream.seek(0, SEEK_SET)
        sample_mini_stream.seek(0, SEEK_SET)
        blair_doc.mini_stream.seek(0, SEEK_SET)
        blair_mini_stream.seek(0, SEEK_SET)

        ae(sample_doc.mini_stream.read(), sample_mini_stream.read())
        ae(blair_doc.mini_stream.read(), blair_mini_stream.read())

        # Rebuild the directory stream; the chains start at each header's
        # dir_sect_offset (0x3433 for sample, 0x7A == 122 for blair).
        stream = sample_doc.cfb_stream
        sample_dir_stream_sects = [
            0x3433, 0x3434, 0x3437, 0x41EE, 0x420B, 0x4212, 0x4215, 0x421B,
            0x4229, 0x422C, 0x422D, 0x4362
        ]
        segments = \
            [(stream, (x + 1) * 512, 512) for x in sample_dir_stream_sects]
        sample_dir_stream = CompositeIStream(segments)

        stream = blair_doc.cfb_stream
        blair_dir_stream_sects = [122, 123]
        segments = \
            [(stream, (x + 1) * 512, 512) for x in blair_dir_stream_sects]
        blair_dir_stream = CompositeIStream(segments)

        sample_dir_stream.seek(0, SEEK_SET)
        sample_doc.dir_stream.seek(0, SEEK_SET)
        blair_dir_stream.seek(0, SEEK_SET)
        blair_doc.dir_stream.seek(0, SEEK_SET)

        ae(sample_doc.dir_stream.read(), sample_dir_stream.read())
        ae(blair_doc.dir_stream.read(), blair_dir_stream.read())

        # Parse the directory entries from the rebuilt directory streams.
        # Each entry is 128 bytes: 12 sectors * 512 / 128 == 48 entries
        # for sample, 2 sectors * 512 / 128 == 8 entries for blair.
        sample_dir_stream.seek(0, SEEK_SET)
        sample_dir_entries = dict()
        for counter in range(48):
            sample_dir_entries[counter] = \
                DirEntry.from_stream(sample_dir_stream, counter * 128)
        # end for

        blair_dir_stream.seek(0, SEEK_SET)
        blair_dir_entries = dict()
        for counter in range(8):
            blair_dir_entries[counter] = \
                DirEntry.from_stream(blair_dir_stream, counter * 128)
        # end for

        ae(sample_doc.dir_entries, sample_dir_entries)
        ae(blair_doc.dir_entries, blair_dir_entries)

        # Entry 0 is always the root storage entry.
        ae(sample_doc.root_dir_entry, sample_dir_entries[0])
        ae(blair_doc.root_dir_entry, blair_dir_entries[0])

        # The parser should keep a reference to the original streams.
        ae(sample_doc.cfb_stream, self.sample_doc_stream)
        ae(blair_doc.cfb_stream, self.blair_doc_stream)