Example #1
 def createFields(self):
     if self.frame["../type"].value in [0xC0, 0xC1]:
         # yay, huffman coding!
         if not hasattr(self, "huffman_tables"):
             self.huffman_tables = {}
             for huffman in self.parent.array("huffman"):
                 for table in huffman["content"].array("huffman_table"):
                     for _dummy_ in table:
                         # exhaust table, so the huffman tree is built
                         pass
                     self.huffman_tables[table["table_class"].value, table["index"].value] = table.tree
         components = [] # sos_comp, samples
         max_vert = 0
         max_horiz = 0
         for component in self.scan.array("component"):
             for sof_comp in self.frame.array("component"):
                 if sof_comp["component_id"].value == component["component_id"].value:
                     vert = sof_comp["vert_sample"].value
                     horiz = sof_comp["horiz_sample"].value
                     components.append((component, vert * horiz))
                     max_vert = max(max_vert, vert)
                     max_horiz = max(max_horiz, horiz)
         mcu_height = alignValue(self.frame["height"].value, 8 * max_vert) // (8 * max_vert)
         mcu_width = alignValue(self.frame["width"].value, 8 * max_horiz) // (8 * max_horiz)
         if self.restart_interval and self.restart_offset > 0:
             mcu_number = self.restart_interval * self.restart_offset
         else:
             mcu_number = 0
         initial_mcu = mcu_number
         while True:
             if (self.restart_interval and mcu_number != initial_mcu and mcu_number % self.restart_interval == 0) or\
                mcu_number == mcu_height * mcu_width:
                 padding = paddingSize(self.current_size, 8)
                 if padding:
                     yield PaddingBits(self, "padding[]", padding) # all 1s
                 last_byte = self.stream.readBytes(self.absolute_address + self.current_size - 8, 1)
                 if last_byte == '\xFF':
                     next_byte = self.stream.readBytes(self.absolute_address + self.current_size, 1)
                     if next_byte != '\x00':
                         raise FieldError("Unexpected byte sequence %r!"%(last_byte + next_byte))
                     yield NullBytes(self, "stuffed_byte[]", 1)
                 break
             for sos_comp, num_units in components:
                 for interleave_count in range(num_units):
                     yield JpegHuffmanImageUnit(self, "block[%i]component[%i][]" % (mcu_number, sos_comp["component_id"].value),
                                           self.huffman_tables[0, sos_comp["dc_coding_table"].value],
                                           self.huffman_tables[1, sos_comp["ac_coding_table"].value])
             mcu_number += 1
     else:
         self.warning("Sorry, only supporting Baseline & Extended Sequential JPEG images so far!")
         return
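Every example on this page relies on hachoir's alignValue(value, align) helper, which rounds value up to the next multiple of align. In Example #1 it turns the image height and width into a count of MCU rows and columns (a ceiling division by 8 * sampling factor). A minimal stand-in, shown only for reference:

def alignValue(value, align):
    # round value up to the next multiple of align (stand-in for hachoir's helper)
    if value % align:
        return value + align - (value % align)
    return value

# the ceiling division used above to count MCUs: alignValue(h, 8 * v) // (8 * v)
assert alignValue(100, 16) // 16 == 7   # 100 pixel rows -> 7 MCU rows of 16
assert alignValue(96, 16) // 16 == 6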
Example #2
def extend_data(data, length, offset):
    """Extend data using a length and an offset."""
    if length >= offset:
        new_data = data[-offset:] * (alignValue(length, offset) // offset)
        return data + new_data[:length]
    else:
        return data + data[-offset:-offset+length]
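extend_data above is the classic LZ77-style back-reference copy: it appends length bytes taken from offset bytes back in data, repeating the window when the copy overlaps itself (the length >= offset branch). A quick usage check, assuming the definition above and a round-up alignValue:

# overlapping copy: 6 bytes from 2 bytes back, so the 2-byte window "bc" repeats
assert extend_data(b"abc", 6, 2) == b"abcbcbcbc"
# non-overlapping copy: 2 bytes taken from 3 bytes back
assert extend_data(b"abcd", 2, 3) == b"abcdbc"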
Example #3
def extend_data(data, length, offset):
    """Extend data using a length and an offset."""
    if length >= offset:
        new_data = data[-offset:] * (alignValue(length, offset) // offset)
        return data + new_data[:length]
    else:
        return data + data[-offset:-offset + length]
Example #4
 def __init__(self, *args):
     FieldSet.__init__(self, *args)
     self._size = (8 + alignValue(self["size"].value, 2)) * 8
     tag = self["type"].value
     if tag in self.TAG_INFO:
         self._name, self._description, self._parser = self.TAG_INFO[tag]
     else:
         self._parser = None
Example #5
 def __init__(self, *args, **kw):
     FieldSet.__init__(self, *args, **kw)
     try:
         self._name, self.handler, self._description = self.TAG_INFO[self["tag"].value]
     except KeyError:
         self.handler = None
     size = self["size"]
     self._size = size.address + size.size + alignValue(size.value, 2) * 8
Example #6
 def __init__(self, *args):
     FieldSet.__init__(self, *args)
     self._size = (8 + alignValue(self["size"].value, 2)) * 8
     tag = self["type"].value
     if tag in self.TAG_INFO:
         self._name, self._description, self._parser = self.TAG_INFO[tag]
     else:
         self._parser = None
Example #7
def calc_char_range(start, end):
    aligned_start = byte_addr(start)
    aligned_end = byte_addr(alignValue(end, 8))

    char_start = calc_char_pos(aligned_start)
    char_end = calc_char_pos(aligned_end)

    return char_start, char_end
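Here alignValue(end, 8) rounds the end bit address up to a byte boundary before the byte_addr/calc_char_pos helpers (not shown in this example) convert it, so a range that stops mid-byte still covers the whole final byte. A sketch with a hypothetical byte_addr, essentially what calc_byte_range in Example #25 does:

def byte_addr(bit_addr):
    # hypothetical helper: bit address -> byte address
    return bit_addr // 8

def calc_byte_range(start, end):
    end_aligned = end + (-end % 8)          # alignValue(end, 8)
    return byte_addr(start), byte_addr(end_aligned)

# bits 3..13 touch bytes 0 and 1, so the exclusive end byte is 2
assert calc_byte_range(3, 13) == (0, 2)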
Example #8
 def __init__(self, *args, **kw):
     FieldSet.__init__(self, *args, **kw)
     try:
         self._name, self.handler, self._description = self.TAG_INFO[
             self["tag"].value]
     except KeyError:
         self.handler = None
     size = self["size"]
     self._size = size.address + size.size + alignValue(size.value, 2) * 8
Example #9
 def createFields(self):
     if self.has_all_byte:
         yield Enum(UInt8(self, "all_defined"), {0: 'False', 1: 'True'})
         if self['all_defined'].value:
             return
     nbytes = alignValue(self.num, 8) // 8
     ctr = 0
     for i in xrange(nbytes):
         for j in reversed(xrange(8)):
             yield Bit(self, "bit[%d]" % (ctr + j))
         ctr += 8
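The generator above rounds the bit count up to whole bytes (alignValue(num, 8) // 8) and then yields each byte's bits from the most significant index down, so the field names come out as bit[7] ... bit[0], bit[15] ... bit[8], and so on. A small check of that index sequence:

def bit_name_order(num_bits):
    # reproduce the index sequence the generator yields
    nbytes = (num_bits + 7) // 8            # alignValue(num, 8) // 8
    order, ctr = [], 0
    for _ in range(nbytes):
        for j in reversed(range(8)):
            order.append(ctr + j)
        ctr += 8
    return order

assert bit_name_order(10) == [7, 6, 5, 4, 3, 2, 1, 0, 15, 14, 13, 12, 11, 10, 9, 8]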
Example #10
 def createFields(self):
     if self.has_all_byte:
         yield Enum(UInt8(self, "all_defined"), {0: "False", 1: "True"})
         if self["all_defined"].value:
             return
     nbytes = alignValue(self.num, 8) // 8
     ctr = 0
     for i in xrange(nbytes):
         for j in reversed(xrange(8)):
             yield Bit(self, "bit[%d]" % (ctr + j))
         ctr += 8
Example #11
    def createFields(self):
        group = self["../group_desc/group[%u]" % self.uniq_id]
        superblock = self["/superblock"]
        block_size = self["/"].block_size

        # Read block bitmap
        addr = self.absolute_address + 56 * 8
        self.superblock_copy = (self.stream.readBytes(addr, 2) == "\x53\xEF")
        if self.superblock_copy:
            yield SuperBlock(self, "superblock_copy")

        # Compute number of block and inodes
        block_count = superblock["blocks_per_group"].value
        inode_count = superblock["inodes_per_group"].value
        block_index = self.uniq_id * block_count
        inode_index = self.uniq_id * inode_count
        if (block_count % 8) != 0:
            raise ParserError("Invalid block count")
        if (inode_count % 8) != 0:
            raise ParserError("Invalid inode count")
        block_count = min(block_count,
                          superblock["blocks_count"].value - block_index)
        inode_count = min(inode_count,
                          superblock["inodes_count"].value - inode_index)

        # Read block bitmap
        field = self.seekByte(group["block_bitmap"].value * block_size,
                              relative=False,
                              null=True)
        if field:
            yield field
        yield BlockBitmap(self, "block_bitmap", block_index, block_count,
                          "Block bitmap")

        # Read inode bitmap
        field = self.seekByte(group["inode_bitmap"].value * block_size,
                              relative=False)
        if field:
            yield field
        yield InodeBitmap(self, "inode_bitmap", inode_index, inode_count,
                          "Inode bitmap")

        # Read inode table
        field = self.seekByte(alignValue(self.current_size // 8, block_size))
        if field:
            yield field
        yield InodeTable(self, "inode_table", inode_index, inode_count)

        # Add padding if needed
        addr = min(self.parent.size / 8, (self.uniq_id + 1) *
                   superblock["blocks_per_group"].value * block_size)
        yield self.seekByte(addr, "data", relative=False)
Example #12
    def setCheckedSizes(self, size):
        # First set size so that end is aligned, if needed
        self.real_size = size
        size *= 8
        if self.ALIGN:
            size = alignValue(self.absolute_address+size, 8*self.ALIGN) \
                   - self.absolute_address

        if self._parent._size:
            if self._parent.current_size + size > self._parent._size:
                size = self._parent._size - self._parent.current_size

        self._size = size
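When ALIGN is set, the size is grown so that the field's end address (not just its length) lands on an ALIGN-byte boundary; subtracting absolute_address turns that aligned end back into a size. A quick check of the arithmetic, with the rounding written out instead of alignValue:

def checked_size(absolute_address, size_bytes, align_bytes):
    # grow the bit size so the field ends on an align_bytes boundary
    size = size_bytes * 8
    end = absolute_address + size
    end += (-end) % (8 * align_bytes)       # alignValue(end, 8 * align_bytes)
    return end - absolute_address

assert checked_size(8, 2, 4) == 24   # 2 bytes starting at bit 8 pad out to bit 32
assert checked_size(0, 4, 4) == 32   # end already aligned, no padding added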
Example #13
    def setCheckedSizes(self, size):
        # First set size so that end is aligned, if needed
        self.real_size = size
        size *= 8
        if self.ALIGN:
            size = alignValue(self.absolute_address+size, 8*self.ALIGN) \
                   - self.absolute_address

        if self._parent._size:
            if self._parent.current_size + size > self._parent._size:
                size = self._parent._size - self._parent.current_size

        self._size = size
Example #14
def hexadecimal(field):
    """
    Convert an integer to hexadecimal in lower case. Returns unicode string.

    >>> hexadecimal(type("", (), dict(value=412, size=16)))
    u'0x019c'
    >>> hexadecimal(type("", (), dict(value=0, size=32)))
    u'0x00000000'
    """
    assert hasattr(field, "value") and hasattr(field, "size")
    size = field.size
    padding = alignValue(size, 4) // 4
    pattern = u"0x%%0%ux" % padding
    return pattern % field.value
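The padding here is the field's bit size rounded up to whole nibbles, i.e. the number of hex digits needed to print it, so a 12-bit field gets three digits. A quick check with a stand-in alignValue:

def alignValue(value, align):   # stand-in for hachoir's round-up helper
    return value + (-value % align)

for bits, digits in ((8, 2), (12, 3), (16, 4), (32, 8)):
    assert alignValue(bits, 4) // 4 == digits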
Example #15
 def __init__(self, *args, **kw):
     FieldSet.__init__(self, *args, **kw)
     self._size = (8 + alignValue(self["size"].value, 2)) * 8
     tag = self["tag"].value
     if tag in self.TAG_INFO:
         self.tag_info = self.TAG_INFO[tag]
         if tag == "LIST":
             subtag = self["subtag"].value
             if subtag in self.subtag_info:
                 info = self.subtag_info[subtag]
                 self.tag_info = (info[0], None, info[1])
         self._name = self.tag_info[0]
         self._description = self.tag_info[2]
     else:
         self.tag_info = ("field[]", None, None)
Example #16
 def __init__(self, *args, **kw):
     FieldSet.__init__(self, *args, **kw)
     self._size = (8 + alignValue(self["size"].value, 2)) * 8
     tag = self["tag"].value
     if tag in self.TAG_INFO:
         self.tag_info = self.TAG_INFO[tag]
         if tag == "LIST":
             subtag = self["subtag"].value
             if subtag in self.subtag_info:
                 info = self.subtag_info[subtag]
                 self.tag_info = (info[0], None, info[1])
         self._name = self.tag_info[0]
         self._description = self.tag_info[2]
     else:
         self.tag_info = ("field[]", None, None)
Example #17
    def createFields(self):
        group = self["../group_desc/group[%u]" % self.uniq_id]
        superblock = self["/superblock"]
        block_size = self["/"].block_size

        # Read block bitmap
        addr = self.absolute_address + 56*8
        self.superblock_copy = (self.stream.readBytes(addr, 2) == "\x53\xEF")
        if self.superblock_copy:
            yield SuperBlock(self, "superblock_copy")

        # Compute number of block and inodes
        block_count = superblock["blocks_per_group"].value
        inode_count = superblock["inodes_per_group"].value
        block_index = self.uniq_id * block_count
        inode_index = self.uniq_id * inode_count
        if (block_count % 8) != 0:
            raise ParserError("Invalid block count")
        if (inode_count % 8) != 0:
            raise ParserError("Invalid inode count")
        block_count = min(block_count, superblock["blocks_count"].value - block_index)
        inode_count = min(inode_count, superblock["inodes_count"].value - inode_index)

        # Read block bitmap
        field = self.seekByte(group["block_bitmap"].value * block_size, relative=False, null=True)
        if field:
            yield field
        yield BlockBitmap(self, "block_bitmap", block_index, block_count, "Block bitmap")

        # Read inode bitmap
        field = self.seekByte(group["inode_bitmap"].value * block_size, relative=False)
        if field:
            yield field
        yield InodeBitmap(self, "inode_bitmap", inode_index, inode_count, "Inode bitmap")

        # Read inode table
        field = self.seekByte(alignValue(self.current_size//8, block_size))
        if field:
            yield field
        yield InodeTable(self, "inode_table", inode_index, inode_count)

        # Add padding if needed
        addr = min(self.parent.size / 8,
            (self.uniq_id+1) * superblock["blocks_per_group"].value * block_size)
        yield self.seekByte(addr, "data", relative=False)
Example #18
 def createFields(self):
     yield String(self, "signature", 4, "8BIM signature", charset="ASCII")
     if self["signature"].value != "8BIM":
         raise ParserError("Stream doesn't look like 8BIM item (wrong signature)!")
     yield textHandler(UInt16(self, "tag"), hexadecimal)
     if self.stream.readBytes(self.absolute_address + self.current_size, 4) != "\0\0\0\0":
         yield PascalString8(self, "name")
         size = 2 + (self["name"].size // 8) % 2
         yield NullBytes(self, "name_padding", size)
     else:
         yield String(self, "name", 4, strip="\0")
     yield UInt16(self, "size")
     size = alignValue(self["size"].value, 2)
     if not size:
         return
     if self.handler:
         yield self.handler(self, "content", size=size*8)
     else:
         yield RawBytes(self, "content", size)
Example #19
    def readBits(self, address, nbits, endian):
        assert endian in (BIG_ENDIAN, LITTLE_ENDIAN, MIDDLE_ENDIAN)

        if endian is MIDDLE_ENDIAN:
            # read an aligned chunk of words
            wordaddr, remainder = divmod(address, 16)
            wordnbits = alignValue(remainder + nbits, 16)
            _, data, missing = self.read(wordaddr * 16, wordnbits)
            shift = remainder
        else:
            shift, data, missing = self.read(address, nbits)
        if missing:
            raise ReadStreamError(nbits, address)
        value = str2long(data, endian)
        if endian in (BIG_ENDIAN, MIDDLE_ENDIAN):
            value >>= len(data) * 8 - shift - nbits
        else:
            value >>= shift
        return value & (1 << nbits) - 1
Example #20
    def readBits(self, address, nbits, endian):
        assert endian in (BIG_ENDIAN, LITTLE_ENDIAN, MIDDLE_ENDIAN)

        if endian is MIDDLE_ENDIAN:
            # read an aligned chunk of words
            wordaddr, remainder = divmod(address, 16)
            wordnbits = alignValue(remainder+nbits, 16)
            _, data, missing = self.read(wordaddr*16, wordnbits)
            shift = remainder
        else:
            shift, data, missing = self.read(address, nbits)
        if missing:
            raise ReadStreamError(nbits, address)
        value = str2long(data, endian)
        if endian in (BIG_ENDIAN, MIDDLE_ENDIAN):
            value >>= len(data) * 8 - shift - nbits
        else:
            value >>= shift
        return value & (1 << nbits) - 1
 def createFields(self):
     yield String(self, "signature", 4, "8BIM signature", charset="ASCII")
     if self["signature"].value != "8BIM":
         raise ParserError(
             "Stream doesn't look like 8BIM item (wrong signature)!")
     yield textHandler(UInt16(self, "tag"), hexadecimal)
     if self.stream.readBytes(self.absolute_address + self.current_size,
                              4) != "\0\0\0\0":
         yield PascalString8(self, "name")
         size = 2 + (self["name"].size // 8) % 2
         yield NullBytes(self, "name_padding", size)
     else:
         yield String(self, "name", 4, strip="\0")
     yield UInt16(self, "size")
     size = alignValue(self["size"].value, 2)
     if not size:
         return
     if self.handler:
         yield self.handler(self, "content", size=size * 8)
     else:
         yield RawBytes(self, "content", size)
Example #22
def handle_form():
    """Process submitted data.

    See comments for details.

    """
    prune_old()
    form = cgi.FieldStorage()
    if 'file' in form and form['file'].file:
        # compute session id
        sessid = get_sessid()
        if not sessid:
            rand = str(time.time())+form['file'].filename+str(random.random())
            sessid = hashlib.md5(rand).hexdigest()
        # write uploaded file
        f = open(tmp_dir+sessid+'.file','wb')
        if form['file'].done==-1:
            raise ValueError("File upload canceled?")
        while f.tell()<2**22: # 4MB limit
            chunk = form['file'].file.read(32768) # 32KB chunks
            if not chunk:
                break
            f.write(chunk)
        if f.tell() == 0:
            f.close()
            print_form('Nothing uploaded.')
            return
        f.close()
        # write session variables
        try:
            fn = unicode(form['file'].filename,'utf-8')
        except UnicodeDecodeError:
            fn = unicode(form['file'].filename,'iso-8859-1')
        # stream "None" represents the original stream
        save_data({'filename':fn,'streams':[(None, None, fn)]}, sessid)
        # send session id and reset variables
        c = SimpleCookie()
        c['sess'] = sessid
        c['hpath'] = '/' # clear path var.
        c['stream'] = '0' # clear stream var
        print c # send cookie to client (headers)
        print_page() # print AJAX frame page
    elif get_sessid(): # or perhaps you already have a file to parse?
        if not 'hpath' in form:
            print_page()
            return
        # redirect stderr, so we can catch parser errors
        sys.stderr = StringIO()
        # load variables
        hpath = cgi.escape(form.getfirst('hpath','/'))
        stream_id = int(form.getfirst('stream','0'))
        path = hpath.split(':')[stream_id]
        sessid = get_sessid()
        try:
            data = cPickle.load(file(tmp_dir+sessid+'.sess','rb'))
        except IOError:
            print_error('Your file was deleted due to inactivity. '
                'Please upload a new one.')
            return
        stream, parser = get_parser(data, data['streams'][stream_id], sessid)
        if parser is None:
            return # sorry, couldn't parse file!
        if 'save' in form:
            # "Download Raw"
            f = FileFromInputStream(stream)
            fld = parser[path]
            f.seek(fld.absolute_address/8)
            size = alignValue(fld.size, 8)/8
            sys.stdout.write('Content-Type: application/octet-stream\r\n')
            sys.stdout.write('Content-Length: %i\r\n'%size)
            sys.stdout.write('Content-Disposition: attachment; '
                'filename=%s\r\n\r\n'%path.strip('/').split('/')[-1])
            sys.stdout.write(f.read(size))
            return
        elif 'savesub' in form:
            # "Download Substream"
            stream = parser[path.rstrip('/')].getSubIStream()
            filename = path.strip('/').split('/')[-1]
            tags = getattr(stream,'tags',[])
            for tag in tags:
                if tag[0] == 'filename':
                    filename = tag[1]
            sys.stdout.write('Content-Type: application/octet-stream\r\n')
            sys.stdout.write('Content-Disposition: attachment; '
                'filename=%s\r\n\r\n'%filename)
            sys.stdout.write(FileFromInputStream(stream).read())
            return
        elif 'addStream' in form:
            # "Parse Substream"
            spath = cgi.escape(form['addStream'].value)
            new_stream = parser[spath.rstrip('/')].getSubIStream()
            streamdata = FileFromInputStream(new_stream).read()
            new_parser = guessParser(new_stream)
            if new_parser:
                stream = new_stream
                parser = new_parser
                tags = getattr(stream,'tags',[])
                streamname = data['streams'][stream_id][2]+':'
                data['streams'].append((tags, streamdata, streamname+spath))
                try:
                    if force_substream_ref:
                        raise Exception("Use references for all substreams")
                    save_data(data, sessid)
                except Exception:
                    # many things could go wrong with pickling
                    data['streams'][-1] = (data['streams'][stream_id],
                        spath, streamname+spath)
                    save_data(data, sessid)
                path = '/'
                hpath += ':/'
                stream_id = len(data['streams'])-1
            else:
                sys.stderr.write("Cannot parse substream %s: "
                    "No suitable parser\n"%spath)
        elif 'delStream' in form:
            # "Delete Stream"
            n = int(form['delStream'].value)
            paths = hpath.split(':')
            del paths[n]
            del data['streams'][n]
            if n >= len(data['streams']):
                stream_id = 0
            else:
                stream_id = n
            path = paths[stream_id]
            hpath = ':'.join(paths)
            save_data(data, sessid)
            stream, parser = get_parser(data, data['streams'][stream_id],
                sessid)
        # update client's variables
        c = SimpleCookie()
        c['hpath'] = hpath
        c['stream'] = str(stream_id)
        print c # send cookie to client
        # send headers
        print 'Content-Type: text/html'
        print
        # breadcrumb trail path up top
        print_path(path, data, stream_id)
        # fields
        print '''<table id="maintable" border="1">
<tr class="header">
    <th class="headertext">Offset</th>
    <th class="headertext">Name</th>
    <th class="headertext">Type</th>
    <th class="headertext">Size</th>
    <th class="headertext">Description</th>
    <th class="headertext">Data</th>
    <th class="headertext">Download Field</th>
</tr>'''
        for i in parser[path]:
            # determine options
            display = i.raw_display if form.getfirst('raw','0') == '1'\
                else i.display
            disp_off = bits2hex if form.getfirst('hex','0') == '1'\
                else bits2dec
            addr = i.address if form.getfirst('rel','0') == '1'\
                else i.absolute_address
            if display == 'None':
                display = ''
            # clickable name for field sets
            if i.is_field_set:
                name = '''<span href="#" onClick="goPath('%s%s/')"\
 class="fieldlink">%s/</span>'''%(path, i.name, i.name)
            else:
                name = i.name
            print '<tr class="data">'
            print '<td class="fldaddress">%s</td>'%disp_off(addr)
            print '<td class="fldname">%s</td>'%name
            print '<td class="fldtype">%s</td>'%i.__class__.__name__
            print '<td class="fldsize">%s</td>'%disp_off(i.size)
            print '<td class="flddesc">%s</td>'%i.description
            print '<td class="flddisplay">%s</td>'%display
            print '<td class="flddownload">'
            paths = hpath.split(':')
            paths[stream_id] += i.name
            url = "%s?hpath=%s&stream=%s"%\
                (script_name,':'.join(paths), stream_id)
            # hack to determine if a substream is present
            # the default getSubIStream() returns InputFieldStream()
            # InputFieldStream() then returns an InputSubStream.
            # in all the overrides, the return is a different stream type,
            # but this is certainly not the safest way to check for
            # an overridden method...
            # finally, if the field is a SubFile, then it has a custom
            # substream, and thus gets the substream features.
            if not isinstance(i.getSubIStream(), InputSubStream)\
                or isinstance(i, SubFile):
                print '<a href="javascript:addStream(\'%s\')"\
 class="dllink">Parse Substream</a><br/>'%(path+i.name)
                print '<a href="%s&savesub=1"\
 class="dllink">Download Substream</a><br/>'%url
                print '<a href="%s&save=1"\
 class="dllink">Download Raw</a>'%url
            else:
                print '<a href="%s&save=1"\
 class="dllink">Download</a>'%url
            print '</td>'
            print '</tr>'
        print '</table>'
        print_path(path, data, stream_id)
        if sys.stderr.getvalue():
            print_error('Error(s) encountered:', print_headers=False)
            print '<pre class="parseerror">%s</pre>'%sys.stderr.getvalue()
    else:
        print_form('Note: Cookies MUST be enabled!')
Example #23
    def __init__(self, parent, name, format, description=None,
    strip=None, charset=None, nbytes=None, truncate=None):
        Bytes.__init__(self, parent, name, 1, description)

        # Is format valid?
        assert format in self.VALID_FORMATS

        # Store options
        self._format = format
        self._strip = strip
        self._truncate = truncate

        # Check charset and compute character size in bytes
        # (or None when it's not possible to guess character size)
        if not charset or charset in self.CHARSET_8BIT:
            self._character_size = 1   # one byte per character
        elif charset in self.UTF_CHARSET:
            self._character_size = None
        else:
            raise FieldError("Invalid charset for %s: \"%s\"" %
                (self.path, charset))
        self._charset = charset

        # Is it a fixed string?
        if nbytes is not None:
            assert self._format == "fixed"
            # Arbitrary limits, just to catch some bugs...
            if not (1 <= nbytes <= 0xffff):
                raise FieldError("Invalid string size for %s: %s" %
                    (self.path, nbytes))
            self._content_size = nbytes   # content length in bytes
            self._size = nbytes * 8
            self._content_offset = 0
        else:
            # Format with a suffix: Find the end of the string
            if self._format in self.SUFFIX_FORMAT:
                self._content_offset = 0

                # Choose the suffix
                suffix = self.suffix_str

                # Find the suffix
                length = self._parent.stream.searchBytesLength(
                    suffix, False, self.absolute_address)
                if length is None:
                    raise FieldError("Unable to find end of string %s (format %s)!"
                        % (self.path, self._format))
                if 1 < len(suffix):
                    # Fix length for little endian bug with UTF-xx charset:
                    #   u"abc" -> "a\0b\0c\0\0\0" (UTF-16-LE)
                    #   search returns length=5, whereas the real length is 6
                    length = alignValue(length, len(suffix))

                # Compute sizes
                self._content_size = length # in bytes
                self._size = (length + len(suffix)) * 8

            # Format with a prefix: Read prefixed length in bytes
            else:
                assert self._format in self.PASCAL_FORMATS

                # Get the prefix size
                prefix_size = self.PASCAL_FORMATS[self._format]
                self._content_offset = prefix_size

                # Read the prefix and compute sizes
                value = self._parent.stream.readBits(
                    self.absolute_address, prefix_size*8, self._parent.endian)
                self._content_size = value   # in bytes
                self._size = (prefix_size + value) * 8

        # For UTF-16 and UTF-32, choose the right charset using BOM
        if self._charset in self.UTF_CHARSET:
            # Charset requires a BOM?
            bomsize, endian  = self.UTF_CHARSET[self._charset]
            if endian == "BOM":
                # Read the BOM value
                nbytes = bomsize // 8
                bom = self._parent.stream.readBytes(self.absolute_address, nbytes)

                # Choose right charset using the BOM
                bom_endian = self.UTF_BOM[bomsize]
                if bom not in bom_endian:
                    raise FieldError("String %s has invalid BOM (%s)!"
                        % (self.path, repr(bom)))
                self._charset = bom_endian[bom]
                self._content_size -= nbytes
                self._content_offset += nbytes

        # Compute length in characters if possible
        if self._character_size:
            self._length = self._content_size //  self._character_size
        else:
            self._length = None
Example #24
    def __init__(self, parent, name, format, description=None,
    strip=None, charset=None, nbytes=None, truncate=None):
        Bytes.__init__(self, parent, name, 1, description)

        # Is format valid?
        assert format in self.VALID_FORMATS

        # Store options
        self._format = format
        self._strip = strip
        self._truncate = truncate

        # Check charset and compute character size in bytes
        # (or None when it's not possible to guess character size)
        if not charset or charset in self.CHARSET_8BIT:
            self._character_size = 1   # one byte per character
        elif charset in self.UTF_CHARSET:
            self._character_size = None
        else:
            raise FieldError("Invalid charset for %s: \"%s\"" %
                (self.path, charset))
        self._charset = charset

        # Is it a fixed string?
        if nbytes is not None:
            assert self._format == "fixed"
            # Arbitrary limits, just to catch some bugs...
            if not (1 <= nbytes <= 0xffff):
                raise FieldError("Invalid string size for %s: %s" %
                    (self.path, nbytes))
            self._content_size = nbytes   # content length in bytes
            self._size = nbytes * 8
            self._content_offset = 0
        else:
            # Format with a suffix: Find the end of the string
            if self._format in self.SUFFIX_FORMAT:
                self._content_offset = 0

                # Choose the suffix
                suffix = self.suffix_str

                # Find the suffix
                length = self._parent.stream.searchBytesLength(
                    suffix, False, self.absolute_address)
                if length is None:
                    raise FieldError("Unable to find end of string %s (format %s)!"
                        % (self.path, self._format))
                if 1 < len(suffix):
                    # Fix length for little endian bug with UTF-xx charset:
                    #   u"abc" -> "a\0b\0c\0\0\0" (UTF-16-LE)
                    #   search returns length=5, whereas the real length is 6
                    length = alignValue(length, len(suffix))

                # Compute sizes
                self._content_size = length # in bytes
                self._size = (length + len(suffix)) * 8

            # Format with a prefix: Read prefixed length in bytes
            else:
                assert self._format in self.PASCAL_FORMATS

                # Get the prefix size
                prefix_size = self.PASCAL_FORMATS[self._format]
                self._content_offset = prefix_size

                # Read the prefix and compute sizes
                value = self._parent.stream.readBits(
                    self.absolute_address, prefix_size*8, self._parent.endian)
                self._content_size = value   # in bytes
                self._size = (prefix_size + value) * 8

        # For UTF-16 and UTF-32, choose the right charset using BOM
        if self._charset in self.UTF_CHARSET:
            # Charset requires a BOM?
            bomsize, endian  = self.UTF_CHARSET[self._charset]
            if endian == "BOM":
                # Read the BOM value
                nbytes = bomsize // 8
                bom = self._parent.stream.readBytes(self.absolute_address, nbytes)

                # Choose right charset using the BOM
                bom_endian = self.UTF_BOM[bomsize]
                if bom not in bom_endian:
                    raise FieldError("String %s has invalid BOM (%s)!"
                        % (self.path, repr(bom)))
                self._charset = bom_endian[bom]
                self._content_size -= nbytes
                self._content_offset += nbytes

        # Compute length in characters if possible
        if self._character_size:
            self._length = self._content_size //  self._character_size
        else:
            self._length = None
Example #25
def calc_byte_range(start, end):
    return byte_addr(start), byte_addr(alignValue(end, 8))
Example #26
 def __init__(self, parent, name):
     FieldSet.__init__(self, parent, name)
     self._size = alignValue(self["size"].value, 4) * 8
Example #27
 def __init__(self, parent, name, is_32bit=True):
     FieldSet.__init__(self, parent, name)
     self._size = alignValue(self["size"].value, 4) * 8
     self.is_32bit = is_32bit
Example #28
 def __init__(self, parent, name):
     FieldSet.__init__(self, parent, name)
     self._size = alignValue(self["size"].value, 4) * 8
Example #29
def handle_form():
    """Process submitted data.

    See comments for details.

    """
    prune_old()
    form = cgi.FieldStorage()
    if 'file' in form and form['file'].file:
        # compute session id
        sessid = get_sessid()
        if not sessid:
            rand = str(time.time()) + form['file'].filename + str(
                random.random())
            sessid = hashlib.md5(rand).hexdigest()
        # write uploaded file
        f = open(tmp_dir + sessid + '.file', 'wb')
        if form['file'].done == -1:
            raise ValueError("File upload canceled?")
        while f.tell() < 2**22:  # 4MB limit
            chunk = form['file'].file.read(32768)  # 32KB chunks
            if not chunk:
                break
            f.write(chunk)
        if f.tell() == 0:
            f.close()
            print_form('Nothing uploaded.')
            return
        f.close()
        # write session variables
        try:
            fn = unicode(form['file'].filename, 'utf-8')
        except UnicodeDecodeError:
            fn = unicode(form['file'].filename, 'iso-8859-1')
        # stream "None" represents the original stream
        save_data({'filename': fn, 'streams': [(None, None, fn)]}, sessid)
        # send session id and reset variables
        c = SimpleCookie()
        c['sess'] = sessid
        c['hpath'] = '/'  # clear path var.
        c['stream'] = '0'  # clear stream var
        print c  # send cookie to client (headers)
        print_page()  # print AJAX frame page
    elif get_sessid():  # or perhaps you already have a file to parse?
        if not 'hpath' in form:
            print_page()
            return
        # redirect stderr, so we can catch parser errors
        sys.stderr = StringIO()
        # load variables
        hpath = cgi.escape(form.getfirst('hpath', '/'))
        stream_id = int(form.getfirst('stream', '0'))
        path = hpath.split(':')[stream_id]
        sessid = get_sessid()
        try:
            data = cPickle.load(file(tmp_dir + sessid + '.sess', 'rb'))
        except IOError:
            print_error('Your file was deleted due to inactivity. '
                        'Please upload a new one.')
            return
        stream, parser = get_parser(data, data['streams'][stream_id], sessid)
        if parser is None:
            return  # sorry, couldn't parse file!
        if 'save' in form:
            # "Download Raw"
            f = FileFromInputStream(stream)
            fld = parser[path]
            f.seek(fld.absolute_address / 8)
            size = alignValue(fld.size, 8) / 8
            sys.stdout.write('Content-Type: application/octet-stream\r\n')
            sys.stdout.write('Content-Length: %i\r\n' % size)
            sys.stdout.write('Content-Disposition: attachment; '
                             'filename=%s\r\n\r\n' %
                             path.strip('/').split('/')[-1])
            sys.stdout.write(f.read(size))
            return
        elif 'savesub' in form:
            # "Download Substream"
            stream = parser[path.rstrip('/')].getSubIStream()
            filename = path.strip('/').split('/')[-1]
            tags = getattr(stream, 'tags', [])
            for tag in tags:
                if tag[0] == 'filename':
                    filename = tag[1]
            sys.stdout.write('Content-Type: application/octet-stream\r\n')
            sys.stdout.write('Content-Disposition: attachment; '
                             'filename=%s\r\n\r\n' % filename)
            sys.stdout.write(FileFromInputStream(stream).read())
            return
        elif 'addStream' in form:
            # "Parse Substream"
            spath = cgi.escape(form['addStream'].value)
            new_stream = parser[spath.rstrip('/')].getSubIStream()
            streamdata = FileFromInputStream(new_stream).read()
            new_parser = guessParser(new_stream)
            if new_parser:
                stream = new_stream
                parser = new_parser
                tags = getattr(stream, 'tags', [])
                streamname = data['streams'][stream_id][2] + ':'
                data['streams'].append((tags, streamdata, streamname + spath))
                try:
                    if force_substream_ref:
                        raise Exception("Use references for all substreams")
                    save_data(data, sessid)
                except Exception:
                    # many things could go wrong with pickling
                    data['streams'][-1] = (data['streams'][stream_id], spath,
                                           streamname + spath)
                    save_data(data, sessid)
                path = '/'
                hpath += ':/'
                stream_id = len(data['streams']) - 1
            else:
                sys.stderr.write("Cannot parse substream %s: "
                                 "No suitable parser\n" % spath)
        elif 'delStream' in form:
            # "Delete Stream"
            n = int(form['delStream'].value)
            paths = hpath.split(':')
            del paths[n]
            del data['streams'][n]
            if n >= len(data['streams']):
                stream_id = 0
            else:
                stream_id = n
            path = paths[stream_id]
            hpath = ':'.join(paths)
            save_data(data, sessid)
            stream, parser = get_parser(data, data['streams'][stream_id],
                                        sessid)
        # update client's variables
        c = SimpleCookie()
        c['hpath'] = hpath
        c['stream'] = str(stream_id)
        print c  # send cookie to client
        # send headers
        print 'Content-Type: text/html'
        print
        # breadcrumb trail path up top
        print_path(path, data, stream_id)
        # fields
        print '''<table id="maintable" border="1">
<tr class="header">
    <th class="headertext">Offset</th>
    <th class="headertext">Name</th>
    <th class="headertext">Type</th>
    <th class="headertext">Size</th>
    <th class="headertext">Description</th>
    <th class="headertext">Data</th>
    <th class="headertext">Download Field</th>
</tr>'''
        for i in parser[path]:
            # determine options
            display = i.raw_display if form.getfirst('raw','0') == '1'\
                else i.display
            disp_off = bits2hex if form.getfirst('hex','0') == '1'\
                else bits2dec
            addr = i.address if form.getfirst('rel','0') == '1'\
                else i.absolute_address
            if display == 'None':
                display = ''
            # clickable name for field sets
            if i.is_field_set:
                name = '''<span href="#" onClick="goPath('%s%s/')"\
 class="fieldlink">%s/</span>''' % (path, i.name, i.name)
            else:
                name = i.name
            print '<tr class="data">'
            print '<td class="fldaddress">%s</td>' % disp_off(addr)
            print '<td class="fldname">%s</td>' % name
            print '<td class="fldtype">%s</td>' % i.__class__.__name__
            print '<td class="fldsize">%s</td>' % disp_off(i.size)
            print '<td class="flddesc">%s</td>' % i.description
            print '<td class="flddisplay">%s</td>' % display
            print '<td class="flddownload">'
            paths = hpath.split(':')
            paths[stream_id] += i.name
            url = "%s?hpath=%s&stream=%s"%\
                (script_name,':'.join(paths), stream_id)
            # hack to determine if a substream is present
            # the default getSubIStream() returns InputFieldStream()
            # InputFieldStream() then returns an InputSubStream.
            # in all the overrides, the return is a different stream type,
            # but this is certainly not the safest way to check for
            # an overridden method...
            # finally, if the field is a SubFile, then it has a custom
            # substream, and thus gets the substream features.
            if not isinstance(i.getSubIStream(), InputSubStream)\
                or isinstance(i, SubFile):
                print '<a href="javascript:addStream(\'%s\')"\
 class="dllink">Parse Substream</a><br/>' % (path + i.name)
                print '<a href="%s&savesub=1"\
 class="dllink">Download Substream</a><br/>' % url
                print '<a href="%s&save=1"\
 class="dllink">Download Raw</a>' % url
            else:
                print '<a href="%s&save=1"\
 class="dllink">Download</a>' % url
            print '</td>'
            print '</tr>'
        print '</table>'
        print_path(path, data, stream_id)
        if sys.stderr.getvalue():
            print_error('Error(s) encountered:', print_headers=False)
            print '<pre class="parseerror">%s</pre>' % sys.stderr.getvalue()
    else:
        print_form('Note: Cookies MUST be enabled!')
Example #30
    def createFields(self):
        yield String(self, "magic", 4, "Signature (BLP2)")
        yield Enum(UInt32(self, "compression", "Compression type"), {
            0: "JPEG Compressed",
            1: "Uncompressed or DXT/S3TC compressed"
        })
        yield Enum(UInt8(self, "encoding", "Encoding type"), {
            1: "Raw",
            2: "DXT/S3TC Texture Compression (a.k.a. DirectX)"
        })
        yield UInt8(self, "alpha_depth",
                    "Alpha channel depth, in bits (0 = no alpha)")
        yield Enum(
            UInt8(self, "alpha_encoding", "Encoding used for alpha channel"), {
                0: "DXT1 alpha (0 or 1 bit alpha)",
                1: "DXT3 alpha (4 bit alpha)",
                7: "DXT5 alpha (8 bit interpolated alpha)"
            })
        yield Enum(
            UInt8(self, "has_mips", "Are mip levels present?"), {
                0: "No mip levels",
                1:
                "Mip levels present; number of levels determined by image size"
            })
        yield UInt32(self, "width", "Base image width")
        yield UInt32(self, "height", "Base image height")
        for i in xrange(16):
            yield UInt32(self, "mipmap_offset[]")
        for i in xrange(16):
            yield UInt32(self, "mipmap_size[]")
        yield PaletteRGBA(self, "palette", 256)

        compression = self["compression"].value
        encoding = self["encoding"].value
        alpha_depth = self["alpha_depth"].value
        alpha_encoding = self["alpha_encoding"].value
        width = self["width"].value
        height = self["height"].value

        if compression == 0:  # JPEG Compression
            yield UInt32(self, "jpeg_header_len")
            yield RawBytes(self, "jpeg_header", self["jpeg_header_len"].value,
                           "Shared JPEG Header")

        offsets = self.array("mipmap_offset")
        sizes = self.array("mipmap_size")
        for i in xrange(16):
            if not offsets[i].value or not sizes[i].value:
                continue
            padding = self.seekByte(offsets[i].value)
            if padding:
                yield padding
            if compression == 0:
                yield RawBytes(
                    self, "mipmap[%i]" % i, sizes[i].value,
                    "JPEG data, append to header to recover complete image")
            elif compression == 1 and encoding == 1:
                yield Generic2DArray(self, "mipmap_indexes[%i]" % i, height,
                                     width, PaletteIndex, "row", "index",
                                     "Indexes into the palette")
                if alpha_depth == 1:
                    yield GenericVector(self, "mipmap_alphas[%i]" % i, height,
                                        width, Bit, "row", "is_opaque",
                                        "Alpha values")
                elif alpha_depth == 8:
                    yield GenericVector(self, "mipmap_alphas[%i]" % i, height,
                                        width, UInt8, "row", "alpha",
                                        "Alpha values")
            elif compression == 1 and encoding == 2:
                block_height = alignValue(height, 4) // 4
                block_width = alignValue(width, 4) // 4
                if alpha_depth in [0, 1] and alpha_encoding == 0:
                    yield Generic2DArray(self, "mipmap[%i]" % i, block_height,
                                         block_width, DXT1, "row", "block",
                                         "DXT1-compressed image blocks")
                elif alpha_depth == 8 and alpha_encoding == 1:
                    yield Generic2DArray(self, "mipmap[%i]" % i, block_height,
                                         block_width, DXT3, "row", "block",
                                         "DXT3-compressed image blocks")
                elif alpha_depth == 8 and alpha_encoding == 7:
                    yield Generic2DArray(self, "mipmap[%i]" % i, block_height,
                                         block_width, DXT5, "row", "block",
                                         "DXT5-compressed image blocks")
            width /= 2
            height /= 2
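In the DXT/S3TC branch the image is stored as 4x4 pixel blocks, so alignValue(dim, 4) // 4 is a ceiling division that rounds partial blocks up. For instance, with a stand-in alignValue:

def alignValue(value, align):   # stand-in for hachoir's round-up helper
    return value + (-value % align)

def dxt_blocks(width, height):
    # blocks per row and per column, partial 4x4 blocks rounded up
    return alignValue(width, 4) // 4, alignValue(height, 4) // 4

assert dxt_blocks(6, 10) == (2, 3)
assert dxt_blocks(8, 8) == (2, 2)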
Example #31
 def __init__(self, parent, name, width, pixel_class):
     FieldSet.__init__(self, parent, name)
     self._pixel = pixel_class
     self._width = width
     self._size = alignValue(self._width * self._pixel.static_size, 32)
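Rounding width * static_size up to 32 is the usual bitmap scanline rule: each pixel row is padded to a 4-byte boundary. A quick check of the row size in bits, assuming static_size is the per-pixel size in bits:

def row_size_bits(width, bits_per_pixel):
    # pad each scanline up to a 32-bit (4-byte) boundary
    raw = width * bits_per_pixel
    return raw + (-raw % 32)

assert row_size_bits(3, 24) == 96    # 72 bits of pixels + 24 bits of padding
assert row_size_bits(4, 24) == 96    # exactly aligned, no padding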
Example #32
 def __init__(self, parent, name, is_32bit=True):
     FieldSet.__init__(self, parent, name)
     self._size = alignValue(self["size"].value, 4) * 8
     self.is_32bit = is_32bit
Example #33
    def createFields(self):
        yield String(self, "magic", 4, "Signature (BLP2)")
        yield Enum(UInt32(self, "compression", "Compression type"), {
            0:"JPEG Compressed",
            1:"Uncompressed or DXT/S3TC compressed"})
        yield Enum(UInt8(self, "encoding", "Encoding type"), {
            1:"Raw",
            2:"DXT/S3TC Texture Compression (a.k.a. DirectX)"})
        yield UInt8(self, "alpha_depth", "Alpha channel depth, in bits (0 = no alpha)")
        yield Enum(UInt8(self, "alpha_encoding", "Encoding used for alpha channel"), {
            0:"DXT1 alpha (0 or 1 bit alpha)",
            1:"DXT3 alpha (4 bit alpha)",
            7:"DXT5 alpha (8 bit interpolated alpha)"})
        yield Enum(UInt8(self, "has_mips", "Are mip levels present?"), {
            0:"No mip levels",
            1:"Mip levels present; number of levels determined by image size"})
        yield UInt32(self, "width", "Base image width")
        yield UInt32(self, "height", "Base image height")
        for i in xrange(16):
            yield UInt32(self, "mipmap_offset[]")
        for i in xrange(16):
            yield UInt32(self, "mipmap_size[]")
        yield PaletteRGBA(self, "palette", 256)

        compression = self["compression"].value
        encoding = self["encoding"].value
        alpha_depth = self["alpha_depth"].value
        alpha_encoding = self["alpha_encoding"].value
        width = self["width"].value
        height = self["height"].value

        if compression == 0: # JPEG Compression
            yield UInt32(self, "jpeg_header_len")
            yield RawBytes(self, "jpeg_header", self["jpeg_header_len"].value, "Shared JPEG Header")

        offsets = self.array("mipmap_offset")
        sizes = self.array("mipmap_size")
        for i in xrange(16):
            if not offsets[i].value or not sizes[i].value:
                continue
            padding = self.seekByte(offsets[i].value)
            if padding:
                yield padding
            if compression == 0:
                yield RawBytes(self, "mipmap[%i]" % i, sizes[i].value, "JPEG data, append to header to recover complete image")
            elif compression == 1 and encoding == 1:
                yield Generic2DArray(self, "mipmap_indexes[%i]" % i, height, width, PaletteIndex, "row", "index", "Indexes into the palette")
                if alpha_depth == 1:
                    yield GenericVector(self, "mipmap_alphas[%i]" % i, height, width, Bit, "row", "is_opaque", "Alpha values")
                elif alpha_depth == 8:
                    yield GenericVector(self, "mipmap_alphas[%i]" % i, height, width, UInt8, "row", "alpha", "Alpha values")
            elif compression == 1 and encoding == 2:
                block_height = alignValue(height, 4) // 4
                block_width = alignValue(width, 4) // 4
                if alpha_depth in [0, 1] and alpha_encoding == 0:
                    yield Generic2DArray(self, "mipmap[%i]" % i, block_height, block_width, DXT1, "row", "block", "DXT1-compressed image blocks")
                elif alpha_depth == 8 and alpha_encoding == 1:
                    yield Generic2DArray(self, "mipmap[%i]" % i, block_height, block_width, DXT3, "row", "block", "DXT3-compressed image blocks")
                elif alpha_depth == 8 and alpha_encoding == 7:
                    yield Generic2DArray(self, "mipmap[%i]" % i, block_height, block_width, DXT5, "row", "block", "DXT5-compressed image blocks")
            width /= 2
            height /= 2