def parse_frame_script_list(blob):
    """Parse a frame's script list blob.

    Returns a list of ((castlib_nr, member_nr), extra) tuples, one per
    6-byte big-endian record (two u16 followed by an i32) in the blob.
    """
    buf = SeqBuffer(blob)
    scripts = []
    while not buf.at_eof():
        [castlib_nr, member_nr, extra] = buf.unpack('>HHi')
        scripts.append(((castlib_nr, member_nr), extra))
    return scripts
def read_ILS_section_into(ils_data, entries_by_nr, dest):
    """Distribute the contents of an 'ILS ' section to its member sections.

    ils_data is a sequence of (varint section-nr, raw bytes) records; each
    record's payload length comes from the corresponding entry's .size.
    `dest` is unused here but kept for interface compatibility with callers.
    """
    buf = SeqBuffer(ils_data)
    while not buf.at_eof():
        nr = buf.unpackVarint()
        section = entries_by_nr[nr]
        data = buf.readBytes(section.size)
        section.set_bytes(data)
def parse_cast_table_section(blob, loader_context):
    """Parse a cast table section: a flat list of big-endian i32 values."""
    buf = SeqBuffer(blob)
    res = []
    while not buf.at_eof():
        (item,) = buf.unpack('>i')
        res.append(item)
    return res
def parse_cast_table_section(blob, loader_context):
    """Parse a cast table section: a flat list of big-endian i32 values."""
    buf = SeqBuffer(blob)
    res = []
    while not buf.at_eof():
        (item,) = buf.unpack('>i')
        res.append(item)
    return res
def parse_score_entry_nr0(blob):
    """Parse score root entry #0: a header followed by per-frame delta data.

    Returns (sprite_count, sprite_size, deltas) where deltas is one
    FrameDelta per frame, each holding FrameDeltaItem(offset, bytes) items.
    """
    buf = SeqBuffer(blob)
    [actualSize, c2, frameCount, c4, sprite_size, c6, v7] = buf.unpack('>3i4h')
    print("DB| Score root primary: header=%s" % [actualSize, c2, frameCount, c4, sprite_size, c6, v7])
    sprite_count = v7 + 6  # presumably 6 reserved channels + v7 sprite channels — TODO confirm
    print("DB| Score root primary: extra=%s" % [c2, c4, c6, v7])
    # Re-window onto the payload: from just past the header up to actualSize.
    buf = SeqBuffer(blob[buf.tell():actualSize])
    maxOffset = 0
    totItNr = 0
    deltas = []
    for frNr in range(1, frameCount + 1):
        frNr += 1  # only ever affected (now removed) debug output; loop var is rebound each iteration
        [frameDataLength] = buf.unpack('>H')
        # The stored length includes the 2-byte length field itself:
        frBuf = SeqBuffer(buf.readBytes(frameDataLength - 2))
        delta_items = []
        itNr = 0
        while not frBuf.at_eof():
            itNr += 1
            totItNr += 1
            [itemLength, offset] = frBuf.unpack('>HH')
            if offset > maxOffset:
                maxOffset = offset
            s = frBuf.readBytes(itemLength)
            delta_items.append(FrameDeltaItem(offset, s))
        deltas.append(FrameDelta(delta_items))
    return (sprite_count, sprite_size, deltas)
def parse_abmp_section(blob, file):
    """Parse an 'ABMP' (afterburner map) section into a list of ABMPEntry.

    Each entry record is five varints (id, offset, compressed size,
    decompressed size, representation mode) plus a reversed 4-byte tag.
    """
    buf = SeqBuffer(blob)
    v1 = buf.unpackVarint()
    v2 = buf.unpackVarint()
    section_count = buf.unpackVarint()
    print("ABMP header: %s" % [v1, v2, section_count])
    sections = []
    for i in range(section_count):
        sect_id = buf.unpackVarint()
        offset = buf.unpackVarint()       # offset
        comp_size = buf.unpackVarint()    # size in file
        decomp_size = buf.unpackVarint()  # size, decompressed
        repr_mode = buf.unpackVarint()
        [tag] = buf.unpack('<4s')
        tag = rev(tag)  # tags are stored byte-reversed
        print("ABMP entry: %s" % ([sect_id, tag, offset, comp_size, decomp_size, repr_mode]))
        sections.append(ABMPEntry(sect_id, tag, comp_size, offset, repr_mode))
    print("Bytes left in ABMP section: %d" % buf.bytes_left())
    return sections
def parse_score_entry_nr1(blob):
    """Parse score root entry #1: an i32 count followed by that many i32 indexes."""
    buf = SeqBuffer(blob)
    [count] = buf.unpack('>i')
    table = []
    for i in range(count):
        [idx] = buf.unpack('>i')
        table.append(idx)
    return table
def read_bytes(self):
    """Read this section's payload from the backing file.

    Seeks to the stored offset, verifies the 8-byte section header's tag
    against self.tag, and returns the following self.size bytes.
    Raises Exception when the on-disk tag does not match.
    """
    file = self.file
    file.seek(self.offset)
    xheader = file.read(8)
    buf = SeqBuffer(xheader, self.loader_context.is_little_endian)
    tag = buf.readTag()
    if tag != self.tag:
        raise Exception("section header is actually %s, not %s as expected" % (tag, self.tag))
    return file.read(self.size)
def parse_cast_order_section(blob, loader_context):
    """Parse a cast-order ('Sord') section.

    Returns a list of (castlib_nr, castmember_nr) pairs in display order.
    """
    print("DB| parse_cast_order_section...")
    buf = SeqBuffer(blob, loader_context)
    [_zero1, _zero2, nElems, nElems2, v5] = buf.unpack('>5i')
    print("DB| parse_cast_order_section: header: %s" % ([_zero1, _zero2, nElems, nElems2, v5],))
    table = []
    for i in range(nElems):
        [castlib_nr, castmember_nr] = buf.unpack('>HH')
        print("DB| parse_cast_order_section #%d: %s" % (i, (castlib_nr, castmember_nr)))
        table.append((castlib_nr, castmember_nr))
    return table
def read_bytes(self):
    """Read this section's payload from the backing file.

    Seeks to the stored offset, verifies the 8-byte section header's tag
    against self.tag, and returns the following self.size bytes.
    Raises Exception when the on-disk tag does not match.
    """
    file = self.file
    file.seek(self.offset)
    xheader = file.read(8)
    buf = SeqBuffer(xheader, self.loader_context.is_little_endian)
    tag = buf.readTag()
    if tag != self.tag:
        raise Exception(
            "section header is actually %s, not %s as expected" % (tag, self.tag))
    return file.read(self.size)
def parse_cast_order_section(blob, loader_context):
    """Parse a cast-order ('Sord') section.

    Returns a list of (castlib_nr, castmember_nr) pairs in display order.
    """
    print("DB| parse_cast_order_section...")
    buf = SeqBuffer(blob, loader_context)
    [_zero1, _zero2, nElems, nElems2, v5] = buf.unpack('>5i')
    print("DB| parse_cast_order_section: header: %s" % ([_zero1, _zero2, nElems, nElems2, v5],))
    table = []
    for i in range(nElems):
        [castlib_nr, castmember_nr] = buf.unpack('>HH')
        print("DB| parse_cast_order_section #%d: %s" % (i, (castlib_nr, castmember_nr)))
        table.append((castlib_nr, castmember_nr))
    return table
def find_and_read_section(f, tag_to_find, loader_context):
    """Scan chunk headers in f until tag_to_find is found, then parse it.

    Each chunk is an 8-byte header (4-byte tag + 4-byte size) followed by
    the payload; non-matching chunks are skipped with a relative seek.
    NOTE(review): loops forever if the tag never appears — assumes it does.
    """
    while True:
        xheader = f.read(8)
        buf = SeqBuffer(xheader, loader_context.is_little_endian)
        tag = buf.readTag()
        [size] = buf.unpack('>i', '<i')
        print("  tag=%s" % tag)
        if tag == tag_to_find:
            blob = f.read(size)
            return parse_mmap_section(blob, f, loader_context)
        else:
            f.seek(size, 1)
def find_and_read_section(f, tag_to_find, loader_context):
    """Scan chunk headers in f until tag_to_find is found, then parse it.

    Each chunk is an 8-byte header (4-byte tag + 4-byte size) followed by
    the payload; non-matching chunks are skipped with a relative seek.
    NOTE(review): loops forever if the tag never appears — assumes it does.
    """
    while True:
        xheader = f.read(8)
        buf = SeqBuffer(xheader, loader_context.is_little_endian)
        tag = buf.readTag()
        [size] = buf.unpack('>i', '<i')
        print("  tag=%s" % tag)
        if tag == tag_to_find:
            blob = f.read(size)
            return parse_mmap_section(blob, f, loader_context)
        else:
            f.seek(size, 1)
def __init__(self, snr, tag, blob):
    """Classify a media blob by sniffing its leading bytes.

    JPEG (SOI marker 0xFFD8): media_type=b"JPEG", no header.
    MP3-wrapped (b'\\0\\0\\1@'): a u32 header length, then header bytes,
    then the music payload.  Anything else: media_type is a repr of the
    first 16 bytes for diagnostics.
    """
    Media.__init__(self, snr, tag, blob)
    if blob.startswith(b'\xff\xd8'):
        self.media_type = b"JPEG"
        self.hdr = None
    elif blob.startswith(b'\0\0\1@'):
        # NOTE(review): str here vs bytes b"JPEG" above — inconsistent
        # types for media_type; preserved as-is, verify against callers.
        self.media_type = "MP3"
        buf = SeqBuffer(blob)
        [hdr_len] = buf.unpack('>I')
        self.hdr = buf.readBytes(hdr_len)
        self.music = buf.peek_bytes_left()
    else:
        self.media_type = repr(blob[:16])
        self.hdr = None
def parse(blob, snr, loader_context):
    """Parse a cast-member section into a CastMember.

    Layout: a 12-byte header (type, common_length, v2), then a common blob
    containing an offset table of nElems+1 i32s delimiting attribute slots;
    attrs[1], when present, is the member's name (Pascal string).  The
    remaining type-specific bytes go to CastMember.parse_castdata.
    """
    buf = SeqBuffer(blob)
    [type, common_length, v2] = buf.unpack('>3i')
    common_blob = buf.readBytes(common_length)
    buf2 = SeqBuffer(common_blob)
    [v3, v4, v5, v6, cast_id, nElems] = buf2.unpack('>5iH')
    offsets = []
    for i in range(nElems + 1):
        [tmp] = buf2.unpack('>i')
        offsets.append(tmp)
    blob_after_table = buf2.peek_bytes_left()
    attrs = []
    for i in range(len(offsets) - 1):
        attr = blob_after_table[offsets[i]:offsets[i + 1]]
        print("DB| Cast member attr #%d: <%s>" % (i, attr))
        attrs.append(attr)
    if len(attrs) >= 2 and len(attrs[1]) > 0:
        name = SeqBuffer(attrs[1]).unpackString8()
    else:
        name = None
    print("DB| Cast-member common: name=\"%s\" attrs=%s misc=%s" % (
        name, attrs, [v2, v3, v4, v5, v6, cast_id]))
    noncommon = buf.peek_bytes_left()
    castdata = CastMember.parse_castdata(type, cast_id, SeqBuffer(noncommon), attrs)
    res = CastMember(snr, type, name, attrs, castdata)
    return res
def parse_mmap_section(blob, file, loader_context):
    """Parse an 'mmap' (memory map) section into a SectionMap.

    Reads the header, then nUsed entries of (tag, size, offset, w1, w2,
    link).  'free'/'junk' entries become NullSection placeholders; the
    rest become lazy SectionImpl handles onto the backing file.
    """
    buf = SeqBuffer(blob, loader_context.is_little_endian)
    [v1, v2, nElems, nUsed, junkPtr, v3, freePtr] = buf.unpack('>HHiiiii', '<HHiiiii')
    print("mmap header: %s" % [v1, v2, nElems, nUsed, junkPtr, v3, freePtr])
    sections = []
    for i in range(nUsed):
        tag = buf.readTag()
        [size, offset, w1, w2, link] = buf.unpack('>IIhhi', '<IIhhi')
        # Tags are bytes (cf. the sibling py3 version of this function);
        # comparing against str literals would never match.
        if tag == b"free" or tag == b"junk":
            section = NullSection(tag)
        else:
            section = SectionImpl(tag, size, offset, file, loader_context)
        sections.append(section)
    return SectionMap(sections)
def parse_mmap_section(blob, file, loader_context):
    """Parse an 'mmap' (memory map) section into a SectionMap.

    Reads the header, then nUsed entries of (tag, size, offset, w1, w2,
    link).  'free'/'junk' entries become NullSection placeholders; the
    rest become lazy SectionImpl handles onto the backing file.
    """
    buf = SeqBuffer(blob, loader_context.is_little_endian)
    [v1, v2, nElems, nUsed, junkPtr, v3, freePtr] = buf.unpack('>HHiiiii', '<HHiiiii')
    print("mmap header: %s" % [v1, v2, nElems, nUsed, junkPtr, v3, freePtr])
    sections = []
    for i in range(nUsed):
        tag = buf.readTag()
        [size, offset, w1, w2, link] = buf.unpack('>IIhhi', '<IIhhi')
        if tag == b"free" or tag == b"junk":
            section = NullSection(tag)
        else:
            section = SectionImpl(tag, size, offset, file, loader_context)
        sections.append(section)
    return SectionMap(sections)
def parse_cast_lib_section(blob, loader_context):
    """Parse a cast-library ('MCsL') section into a CastLibraryTable.

    Header gives nElems entries of ofsPerElem sub-blobs each, addressed by
    an offset table.  Per entry the sub-blobs are: 0=name, 1=optional path,
    2=an i16, 3=four i16s (index range, assoc id, self index).
    """
    # Read header:
    buf = SeqBuffer(blob)
    [v1, nElems, ofsPerElem, nOffsets, v5] = buf.unpack('>iiHii')
    print("DB| Cast lib section header: nElems=%d, nOffsets=%d, ofsPerElem=%d, misc=%s" % (nElems, nOffsets, ofsPerElem, [v1, v5]))
    # Read offset table:
    offsets = []
    for i in range(nOffsets):
        [offset] = buf.unpack('>i')
        offsets.append(offset)
    base = buf.tell()
    offnr = 0
    table = []
    for enr in range(nElems):
        entry = []
        for i in range(ofsPerElem):
            subblob = buf.buf[base + offsets[offnr]:base + offsets[offnr + 1]]
            offnr += 1
            buf2 = SeqBuffer(subblob)
            if i == 0:
                item = buf2.unpackString8()
            elif i == 1:
                if buf2.bytes_left() > 0:
                    item = buf2.unpackString8()
                else:
                    item = None
            elif i == 2:
                [item] = buf2.unpack('>h')
            elif i == 3:
                [w1, w2, w3, w4] = buf2.unpack('>hhhh')
                item = (w1, w2, w3, w4)
            else:
                item = subblob
            entry.append(item)
        print("DB| Cast lib table entry #%d: %s" % (enr + 1, entry))
        [name, path, _zero, (low_idx, high_idx, assoc_id, self_idx)] = entry
        table.append(CastLibrary(enr + 1, name, path, assoc_id, (low_idx, high_idx), self_idx))
    return CastLibraryTable(table)
def parse_assoc_table(blob, loader_context):
    """Takes a 'KEY*' section and returns an AssociationTable.

    Each entry associates an owned section id with an owner: owner id 1024
    marks a cast-library-level section, anything else a cast member's media.
    """
    buf = SeqBuffer(blob, loader_context.is_little_endian)
    [v1, v2, nElems, nValid] = buf.unpack('>HHii', '<HHii')
    print("KEY* header: %s" % [v1, v2, nElems, nValid])
    # v1 = table start offset, v2 = table entry size?
    atable = AssociationTable()
    for i in range(nValid):
        [owned_section_id, composite_id] = buf.unpack('>ii', '<ii')
        tag = buf.readTag()
        # composite_id packs (castlib assoc id << 16) | owner section id:
        castlib_assoc_id = composite_id >> 16
        owner_section_id = composite_id & 0xFFFF
        print("DB| KEY* entry #%d: %s" % (i, [tag, owned_section_id, castlib_assoc_id, owner_section_id]))
        if owner_section_id == 1024:
            atable.add_library_section(castlib_assoc_id, owned_section_id, tag)
        else:
            atable.add_cast_media(owner_section_id, owned_section_id, tag)
    return atable
def parse_frame_label_section(sections_map, loader_context):
    """Parse the 'VWLB' frame-label section, or return None when absent.

    Returns a list of (frame_nr, label) pairs; labels are delimited by an
    offset table with a trailing sentinel entry.
    """
    # Obtain section:
    vwlb_e = sections_map.entry_by_tag("VWLB")
    if vwlb_e is None:
        return None
    buf = SeqBuffer(vwlb_e.bytes())
    [nElems] = buf.unpack('>H')
    offset_table = []
    for i in range(nElems + 1):
        [frame_nr, offset] = buf.unpack('>HH')
        offset_table.append((frame_nr, offset))
    base_pos = buf.tell()
    label_table = []
    for i in range(nElems):
        (frame_nr, offset1) = offset_table[i]
        (_, offset2) = offset_table[i + 1]
        label = buf.pread_from_to(base_pos + offset1, base_pos + offset2)
        label_table.append((frame_nr, label))
    print("DB| Frame labels: %s" % label_table)
    return label_table
def parse_assoc_table(blob, loader_context):
    """Takes a 'KEY*' section and returns an AssociationTable.

    Each entry associates an owned section id with an owner: owner id 1024
    marks a cast-library-level section, anything else a cast member's media.
    """
    buf = SeqBuffer(blob, loader_context.is_little_endian)
    [v1, v2, nElems, nValid] = buf.unpack('>HHii', '<HHii')
    print("KEY* header: %s" % [v1, v2, nElems, nValid])
    # v1 = table start offset, v2 = table entry size?
    atable = AssociationTable()
    for i in range(nValid):
        [owned_section_id, composite_id] = buf.unpack('>ii', '<ii')
        tag = buf.readTag()
        # composite_id packs (castlib assoc id << 16) | owner section id:
        castlib_assoc_id = composite_id >> 16
        owner_section_id = composite_id & 0xFFFF
        print("DB| KEY* entry #%d: %s" % (i, [
            bytes(tag), owned_section_id, castlib_assoc_id, owner_section_id
        ]))
        if owner_section_id == 1024:
            atable.add_library_section(castlib_assoc_id, owned_section_id, bytes(tag))
        else:
            atable.add_cast_media(owner_section_id, owned_section_id, bytes(tag))
    return atable
def parse_dir_config(blob, loader_context):
    """Parse a movie-config section; returns {'palette': ..., 'misc': [...]}.

    Only the palette id is decoded; the surrounding fields are kept raw in
    'misc' for inspection.
    NOTE(review): other call sites pass loader_context.is_little_endian to
    SeqBuffer, this one passes loader_context itself — verify intended.
    """
    buf = SeqBuffer(blob, loader_context)
    misc1 = buf.unpack('>32h')
    misc2 = buf.unpack('>3i')
    [palette] = buf.unpack('>i')
    misc3 = buf.peek_bytes_left()
    config = {'palette': palette, 'misc': [misc1, misc2, bytes(misc3)]}
    print("DB| parse_dir_config: %r" % (config,))
    return config
def __init__(self, snr, tag, blob):
    """Decode an RLE-compressed bitmap sized height*width.

    Packing scheme (PackBits-style): a signed control byte d; d < 0 means
    repeat the next byte (1 - d) times, d >= 0 means copy (1 + d) literal
    bytes.  Decoding stops at height*width bytes or end of input.
    """
    Media.__init__(self, snr, tag, blob)
    buf = SeqBuffer(blob)
    [self.height, self.width] = buf.unpack('>II')
    to_read = self.height * self.width
    res = bytearray()
    while len(res) < to_read and not buf.at_eof():
        [d] = buf.unpack('b')
        if d < 0:
            run_length = 1 - d
            v = buf.readBytes(1)
            res.extend(v * run_length)
        else:
            lit_length = 1 + d
            res.extend(buf.readBytes(lit_length))
    self.decoded = bytes(res)
def __init__(self, snr, tag, blob):
    """Decode an RLE-compressed blob to self.decoded.

    Packing scheme (PackBits-style): a signed control byte d; d < 0 means
    repeat the next byte (1 - d) times, d >= 0 means copy (1 + d) literal
    bytes.  Runs until the input is exhausted.
    """
    Media.__init__(self, snr, tag, blob)
    buf = SeqBuffer(blob)
    res = bytearray()
    while not buf.at_eof():
        [d] = buf.unpack('b')
        if d < 0:
            run_length = 1 - d
            v = buf.readBytes(1)
            res.extend(v * run_length)
        else:
            lit_length = 1 + d
            res.extend(buf.readBytes(lit_length))
    self.decoded = bytes(res)
def parse_frame_label_section(sections_map, loader_context):
    """Parse the 'VWLB' frame-label section, or return None when absent.

    Returns a list of (frame_nr, label) pairs; labels are delimited by an
    offset table with a trailing sentinel entry.
    """
    # Obtain section:
    vwlb_e = sections_map.entry_by_tag("VWLB")
    if vwlb_e is None:
        return None
    buf = SeqBuffer(vwlb_e.bytes())
    [nElems] = buf.unpack('>H')
    offset_table = []
    for i in range(nElems + 1):
        [frame_nr, offset] = buf.unpack('>HH')
        offset_table.append((frame_nr, offset))
    base_pos = buf.tell()
    label_table = []
    for i in range(nElems):
        (frame_nr, offset1) = offset_table[i]
        (_, offset2) = offset_table[i + 1]
        label = buf.pread_from_to(base_pos + offset1, base_pos + offset2)
        label_table.append((frame_nr, label))
    print("DB| Frame labels: %s" % label_table)
    return label_table
def parse_score_section(sections_map, loader_context):
    """Parse the 'VWSC' score section into a FrameSequence, or None if absent.

    Layout: a header, an offset table of count1b i32s, then variable-size
    entries addressed through it.  Entry 0 holds the frame deltas, entry 1
    the list of occupied entry indexes; each occupied entry is a triple of
    consecutive sub-entries (primary/secondary/tertiary), the secondary
    holding the frame's script list.
    """
    # Obtain section:
    vwsc_e = sections_map.entry_by_tag("VWSC")
    if vwsc_e is None:
        return None
    # Parse header:
    buf = SeqBuffer(vwsc_e.bytes())
    [totalLength, v1, v2, count1a, count1b, size2] = buf.unpack('>6i')
    # Usually, v1=-3, v2=12, count1b=count1a+1.
    print("DB| Score section: counts=%s size=%s misc=%s" % ([count1a, count1b], [size2], [v1, v2]))
    # Parse offset table:
    offsets = []
    for i in range(count1b):
        [offset] = buf.unpack('>i')
        offsets.append(offset)
    print("DB| Score section offsets=%s" % offsets)
    base = buf.tell()

    def get_entry_bytes(idx):
        # Slice entry #idx out of the payload via the offset table.
        return buf.pread_from_to(base + offsets[idx], base + offsets[idx + 1])

    # Parse entry index list:
    print("DB| Score root tertiary raw: <%s>" % get_entry_bytes(2))
    (sprite_count, sprite_size, frame_table) = parse_score_entry_nr0(get_entry_bytes(0))
    entry_indexes = parse_score_entry_nr1(get_entry_bytes(1))
    print("DB| Occupied score entries (count=%d): %s" % (len(entry_indexes), entry_indexes))
    # Parse entries:
    table = []
    frame_scripts = []
    for nr, primary_idx in enumerate(entry_indexes):
        prim = get_entry_bytes(primary_idx)
        sec = get_entry_bytes(primary_idx + 1)
        tert = get_entry_bytes(primary_idx + 2)
        primbuf = SeqBuffer(prim)
        [starttime, endtime, w3, w4, w5] = primbuf.unpack('>5i')
        [w6, w7, w8, w9, w10, w11, w12] = primbuf.unpack('>HiH4i')
        print("DB| Score entry #%d@%d secondary (len=%d): <%s>" % (nr, primary_idx, len(sec), sec))
        entry = (prim, sec, tert)
        scripts_in_frame = parse_frame_script_list(sec)
        frame_scripts.append(scripts_in_frame)
        table.append(entry)
    return FrameSequence(sprite_count, sprite_size, frame_table, frame_scripts)
def parse_score_section(sections_map, loader_context):
    """Parse the 'VWSC' score section into a FrameSequence, or None if absent.

    Layout: a header, an offset table of count1b i32s, then variable-size
    entries addressed through it.  Entry 0 holds the frame deltas, entry 1
    the list of occupied entry indexes; each occupied entry is a triple of
    consecutive sub-entries (primary/secondary/tertiary), the secondary
    holding the frame's script list.
    """
    # Obtain section:
    vwsc_e = sections_map.entry_by_tag("VWSC")
    if vwsc_e is None:
        return None
    # Parse header:
    buf = SeqBuffer(vwsc_e.bytes())
    [totalLength, v1, v2, count1a, count1b, size2] = buf.unpack('>6i')
    # Usually, v1=-3, v2=12, count1b=count1a+1.
    print("DB| Score section: counts=%s size=%s misc=%s" % ([count1a, count1b], [size2], [v1, v2]))
    # Parse offset table:
    offsets = []
    for i in range(count1b):
        [offset] = buf.unpack('>i')
        offsets.append(offset)
    print("DB| Score section offsets=%s" % offsets)
    base = buf.tell()

    def get_entry_bytes(idx):
        # Slice entry #idx out of the payload via the offset table.
        return buf.pread_from_to(base + offsets[idx], base + offsets[idx + 1])

    # Parse entry index list:
    print("DB| Score root tertiary raw: <%s>" % get_entry_bytes(2))
    (sprite_count, sprite_size, frame_table) = parse_score_entry_nr0(get_entry_bytes(0))
    entry_indexes = parse_score_entry_nr1(get_entry_bytes(1))
    print("DB| Occupied score entries (count=%d): %s" % (len(entry_indexes), entry_indexes))
    # Parse entries:
    table = []
    frame_scripts = []
    for nr, primary_idx in enumerate(entry_indexes):
        prim = get_entry_bytes(primary_idx)
        sec = get_entry_bytes(primary_idx + 1)
        tert = get_entry_bytes(primary_idx + 2)
        primbuf = SeqBuffer(prim)
        [starttime, endtime, w3, w4, w5] = primbuf.unpack('>5i')
        [w6, w7, w8, w9, w10, w11, w12] = primbuf.unpack('>HiH4i')
        print("DB| Score entry #%d@%d secondary (len=%d): <%s>" % (nr, primary_idx, len(sec), sec))
        entry = (prim, sec, tert)
        scripts_in_frame = parse_frame_script_list(sec)
        frame_scripts.append(scripts_in_frame)
        table.append(entry)
    return FrameSequence(sprite_count, sprite_size, frame_table, frame_scripts)
def parse(buf, *args):
    """Parse an Xmed cast-type record into an XmedCastType.

    Reads a length-prefixed type name and a length-prefixed info blob.
    For 'vectorShape' media carrying a 'FLSH' payload the info blob is
    decoded into (misc, misc2, points); otherwise the raw blob is kept.
    """
    [type_length] = buf.unpack('>I')
    med_type = buf.readBytes(type_length)
    [rest_length] = buf.unpack('>I')
    info = buf.readBytes(rest_length)
    buf = SeqBuffer(info)
    # Fix: med_type is bytes (readBytes), so the old comparison against the
    # str 'vectorShape' could never match and this branch was dead.
    if rest_length >= 4 and buf.readTag() == b'FLSH' and med_type == b'vectorShape':
        [sz2] = buf.unpack('>I')
        half_expect(sz2, rest_length, "XmedCastType.sz2")
        misc = buf.unpack('>24i')
        [npoints] = buf.unpack('>I')
        misc2 = buf.unpack('>35i')
        [proplen] = buf.unpack('>I')
        propname = buf.readBytes(proplen)
        points = []
        for i in range(npoints):
            if i:
                # Points after the first are preceded by a sentinel i32
                # (presumably a separator) — TODO confirm against samples.
                [vx] = buf.unpack('>i')
                half_expect(vx, -0x80000000, 'XmedCastType.vx')
            pdesc = buf.unpack('>6i')
            points.append(pdesc)
        [proplen] = buf.unpack('>I')
        propname = buf.readBytes(proplen)
        half_expect(buf.peek_bytes_left(), b'', 'XmedCastType.trailer')
        print("DB| XmedCastType.parse: misc=%r npoints=%r misc2=%r, points=%r" %
              (misc, npoints, misc2, points))
        info = (misc, misc2, points)
    return XmedCastType(med_type, info, *args)
def parse_abmp_section(blob, file):
    """Parse an 'ABMP' (afterburner map) section into a list of ABMPEntry.

    Each entry record is five varints (id, offset, compressed size,
    decompressed size, representation mode) plus a reversed 4-byte tag.
    """
    buf = SeqBuffer(blob)
    v1 = buf.unpackVarint()
    v2 = buf.unpackVarint()
    section_count = buf.unpackVarint()
    print("ABMP header: %s" % [v1, v2, section_count])
    sections = []
    for i in range(section_count):
        sect_id = buf.unpackVarint()
        offset = buf.unpackVarint()       # offset
        comp_size = buf.unpackVarint()    # size in file
        decomp_size = buf.unpackVarint()  # size, decompressed
        repr_mode = buf.unpackVarint()
        [tag] = buf.unpack('<4s')
        tag = rev(tag)  # tags are stored byte-reversed
        print("ABMP entry: %s" % ([sect_id, tag, offset, comp_size, decomp_size, repr_mode]))
        sections.append(ABMPEntry(sect_id, tag, comp_size, offset, repr_mode))
    print("Bytes left in ABMP section: %d" % buf.bytes_left())
    return sections
def parse(blob, snr, loader_context):
    """Parse a cast-member section into a CastMember.

    Layout: a 12-byte header (type, common_length, v2), then a common blob
    containing an offset table of nElems+1 i32s delimiting attribute slots.
    Decoded slots are nulled out in attrs: #1 = member name (Pascal string),
    #17/#18 = creation/modification timestamps (u32, presumably classic
    Mac epoch — TODO confirm).  Remaining bytes go to parse_castdata.
    """
    buf = SeqBuffer(blob)
    [type, common_length, v2] = buf.unpack('>3i')
    common_blob = buf.readBytes(common_length)
    buf2 = SeqBuffer(common_blob)
    [v3, v4, v5, v6, cast_id, nElems] = buf2.unpack('>5iH')
    offsets = []
    for i in range(nElems + 1):
        [tmp] = buf2.unpack('>i')
        offsets.append(tmp)
    blob_after_table = buf2.peek_bytes_left()
    attrs = []
    for i in range(len(offsets) - 1):
        attr = blob_after_table[offsets[i]:offsets[i + 1]]
        print("DB| Cast member attr #%d: <%r>" % (i, attr))
        attrs.append(attr)
    if len(attrs) >= 2 and len(attrs[1]) > 0:
        name = SeqBuffer(attrs[1]).unpackString8()
        attrs[1] = None
    else:
        name = None
    if len(attrs) >= 18 and len(attrs[17]) == 4:
        [ctime] = struct.unpack('>I', attrs[17])
        attrs[17] = None
    else:
        ctime = None
    if len(attrs) >= 19 and len(attrs[18]) == 4:
        [mtime] = struct.unpack('>I', attrs[18])
        attrs[18] = None
    else:
        mtime = None
    print("DB| Cast-member common: name=\"%s\" ctime=%s mtime=%s attrs=%s misc=%s" %
          (name, ctime and time.ctime(ctime), mtime and time.ctime(mtime),
           attrs, [v2, v3, v4, v5, v6, cast_id]))
    noncommon = buf.peek_bytes_left()
    castdata = CastMember.parse_castdata(type, cast_id, SeqBuffer(noncommon), attrs)
    res = CastMember(snr, type, name, ctime, mtime, attrs, castdata)
    return res
def create_section_map(f, loader_context):
    """Read an afterburner-style file and build a SectionMapImpl.

    First pass: walk (tag, varint size) chunks, inflating 'Fcdr'/'FGEI'
    and decoding 'ABMP' (zlib after two varints) into the section index.
    Second pass: materialize one section object per ABMP entry —
    offset -1 means the payload lives inside the 'ILS ' section and is
    filled in afterwards via read_ILS_section_into.
    """
    while True:
        xsectheader = f.read(4)
        if len(xsectheader) < 4:
            break
        [stag] = struct.unpack('<4s', xsectheader)
        stag = rev(stag)  # tags are stored byte-reversed
        ssize = read_varint(f)
        print("stag=%s ssize=%d" % (stag, ssize))
        if ssize == 0:
            break
        else:
            sect_data = f.read(ssize)
            if stag == "Fcdr" or stag == "FGEI":
                sect_data = zlib.decompress(sect_data)
                print("ssize decompressed=%d" % (len(sect_data)))
            elif stag == "ABMP":
                buf = SeqBuffer(sect_data)
                sect_data_null = buf.unpackVarint()
                sect_data_size = buf.unpackVarint()
                sect_data = zlib.decompress(buf.peek_bytes_left())
                del buf
                print("ssize decompressed=%d=%d" % (sect_data_size, len(sect_data)))
                abmp = parse_abmp_section(sect_data, f)
                print("DB| ABMP: %s" % abmp)
            if stag != "ABMP":
                print("DB| %s -> %s" % (stag, sect_data))
    section_base_pos = f.tell()
    # Fetch the sections:
    sections = []
    sections_in_ils_by_nr = {}
    ils_section_bytes = None
    for e in abmp:
        snr = e.nr
        print("DB| section nr %s: %s" % (snr, e))
        if e.offset == -1:
            # Compressed, in ILS section — body arrives later.
            section = LateSectionImpl(snr, e.tag, e.size)
            sections_in_ils_by_nr[snr] = section
        elif e.repr_mode == 0:
            section = ZSectionImpl(snr, e.tag, e.size, section_base_pos + e.offset, f)
        elif e.repr_mode == 1:
            # NOTE(review): uses e.offset without section_base_pos, unlike
            # the repr_mode==0 branch — verify against sample files.
            section = UncompSectionImpl(snr, e.tag, e.size, e.offset, f)
        else:
            # Fix: was `raise "..."` — raising a string is a TypeError in
            # Python 3; wrap in Exception.
            raise Exception("unknown repr_mode: %d" % e.repr_mode)
        sections.append(section)
        if e.tag == "ILS ":
            ils_section_bytes = section.bytes()
    if sections_in_ils_by_nr:
        read_ILS_section_into(ils_section_bytes, sections_in_ils_by_nr, sections)
    print("Sections:")
    for e in sections:
        print("  %d: %s" % (e.nr, e))
    return SectionMapImpl(sections)
def parse_score_entry_nr0(blob):
    """Parse score root entry #0: a header followed by per-frame delta data.

    Returns (sprite_count, sprite_size, deltas) where deltas is one
    FrameDelta per frame, each holding FrameDeltaItem(offset, bytes) items.
    """
    buf = SeqBuffer(blob)
    [actualSize, c2, frameCount, c4, sprite_size, c6, v7] = buf.unpack('>3i4h')
    print("DB| Score root primary: header=%s" %
          [actualSize, c2, frameCount, c4, sprite_size, c6, v7])
    sprite_count = v7 + 6  # presumably 6 reserved channels + v7 sprite channels — TODO confirm
    print("DB| Score root primary: extra=%s" % [c2, c4, c6, v7])
    # Re-window onto the payload: from just past the header up to actualSize.
    buf = SeqBuffer(blob[buf.tell():actualSize])
    maxOffset = 0
    totItNr = 0
    deltas = []
    for frNr in range(1, frameCount + 1):
        frNr += 1  # only ever affected (now removed) debug output; loop var is rebound each iteration
        [frameDataLength] = buf.unpack('>H')
        # The stored length includes the 2-byte length field itself:
        frBuf = SeqBuffer(buf.readBytes(frameDataLength - 2))
        delta_items = []
        itNr = 0
        while not frBuf.at_eof():
            itNr += 1
            totItNr += 1
            [itemLength, offset] = frBuf.unpack('>HH')
            if offset > maxOffset:
                maxOffset = offset
            s = frBuf.readBytes(itemLength)
            delta_items.append(FrameDeltaItem(offset, s))
        deltas.append(FrameDelta(delta_items))
    return (sprite_count, sprite_size, deltas)
def parse_cast_lib_section(blob, loader_context):
    """Parse a cast-library ('MCsL') section into a CastLibraryTable.

    Header gives nElems entries of ofsPerElem sub-blobs each, addressed by
    an offset table.  Per entry the sub-blobs are: 0=name, 1=optional path,
    2=an i16, 3=four i16s (index range, assoc id, self index).
    """
    # Read header:
    buf = SeqBuffer(blob)
    [v1, nElems, ofsPerElem, nOffsets, v5] = buf.unpack('>iiHii')
    print("DB| Cast lib section header: nElems=%d, nOffsets=%d, ofsPerElem=%d, misc=%s" %
          (nElems, nOffsets, ofsPerElem, [v1, v5]))
    # Read offset table:
    offsets = []
    for i in range(nOffsets):
        [offset] = buf.unpack('>i')
        offsets.append(offset)
    base = buf.tell()
    offnr = 0
    table = []
    for enr in range(nElems):
        entry = []
        for i in range(ofsPerElem):
            subblob = buf.buf[base + offsets[offnr]:base + offsets[offnr + 1]]
            offnr += 1
            buf2 = SeqBuffer(subblob)
            if i == 0:
                item = buf2.unpackString8()
            elif i == 1:
                if buf2.bytes_left() > 0:
                    item = buf2.unpackString8()
                else:
                    item = None
            elif i == 2:
                [item] = buf2.unpack('>h')
            elif i == 3:
                [w1, w2, w3, w4] = buf2.unpack('>hhhh')
                item = (w1, w2, w3, w4)
            else:
                item = subblob
            entry.append(item)
        print("DB| Cast lib table entry #%d: %s" % (enr + 1, entry))
        [name, path, _zero, (low_idx, high_idx, assoc_id, self_idx)] = entry
        table.append(
            CastLibrary(enr + 1, name, path, assoc_id, (low_idx, high_idx), self_idx))
    return CastLibraryTable(table)