def _parse_pdb_header(stream):
    """Parse a Palm Database (PDB) header from *stream*.

    Reads the fixed 78-byte big-endian header, then the 8-byte-per-record
    offset table, and returns a Storage whose fields are the raw header
    values plus ``records``: a list of ``(offset, length)`` tuples, one
    per record.  A record's length is the gap to the next record's
    offset; the last record runs to end-of-file.

    NOTE(review): ``name`` comes back from ``struct.unpack`` as a raw
    32-byte field — the ``.replace("\\x00", "")`` below assumes it is a
    (Python 2) byte string; under Python 3 this would need a decode.
    """
    pdbheader = Storage()
    # Fixed 78-byte header: name, attributes, version, three timestamps,
    # modification number, appinfo/sortinfo offsets, type, creator,
    # unique-id seed, next-record-list id, record count.
    pdbheader.name, \
    pdbheader.attributes, \
    pdbheader.version, \
    pdbheader.creation_timestamp, \
    pdbheader.modification_timestamp, \
    pdbheader.last_backup_timestamp, \
    pdbheader.modification_number, \
    pdbheader.appinfo_offset, \
    pdbheader.sortinfo_offset, \
    pdbheader.type, \
    pdbheader.creator, \
    pdbheader.uniqueidseed, \
    pdbheader.nextrecordlistid, \
    pdbheader.num_records, \
        = struct.unpack(">32sHHLLLLLL4s4sLLH", stream.read(78))

    # Record offsets and lengths.  Each table entry is a 4-byte offset
    # followed by four attribute/unique-id bytes we ignore.
    records = []
    # FIX: guard against num_records == 0 — the original unconditionally
    # read a first offset entry and appended a bogus record even when
    # the header declared no records at all.
    if pdbheader.num_records > 0:
        start = struct.unpack(">LBBBB", stream.read(8))[0]
        for n in range(1, pdbheader.num_records):
            next_start = struct.unpack(">LBBBB", stream.read(8))[0]
            records.append((start, next_start - start))
            start = next_start
        stream.seek(0, 2)  # seek to EOF so the last record's length can be computed
        end = stream.tell()
        records.append((start, end - start))
    pdbheader.records = records

    # Clean up the name: drop NUL padding, collapse any run of
    # unexpected characters to a single underscore.
    pdbheader.name = re.sub("[^-A-Za-z0-9'\";:,. ]+", "_", pdbheader.name.replace("\x00", ""))
    return pdbheader
def _parse_exth_header(raw):
    """Parse an EXTH metadata block from the byte string *raw*.

    The first 12 bytes hold the identifier, total header length and
    record count; each following record is a type/length pair followed
    by ``length - 8`` bytes of payload.  Returns a Storage with
    ``identifier``, ``header_length``, ``record_count`` and ``records``
    (a list of Storages carrying ``type``, ``length`` and ``data``).
    """
    exth = Storage()
    exth.identifier, exth.header_length, exth.record_count = \
        struct.unpack('>4sLL', raw[:12])

    exthdata = raw[12:]
    records = []
    pos = 0
    for _ in range(exth.record_count):
        record = Storage()
        record.type, record.length = struct.unpack('>LL', exthdata[pos:pos + 8])
        # Payload is everything after the 8-byte type/length prefix.
        record.data = exthdata[pos + 8:pos + record.length]
        pos += record.length
        records.append(record)
    exth.records = records
    return exth