def parse_compressed_list(self, stream, identifier, blocksizes):
    """Parse one LIST record from a decompressed RIFF stream.

    The 4-byte size field is an index into *blocksizes*; the real size
    is looked up there. Returns a RiffList with parsed children, or a
    RiffUnparsedList holding the raw bytes if a child fails to parse.

    Fix: removed a leftover debug `print list_identifier` statement
    (it also made the file unparsable under Python 3).
    """
    rawsize = dword2py_int(stream.read(4))
    list_identifier = stream.read(4)
    size = blocksizes[rawsize]
    size_field = py_int2dword(size)
    if size & 1:
        size += 1  # chunks are padded to even length
    offset = stream.tell()
    obj = model.RiffList(identifier + size_field + list_identifier)
    while stream.tell() <= offset + size - 8:
        ret = self.parse_comressed_stream(stream, blocksizes)
        if ret is None:
            # Child parsing failed: rewind and keep remaining bytes verbatim.
            stream.seek(offset)
            chunk = stream.read(size - 4)
            return model.RiffUnparsedList(identifier + size_field +
                                          list_identifier + chunk)
        else:
            obj.childs.append(ret)
    return obj
def parse_compressed_object(self, stream, identifier, blocksizes):
    """Read one plain chunk from the decompressed stream.

    Returns None when the identifier does not look like a valid tag,
    otherwise a RiffObject wrapping identifier + size field + payload.
    """
    if not identifier[:3].isalnum():
        return None
    raw_index = dword2py_int(stream.read(4))
    real_size = blocksizes[raw_index]
    size_field = py_int2dword(real_size)
    # Payload is padded to an even number of bytes.
    padded = real_size + 1 if real_size & 1 else real_size
    payload = stream.read(padded)
    return model.RiffObject(identifier + size_field + payload)
def parse_polygon(obj):
    """Extract the polygon edge count from the object's <loda> chunk.

    The field position depends on the CDR version of the document.
    """
    chunk = obj.loda.chunk
    if obj.version == CDR6:
        pos = 100
    elif obj.version == CDR13:
        pos = 104
    else:
        pos = 112
    # Polygon angles
    obj.plg_num = dword2py_int(chunk[pos:pos + 4])
    obj.loda.cache_fields.append((pos, 4, 'num of polygon edges'))
def parse_compressed_object(self, stream, identifier, blocksizes):
    """Read one chunk from the decompressed stream into a RiffObject.

    Rejects identifiers whose first three characters are not
    alphanumeric by returning None.
    """
    if not identifier[:3].isalnum():
        return None
    size = blocksizes[dword2py_int(stream.read(4))]
    size_field = py_int2dword(size)
    if size % 2:
        size += 1  # word alignment padding
    return model.RiffObject(identifier + size_field + stream.read(size))
def __init__(self, chunk):
    """Wrap a raw RIFF chunk: cache its identifier, declared size and markup."""
    self.chunk = chunk
    header, size_field = chunk[:4], chunk[4:8]
    self.identifier = header
    self.chunk_size = dword2py_int(size_field)
    self.chunk_tag = '' + self.identifier  # force a fresh str copy of the tag
    self.cache_fields = [
        (0, 4, 'identifier'),
        (4, 4, 'chunk size'),
    ]
def update_from_chunk(self):
    """Parse the record index table into (record id, offset) pairs."""
    rifx = self.config.rifx
    records = self.data['records'] = []
    count = utils.word2py_int(self.chunk[12:14], rifx)
    pos = 14
    for _ in range(count):
        rec_id = utils.word2py_int(self.chunk[pos:pos + 2], rifx)
        offset = utils.dword2py_int(self.chunk[pos + 2:pos + 6], rifx)
        records.append((rec_id, offset))
        pos += 6  # 2-byte id + 4-byte offset per record
def parse_cmpr_list(self, buff):
    """Inflate a compressed RIFF list and parse its child chunks.

    The buffer holds two zlib streams back to back: the chunk payload
    (starting at offset 36) and, after `compressedsize` bytes, the
    table of real block sizes used while parsing.
    """
    import zlib
    import StringIO
    obj = model.RiffCmprList(buff)
    compressedsize = dword2py_int(buff[12:16])
    # decompressobj() stops at the end of the first zlib stream.
    payload = zlib.decompressobj().decompress(buff[36:])
    sizes_raw = zlib.decompress(buff[36 + compressedsize:])
    blocksizes = [dword2py_int(sizes_raw[i:i + 4])
                  for i in range(0, len(sizes_raw), 4)]
    stream = StringIO.StringIO(payload)
    while stream.tell() < len(payload):
        obj.childs.append(self.parse_comressed_stream(stream, blocksizes))
    return obj
def update(self):
    """Parse the <trfd> header fields and the 48-byte transformation matrix."""
    header = self.chunk[8:]
    # <trfd> chunk header processing
    self.data_num = dword2py_int(header[4:8])
    self.cache_fields.append((12, 4, 'number of data'))
    self.data_start = dword2py_int(header[8:12])
    self.cache_fields.append((16, 4, 'data start'))
    self.data_type_start = dword2py_int(header[12:16])
    self.cache_fields.append((20, 4, 'data types start'))
    # transformation matrix processing: CDR13 carries an extra 8-byte gap
    start = 48 if self.version == CDR13 else 40
    matrix_bytes = self.chunk[start:start + 48]
    self.cache_fields.append((start, 48, 'trafo matrix'))
    self.trafo = parse_matrix(matrix_bytes)
def update(self):
    """Read the polygon edge count from the <loda> data (version-dependent)."""
    CdrGraphObj.update(self)
    chunk = self.loda.chunk
    # Field position shifts with the document's CDR version.
    shift = {CDR6: 100, CDR13: 104}.get(self.version, 112)
    # Polygon angles
    self.plg_num = dword2py_int(chunk[shift:shift + 4])
    self.loda.cache_fields.append((shift, 4, 'num of polygon edges'))
def update(self):
    """Extract the number of polygon edges from the <loda> chunk."""
    CdrGraphObj.update(self)
    data = self.loda.chunk
    if self.version == CDR6:
        offset = 100
    elif self.version == CDR13:
        offset = 104
    else:
        offset = 112
    # Polygon angles
    self.plg_num = dword2py_int(data[offset:offset + 4])
    self.loda.cache_fields.append((offset, 4, 'num of polygon edges'))
def update(self):
    """Decode the <trfd> chunk: counters, data offsets and the trafo matrix."""
    body = self.chunk[8:]
    # <trfd> chunk header processing: three consecutive dword fields.
    for shift, label, attr in ((4, 'number of data', 'data_num'),
                               (8, 'data start', 'data_start'),
                               (12, 'data types start', 'data_type_start')):
        setattr(self, attr, dword2py_int(body[shift:shift + 4]))
        self.cache_fields.append((shift + 8, 4, label))
    # transformation matrix processing
    start = 32 + 8
    if self.version == CDR13:
        start += 8
    matrix = self.chunk[start:start + 48]
    self.cache_fields.append((start, 48, 'trafo matrix'))
    self.trafo = parse_matrix(matrix)
def parse_compressed_object(self, stream, identifier, blocksizes):
    """Read one chunk from the decompressed stream and instantiate its class.

    The model class is chosen by identifier via self.get_class().
    Returns None for identifiers that do not look like valid tags.
    """
    if not identifier[:3].isalnum():
        return None
    real_size = blocksizes[dword2py_int(stream.read(4))]
    size_field = py_int2dword(real_size)
    read_size = real_size + (real_size & 1)  # pad odd sizes to even
    chunk = stream.read(read_size)
    self.report_stream_position(stream.tell())
    factory = self.get_class(identifier)
    return factory(identifier + size_field + chunk)
def do_update(self, presenter, action=False):
    """Update the list node, then run the type-specific <loda> parser.

    Reads the object type dword at offset 0x18 of the child <loda>
    chunk; when obj_parse knows that type, the trafo and the
    type-specific parser are applied to self.

    Fix: replaced deprecated dict.has_key() with the `in` operator and
    `not x is None` with the idiomatic `x is not None` (same behavior,
    also forward-compatible with Python 3).
    """
    RiffList.do_update(self, presenter)
    self.obj_type = None
    lgob_chunk = find_chunk(self.childs, 'lgob')
    self.loda = find_chunk(lgob_chunk.childs, 'loda')
    self.obj_type = dword2py_int(self.loda.chunk[0x18:0x1c])
    if self.obj_type is not None and self.obj_type in obj_parse:
        parse_trafo(self)
        obj_parse[self.obj_type][0](self)
def __init__(self, chunk):
    """Wrap a raw LIST chunk and pre-split its header fields."""
    self.childs = []
    self.chunk = chunk
    self.identifier = 'LIST'
    self.chunk_size = dword2py_int(chunk[4:8])
    self.chunk_tag = self.chunk[8:12]
    self.cache_fields = [
        (0, 4, 'list identifier'),
        (4, 4, 'chunk size'),
        (8, 4, 'chunk tag'),
    ]
def parse_compressed_object(self, stream, identifier, blocksizes):
    """Parse a single chunk and build the matching model object for it."""
    if not identifier[:3].isalnum():
        return None
    rawsize = dword2py_int(stream.read(4))
    size = blocksizes[rawsize]
    size_field = py_int2dword(size)
    # Odd-sized payloads carry one padding byte.
    padded = size if not size & 1 else size + 1
    chunk = stream.read(padded)
    self.report_stream_position(stream.tell())
    return self.get_class(identifier)(identifier + size_field + chunk)
def __init__(self, chunk):
    """Parse a <loda> chunk header and build its [argtype, offset] table."""
    RiffObject.__init__(self, chunk)
    data = self.chunk
    # <loda> chunk header processing: four consecutive dword fields.
    self.data_num = dword2py_int(data[12:16])
    self.data_start = dword2py_int(data[16:20])
    self.data_type_start = dword2py_int(data[20:24])
    self.object_type = dword2py_int(data[24:28])
    self.cache_fields.append((12, 4, 'number of data'))
    self.cache_fields.append((16, 4, 'data start'))
    self.cache_fields.append((20, 4, 'data types start'))
    self.cache_fields.append((24, 4, 'object type'))
    num = self.data_num
    start = self.data_start + 8
    start_t = self.data_type_start + 8
    self.cache_fields.append((start, 4 * num, 'data offsets'))
    self.cache_fields.append((start_t, 4 * num, 'data type offsets'))
    # Offsets are stored forward while the matching types run backward.
    self.data_list = []
    for i in range(num):
        pos = start + i * 4
        pos_t = start_t + (num - 1 - i) * 4
        offset = dword2py_int(data[pos:pos + 4])
        argtype = dword2py_int(data[pos_t:pos_t + 4])
        self.data_list.append([argtype, offset])
def update_from_chunk(self):
    """Parse the page number and the (offset, layer name) table."""
    rifx = self.config.rifx
    layers = self.data['layers'] = []
    self.data['page'] = utils.word2py_int(self.chunk[10:12], rifx)
    count = utils.word2py_int(self.chunk[8:10], rifx)
    pos = 12
    for _ in range(count):
        offset = utils.dword2py_int(self.chunk[pos:pos + 4], rifx)
        name_sz = utils.word2py_int(self.chunk[pos + 4:pos + 6], rifx)
        name = self.chunk[pos + 6:pos + 6 + name_sz]
        layers.append((offset, name))
        pos += 6 + name_sz + 4  # 4 trailing bytes skipped after each name
def update_from_chunk(self):
    """Decode the CMX container header fields into self.data."""
    rifx = self.config.rifx
    chunk = self.chunk
    self.data['file_id'] = chunk[8:40].rstrip('\x00')
    self.data['os_type'] = chunk[40:56].rstrip('\x00')
    self.data['byte_order'] = chunk[56:60]
    if self.data['byte_order'] == cmx_const.CONT_BYTE_ORDER_BE:
        # NOTE(review): the local 'rifx' captured above is not refreshed
        # here; presumably config.rifx was already True for RIFX files
        # before this runs — confirm against the caller.
        self.config.rifx = True
    self.data['coord_size'] = chunk[60:62]
    if self.data['coord_size'] == cmx_const.CONT_COORDSIZE_16BIT:
        self.config.v16bit = True
    self.data['major'] = chunk[62:66]
    self.config.v1 = self.data['major'].startswith('\x31')
    self.data['minor'] = chunk[66:70]
    self.data['unit'] = chunk[70:72]
    self.data['factor'] = chunk[72:80]
    self.data['IndexSection'] = utils.dword2py_int(chunk[92:96], rifx)
    self.data['InfoSection'] = utils.dword2py_int(chunk[96:100], rifx)
    self.data['Thumbnail'] = utils.dword2py_int(chunk[100:104], rifx)
    sig = '>iiii' if rifx else '<iiii'
    self.data['bbox'] = struct.unpack(sig, chunk[104:120])
    self.data['tally'] = utils.dword2py_int(chunk[120:124], rifx)
def do_update(self, presenter, action=False):
    """Replace this generic list node with a typed object when possible.

    Reads the object type from the child <loda> chunk; when obj_dict
    knows that type, a specialized node is built, wired up and swapped
    into the parent's child list in place of self, then updated.
    Otherwise falls back to the plain RiffList update.

    Fixes: renamed local `type` (shadowed the builtin), replaced
    deprecated dict.has_key() with `in`, and `not x is None` with
    `x is not None`; insert-then-remove collapsed into an in-place
    replacement at the same index.
    """
    lgob_chunk = find_chunk(self.childs, 'lgob')
    loda_chunk = find_chunk(lgob_chunk.childs, 'loda')
    obj_type = dword2py_int(loda_chunk.chunk[0x18:0x1c])
    if obj_type is not None and obj_type in obj_dict:
        new_obj = obj_dict[obj_type](self.chunk)
        new_obj.parent = self.parent
        new_obj.version = self.version
        new_obj.childs = self.childs
        new_obj.loda = loda_chunk
        index = self.parent.childs.index(self)
        self.parent.childs[index] = new_obj  # swap self out in place
        new_obj.do_update(presenter)
    else:
        RiffList.do_update(self, presenter)
def set_defaults(self):
    """Fill self.data with default CMX container header values."""
    self.data.update({
        'identifier': cmx_const.CONT_ID,
        'file_id': cmx_const.CONT_FILE_ID,
        'os_type': cmx_const.CONT_OS_ID_WIN,
        'byte_order': cmx_const.CONT_BYTE_ORDER_LE,
        'minor': cmx_const.CONT_MINOR,
        'unit': cmx_const.CONT_UNIT_MM,
        'factor': cmx_const.CONT_FACTOR_MM,
        'IndexSection': 0,
        'InfoSection': 0,
        'bbox': (0, 0, 0, 0),
        'tally': 0,
    })
    # Config-dependent fields.
    if self.config.v16bit:
        self.data['coord_size'] = cmx_const.CONT_COORDSIZE_16BIT
    else:
        self.data['coord_size'] = cmx_const.CONT_COORDSIZE_32BIT
    if self.config.v1:
        self.data['major'] = cmx_const.CONT_MAJOR_V1
    else:
        self.data['major'] = cmx_const.CONT_MAJOR_V2
    # 0xffffffff marks "no thumbnail".
    self.data['Thumbnail'] = utils.dword2py_int(4 * '\xff', self.config.rifx)
def update_from_chunk(self):
    """Inflate the embedded packed stream and rebuild the chunk tree.

    List identifiers open a new nesting level; any other identifier is
    a leaf chunk whose payload is copied verbatim. The compressed bytes
    are preserved under data['cpng'] and the chunk is truncated to its
    20-byte header.
    """
    chunk = zlib.decompress(self.chunk[20:])
    pos = 0
    parent = self
    total = len(chunk)
    while pos < total:
        identifier = chunk[pos:pos + 4]
        sz = chunk[pos + 4:pos + 8]
        if identifier in cmx_const.LIST_IDS:
            # Lists only carry a 4-byte name; children follow inline.
            name = chunk[pos + 8:pos + 12]
            obj = make_cmx_chunk(self.config, identifier + sz + name)
            parent.add(obj)
            parent = obj
            pos += 12
        else:
            size = utils.dword2py_int(sz, self.config.rifx)
            if size & 1:  # chunks are word-aligned
                size += 1
            data = chunk[pos + 8:pos + 8 + size]
            parent.add(make_cmx_chunk(self.config, identifier + sz + data))
            pos += size + 8
    self.data['cpng'] = self.chunk[20:]
    self.chunk = self.chunk[:20]
def update_for_sword(self):
    """Register markup fields for the embedded DIB: header, palette, pixels."""
    CmxRiffElement.update_for_sword(self)
    clr_table_sz = 4 * utils.dword2py_int(self.chunk[44:48], self.config.rifx)
    pixels_at = 52 + clr_table_sz
    fields = [(8, 4, 'dwClipboardFormat')]
    # BITMAPINFOHEADER
    fields.append((12, 4, 'biSize - header size'))
    fields.append((16, 4, 'biWidth - image width'))
    fields.append((20, 4, 'biHeight - image height'))
    fields.append((24, 2, 'biPlanes'))
    fields.append((26, 2, 'biBitCount'))
    fields.append((28, 4, 'biCompression'))
    fields.append((32, 4, 'biSizeImage'))
    fields.append((36, 4, 'biXPelsPerMeter'))
    fields.append((40, 4, 'biYPelsPerMeter'))
    fields.append((44, 4, 'biClrUsed'))
    fields.append((48, 4, 'biClrImportant'))
    # COLOR TABLE
    fields.append((52, clr_table_sz, 'Color Table'))
    # Pixels
    fields.append((pixels_at, len(self.chunk) - pixels_at, 'Pixels'))
    self.cache_fields += fields
def parse_compressed_list(self, stream, identifier, blocksizes):
    """Parse a compressed LIST record, recursing into its children.

    Falls back to a RiffUnparsedList holding the raw bytes when any
    child fails to parse.
    """
    rawsize = dword2py_int(stream.read(4))
    list_identifier = stream.read(4)
    real_size = blocksizes[rawsize]
    size_field = py_int2dword(real_size)
    padded = real_size + (real_size & 1)  # pad to even length
    offset = stream.tell()
    list_class = self.get_class(identifier, list_identifier)
    obj = list_class(identifier + size_field + list_identifier)
    end = offset + padded - 8
    while stream.tell() <= end:
        child = self.parse_comressed_stream(stream, blocksizes)
        if child is None:
            # Unknown child: rewind and keep the remaining bytes verbatim.
            stream.seek(offset)
            raw = stream.read(padded - 4)
            return model.RiffUnparsedList(
                identifier + size_field + list_identifier + raw)
        obj.childs.append(child)
    return obj
def update_from_chunk(self):
    """Read the 'jump' dword at offset 4 of the chunk into self.data."""
    self.data['jump'] = utils.dword2py_int(self.chunk[4:8], self.config.rifx)
def update(self):
    """Decode curve geometry from the <loda> chunk into self.paths.

    Each finished path is a list: start point, then groups of segment
    points, terminated by a CURVE_OPENED or CURVE_CLOSED marker.
    Every point carries one flag byte stored after the coordinate block.
    """
    CdrGraphObj.update(self)
    data = self.loda.chunk
    offset = 108
    # Prefer the explicit coords offset from the loda data table, if present.
    for item in self.loda.data_list:
        if item[0] == const.DATA_COORDS:
            offset = item[1] + 8
    self.paths = []
    path = []
    points = []
    point1 = []
    point2 = []
    pointnum = dword2py_int(data[offset:offset + 4])
    self.num_of_points = pointnum
    self.loda.cache_fields.append((offset, 4, 'num of points'))
    self.loda.cache_fields.append(
        (offset + 4, 8 * pointnum, 'curve points'))
    self.loda.cache_fields.append(
        (offset + 4 + pointnum * 8, pointnum, 'point flags'))
    for i in range(pointnum):
        # Two dwords per point: x then y.
        x = parse_size_value(data[offset + 4 + i * 8:offset + 8 + i * 8])
        y = parse_size_value(data[offset + 8 + i * 8:offset + 12 + i * 8])
        # Flag byte for point i, located after all coordinate pairs.
        point_type = ord(data[offset + 4 + pointnum * 8 + i])
        # Bits 0x10/0x20 select the node marker.
        if point_type & 0x10 == 0 and point_type & 0x20 == 0:
            marker = NODE_CUSP
        if point_type & 0x10 == 0x10:
            marker = NODE_SMOOTH
        if point_type & 0x20 == 0x20:
            marker = NODE_SYMMETRICAL
        # Bits 0x40/0x80 both clear: this point starts a new subpath;
        # flush the previous one (as open) first.
        if point_type & 0x40 == 0 and point_type & 0x80 == 0:
            if path:
                path.append(deepcopy(points))
                path.append(CURVE_OPENED)
                self.paths.append(deepcopy(path))
                path = []
                points = []
                point1 = []
                point2 = []
            path.append([x, y])
        # 0x40 only: plain point appended as-is (control points reset).
        if point_type & 0x40 == 0x40 and point_type & 0x80 == 0:
            points.append([x, y])
            point1 = []
            point2 = []
        # 0x80 only: segment endpoint; emit collected control points + marker.
        if point_type & 0x40 == 0 and point_type & 0x80 == 0x80:
            points.append(deepcopy([point1, point2, [x, y], marker]))
            point1 = []
            point2 = []
        # 0x40 and 0x80: control point (first fills point1, second point2).
        if point_type & 0x40 == 0x40 and point_type & 0x80 == 0x80:
            if point1:
                point2 = [x, y]
            else:
                point1 = [x, y]
        # Bit 0x08: close the current subpath.
        if point_type & 8 == 8:
            if path and points:
                path.append(deepcopy(points))
                path.append(CURVE_CLOSED)
                self.paths.append(deepcopy(path))
                path = []
                points = []
    # Trailing open subpath, if any.
    if path:
        path.append(deepcopy(points))
        path.append(CURVE_OPENED)
        self.paths.append(deepcopy(path))
def readdword(self):
    """Read the next 4 bytes from the file and return them as an int."""
    raw = self.fileptr.read(4)
    return utils.dword2py_int(raw)
def update(self):
    """Decode curve geometry from the <loda> chunk into self.paths.

    A path is built as: start point, segment point groups, then a
    CURVE_OPENED/CURVE_CLOSED terminator. One flag byte per point,
    stored after the coordinate block, drives the state machine below.
    """
    CdrGraphObj.update(self)
    data = self.loda.chunk
    offset = 108
    # Use the explicit coords offset from the loda data table when present.
    for item in self.loda.data_list:
        if item[0] == const.DATA_COORDS:
            offset = item[1] + 8
    self.paths = []
    path = []
    points = []
    point1 = []
    point2 = []
    pointnum = dword2py_int(data[offset:offset + 4])
    self.num_of_points = pointnum
    self.loda.cache_fields.append((offset, 4, 'num of points'))
    self.loda.cache_fields.append((offset + 4, 8 * pointnum, 'curve points'))
    self.loda.cache_fields.append((offset + 4 + pointnum * 8, pointnum, 'point flags'))
    for i in range(pointnum):
        # Coordinate pair for point i.
        x = parse_size_value(data[offset + 4 + i * 8:offset + 8 + i * 8])
        y = parse_size_value(data[offset + 8 + i * 8:offset + 12 + i * 8])
        # Flag byte for point i.
        point_type = ord(data[offset + 4 + pointnum * 8 + i])
        # Bits 0x10/0x20 select the node marker.
        if point_type & 0x10 == 0 and point_type & 0x20 == 0:
            marker = NODE_CUSP
        if point_type & 0x10 == 0x10:
            marker = NODE_SMOOTH
        if point_type & 0x20 == 0x20:
            marker = NODE_SYMMETRICAL
        # 0x40/0x80 both clear: start of a new subpath; flush previous one.
        if point_type & 0x40 == 0 and point_type & 0x80 == 0:
            if path:
                path.append(deepcopy(points))
                path.append(CURVE_OPENED)
                self.paths.append(deepcopy(path))
                path = []
                points = []
                point1 = []
                point2 = []
            path.append([x, y])
        # 0x40 only: plain point appended as-is.
        if point_type & 0x40 == 0x40 and point_type & 0x80 == 0:
            points.append([x, y])
            point1 = []
            point2 = []
        # 0x80 only: segment endpoint with collected control points.
        if point_type & 0x40 == 0 and point_type & 0x80 == 0x80:
            points.append(deepcopy([point1, point2, [x, y], marker]))
            point1 = []
            point2 = []
        # 0x40 and 0x80: control point (point1 first, then point2).
        if point_type & 0x40 == 0x40 and point_type & 0x80 == 0x80:
            if point1:
                point2 = [x, y]
            else:
                point1 = [x, y]
        # Bit 0x08: close the current subpath.
        if point_type & 8 == 8:
            if path and points:
                path.append(deepcopy(points))
                path.append(CURVE_CLOSED)
                self.paths.append(deepcopy(path))
                path = []
                points = []
    # Trailing open subpath, if any.
    if path:
        path.append(deepcopy(points))
        path.append(CURVE_OPENED)
        self.paths.append(deepcopy(path))
def __init__(self, chunk):
    """Init a compressed-list node and decode its three size fields."""
    RiffList.__init__(self, chunk)
    # Header layout: compressed size, uncompressed size, blocksizes size.
    sizes = [dword2py_int(chunk[p:p + 4]) for p in (12, 16, 20)]
    self.compressedsize, self.uncompressedsize, self.blocksizessize = sizes