def _test(self, filename: str, source_must_remain: bool):
    """Run one move/copy round-trip and verify the destination.

    The outcome must be identical whether or not the source file was
    already compressed.
    """
    # Work on a private copy so the file in the source tree stays put.
    source = self.srcdir / filename
    dest = self.destdir / filename
    for path in (source, dest):
        path.parent.mkdir(parents=True, exist_ok=True)
    shutil.copy2(str(self.blendfiles / filename), str(source))

    operation = compressor.copy if source_must_remain else compressor.move
    operation(source, dest)

    self.assertEqual(source_must_remain, source.exists())
    self.assertTrue(dest.exists())

    if dest.suffix == '.blend':
        # Blend files are expected to come out compressed.
        self.bf = blendfile.BlendFile(dest)
        self.assertTrue(self.bf.is_compressed)
        return

    with dest.open('rb') as infile:
        magic = infile.read(3)
    if dest.suffix == '.jpg':
        self.assertEqual(b'\xFF\xD8\xFF', magic,
                         'Expected %s to be a JPEG' % dest)
    else:
        self.assertNotEqual(b'\x1f\x8b', magic[:2],
                            'Expected %s to be NOT compressed' % dest)
def test_loading(self):
    """A compressed blend file should load transparently."""
    path = self.blendfiles / 'basic_file_compressed.blend'
    self.bf = blendfile.BlendFile(path)
    self.assertTrue(self.bf.is_compressed)

    first_object = self.bf.code_index[b'OB'][0]
    self.assertEqual('OBümlaut',
                     first_object.get((b'id', b'name'), as_str=True))
def test_as_context(self):
    """Using the file as a context manager must close it on exit."""
    blendpath = self.blendfiles / 'basic_file_compressed.blend'
    with blendfile.BlendFile(blendpath) as bf:
        filepath, raw_filepath = bf.filepath, bf.raw_filepath

    self.assertTrue(bf.fileobj.closed)
    self.assertTrue(filepath.exists())
    # The decompressed temporary file must be cleaned up on exit.
    self.assertFalse(raw_filepath.exists())
def setUp(self):
    """Create a writable copy of the linked-cube file and open it."""
    self.orig = self.blendfiles / 'linked_cube.blend'
    self.to_modify = self.orig.with_name('linked_cube_modified.blend')
    # shutil.copyfile accepts path-like objects since Python 3.6, so the
    # str() conversions (and the TODO asking to remove them) are gone.
    copyfile(self.orig, self.to_modify)

    self.bf = blendfile.BlendFile(self.to_modify, mode='r+b')
    self.assertFalse(self.bf.is_compressed)
def test_circular_files(self):
    """Circularly dependent files must not cause endless iteration."""
    self.bf = blendfile.BlendFile(
        self.blendfiles / 'recursive_dependency_1.blend')

    found = {block.id_name: block
             for block in file2blocks.iter_blocks(self.bf)}

    self.assertNotEqual({}, found)
    for expected in (b'MAMaterial', b'OBCube', b'MECube'):
        self.assertIn(expected, found)
def test_change_path(self):
    """Changing a library path must survive a save/reload cycle."""
    new_path = b'//basic_file.blend'

    library = self.bf.code_index[b'LI'][0]
    # Change it from absolute to relative.
    library[b'filepath'] = new_path
    library[b'name'] = new_path

    # Reload the blend file to inspect that it was written properly;
    # use the reload() helper instead of duplicating close+reopen inline.
    self.reload()
    library = self.bf.code_index[b'LI'][0]
    self.assertEqual(new_path, library[b'filepath'])
    self.assertEqual(new_path, library[b'name'])
def test_id_blocks(self):
    """Iterating a doubly-linked file should expose library blocks."""
    self.bf = blendfile.BlendFile(self.blendfiles / 'doubly_linked.blend')

    foreign_blocks = {
        block.id_name: block
        for block in file2blocks.iter_blocks(self.bf)
        # Only register blocks from libraries.
        if block.bfile != self.bf
    }

    self.assertNotEqual({}, foreign_blocks)
    # It should find directly linked blocks (GRCubes and MABrick) as well
    # as indirectly linked (MECube³).
    self.assertIn(b'GRCubes', foreign_blocks)
    self.assertIn(b'MABrick', foreign_blocks)
    self.assertIn('MECube³'.encode(), foreign_blocks)
    self.assertIn('OBümlaut'.encode(), foreign_blocks)
def test_array_of_pointers(self):
    """Pointer arrays must dereference to the expected materials."""
    self.bf = blendfile.BlendFile(
        self.blendfiles / 'multiple_materials.blend')
    mesh = self.bf.code_index[b'ME'][0]
    assert isinstance(mesh, blendfile.BlendFileBlock)

    material_count = mesh[b'totcol']
    self.assertEqual(4, material_count)

    # Slots 1 and 3 share a material; anything else defaults to .002.
    name_by_index = {
        0: b'MAMaterial.000',
        1: b'MAMaterial.001',
        3: b'MAMaterial.001',
    }
    pointers = mesh.iter_array_of_pointers(b'mat', material_count)
    for index, material in enumerate(pointers):
        expected = name_by_index.get(index, b'MAMaterial.002')
        self.assertEqual(expected, material.id_name)
def test_array_of_lamp_textures(self):
    """Fixed-size pointer arrays must yield the lamp's texture slots."""
    self.bf = blendfile.BlendFile(self.blendfiles / 'lamp_textures.blend')
    lamp = self.bf.code_index[b'LA'][0]
    assert isinstance(lamp, blendfile.BlendFileBlock)

    # The first slot is also reachable via a plain pointer lookup.
    first_slot = lamp.get_pointer(b'mtex')
    first_tex = first_slot.get_pointer(b'tex')
    self.assertEqual(b'TE', first_tex.code)
    self.assertEqual(b'TEClouds', first_tex.id_name)

    expected_names = (b'TEClouds', b'TEVoronoi')
    slots = lamp.iter_fixed_array_of_pointers(b'mtex')
    for index, slot in enumerate(slots):
        if index >= len(expected_names):
            self.fail('Too many textures reported: %r' % slot)
        tex = slot.get_pointer(b'tex')
        self.assertEqual(b'TE', tex.code)
        self.assertEqual(expected_names[index], tex.id_name)
def cli_blocks(args):
    """CLI entry point: show per-type block statistics of a blend file.

    Prints a size table per block type (limited to ``args.limit`` rows),
    identifies the single biggest block of the most space-using type,
    lists which blocks point at it, and optionally (``args.dump``)
    hex-dumps its contents.

    :param args: parsed CLI arguments with ``blendfile`` (pathlib.Path),
        ``limit`` (int) and ``dump`` (bool) attributes.
    :returns: 3 when the file does not exist, None otherwise.
    """
    bpath = args.blendfile
    if not bpath.exists():
        log.fatal('File %s does not exist', args.blendfile)
        return 3

    per_blocktype = collections.defaultdict(BlockTypeInfo)

    print('Opening %s' % bpath)
    bfile = blendfile.BlendFile(bpath)
    print('Inspecting %s' % bpath)

    # Aggregate size statistics per block type; DNA1 is file metadata,
    # not scene data, so it is skipped.
    for block in bfile.blocks:
        if block.code == b'DNA1':
            continue
        index_as = block_key(block)
        info = per_blocktype[index_as]
        info.name = index_as
        info.total_bytes += block.size
        info.num_blocks += 1
        info.sizes.append(block.size)
        info.blocks.append(block)

    fmt = '%-35s %10s %10s %10s %10s'
    print(fmt % ('Block type', 'Total Size', 'Num blocks', 'Avg Size',
                 'Median'))
    print(fmt % (35 * '-', 10 * '-', 10 * '-', 10 * '-', 10 * '-'))
    infos = sorted(per_blocktype.values(), key=by_total_bytes, reverse=True)
    for info in infos[:args.limit]:
        # Upper median of the sizes; sizes is never empty here because
        # every info was created by at least one block.
        median_size = sorted(info.sizes)[len(info.sizes) // 2]
        print(fmt % (info.name,
                     common.humanize_bytes(info.total_bytes),
                     info.num_blocks,
                     common.humanize_bytes(info.total_bytes // info.num_blocks),
                     common.humanize_bytes(median_size)))

    print(70 * '-')

    # From the blocks of the most space-using category, the biggest block.
    # max() picks the first maximum, same as sorted(reverse=True)[0] did.
    biggest_block = max(infos[0].blocks, key=lambda blck: blck.size)
    print('Biggest %s block is %s at address %s' % (
        block_key(biggest_block),
        common.humanize_bytes(biggest_block.size),
        biggest_block.addr_old,
    ))

    print('Finding what points there')
    addr_to_find = biggest_block.addr_old
    found_pointer = False
    for block in bfile.blocks:
        for prop_path, prop_value in block.items_recursive():
            # Pointers are stored as integer old-addresses.
            if not isinstance(prop_value, int) or prop_value != addr_to_find:
                continue
            print('    ', block, prop_path)
            found_pointer = True

    if not found_pointer:
        print('Nothing points there')

    if args.dump:
        print('Hexdump:')
        bfile.fileobj.seek(biggest_block.file_offset)
        data = bfile.fileobj.read(biggest_block.size)
        line_len_bytes = 32
        for offset in range(0, len(data), line_len_bytes):
            # bytes.hex() produces the same lowercase hex string as the
            # old codecs.encode(..., 'hex') without the extra import.
            line = data[offset:offset + line_len_bytes].hex()
            print('%6d -' % offset,
                  ' '.join(line[i:i + 2] for i in range(0, len(line), 2)))
def test_no_datablocks(self):
    """A file containing only the magic bytes must raise NoDNA1Block."""
    corrupt = self.blendfiles / 'corrupt_only_magic.blend'
    with self.assertRaises(exceptions.NoDNA1Block):
        blendfile.BlendFile(corrupt)
def test_loading(self):
    """Opening a non-blend file must raise BlendFileError."""
    # This test module itself is a convenient guaranteed-not-a-blendfile.
    not_a_blendfile = pathlib.Path(__file__)
    with self.assertRaises(exceptions.BlendFileError):
        blendfile.BlendFile(not_a_blendfile)
def setUp(self):
    """Open the sequencer test file before each test."""
    blendpath = self.blendfiles / 'with_sequencer.blend'
    self.bf = blendfile.BlendFile(blendpath)
def setUp(self):
    """Open the basic test file before each test."""
    blendpath = self.blendfiles / 'basic_file.blend'
    self.bf = blendfile.BlendFile(blendpath)
def reload(self):
    """Close and reopen the modified file so on-disk changes are read back."""
    self.bf.close()
    self.bf = blendfile.BlendFile(self.to_modify, mode='r+b')