def test_trunc_zero(self):
    """truncate() with no size argument must empty the stream completely."""
    test_file = os.path.join(test_dir, "trunc_zero_test.aaf")
    with open(test_file, 'wb+') as fileobj:
        cfb = CompoundFileBinary(fileobj, 'wb+')
        stream = cfb.open("/zero_trunc", 'w')
        stream.write(b'some data')
        stream.seek(0)
        stream.truncate()
        cfb.close()
    with open(test_file, 'rb') as fileobj:
        cfb = CompoundFileBinary(fileobj, 'rb')
        stream = cfb.open("/zero_trunc", 'r')
        # a zero-length stream records no size and owns no data sectors
        assert stream.dir.byte_size == 0
        assert stream.dir.sector_id is None
def test_trunc_shrink(self):
    """Truncating below the mini-stream threshold moves the data back into
    the mini stream and keeps only the first `size` bytes."""
    test_file = os.path.join(test_dir, "trunc_shrink_test.aaf")
    with open(test_file, 'wb+') as fileobj:
        cfb = CompoundFileBinary(fileobj, 'w+')
        stream = cfb.open("/grow_trunc", 'w')
        stream.write(b"F" * 5000)
        # 5000 bytes is large enough to live in the regular FAT
        assert not stream.is_mini_stream()
        stream.truncate(256)
        assert stream.is_mini_stream()
        cfb.close()
    with open(test_file, 'rb') as fileobj:
        cfb = CompoundFileBinary(fileobj, 'rb')
        stream = cfb.open("/grow_trunc", 'r')
        assert stream.read() == b"F" * 256
def write_and_test(self, filename, data_list, chunksize=61):
    """Write each blob in *data_list* to its own stream in chunks, then
    verify contents while the archive is live and again after reopening.

    filename  : basename for the compound file created under test_dir
    data_list : list of bytes objects, one per stream
    chunksize : write granularity in bytes (exercises partial writes)
    """
    path = os.path.join(test_dir, filename)
    with io.open(path, 'wb+') as f:
        ss = CompoundFileBinary(f, 'wb+')
        for i, data in enumerate(data_list):
            s = ss.open("/test_stream%d" % i, 'w')
            # write in chunks without clobbering `data` (needed for compare)
            write_data = data
            while len(write_data):
                s.write(write_data[:chunksize])
                write_data = write_data[chunksize:]
            s.seek(0)
            result = s.read()
            assert result == data
            s.close()

        # re-read while the writable archive is still open
        for i, data in enumerate(data_list):
            s = ss.open("/test_stream%d" % i, 'r')
            s.seek(0)
            result = s.read()
            assert result == data

        ss.close()

    # reopen read-only and verify persisted sizes and contents
    with io.open(path, 'rb') as f:
        ss = CompoundFileBinary(f)
        for i, data in enumerate(data_list):
            s = ss.open("/test_stream%d" % i, 'r')
            # BUG FIX: was `assert s.dir.byte_size, len(data)` — that is an
            # assert with a *message*, which only truth-tested byte_size and
            # never compared it to the expected length.
            assert s.dir.byte_size == len(data)
            result = s.read()
            assert result == data
def test_seek(self):
    """Writing after seeking past the end of a stream extends it."""
    path = os.path.join(test_dir, "seek_test.aaf")
    with open(path, 'wb+') as fileobj:
        cfb = CompoundFileBinary(fileobj, 'wb+')
        tail = b"end of seek"

        # small seek gap: position must land right after the written bytes
        stream = cfb.open("/seektest", 'w')
        stream.seek(100)
        stream.write(tail)
        assert stream.tell() == 100 + len(tail)
        stream.close()

        # large seek gap: recorded byte_size must cover the gap plus data
        stream = cfb.open("/seek_large", 'w')
        seek_size = 1000000
        stream.seek(seek_size)
        stream.write(tail)
        assert stream.dir.byte_size == seek_size + len(tail)
        stream.close()
        cfb.close()
def test_copy(self):
    """Copy the full storage/stream tree of a source compound file into a
    new one, then reopen the copy and compare it against the source.

    NOTE(review): a later method in this class is also named `test_copy`
    and shadows this definition, so the test runner never executes this
    one — consider renaming one of the two.
    """
    src_path = os.path.join(test_files, "test_file_01.aaf")
    dst_path = os.path.join(test_dir, "test_copy.aaf")
    file_a = io.open(src_path, 'rb')
    try:
        ss_a = CompoundFileBinary(file_a, 'rb')
        with io.open(dst_path, 'wb+') as file_b:
            ss_b = CompoundFileBinary(file_b, 'wb+')
            print(ss_a.class_id)
            print(ss_a.root)
            # replicate directories first, then stream payloads
            for root, storage, streams in ss_a.walk():
                for item in storage:
                    entry = ss_b.makedir(item.path(), class_id=item.class_id)
                for item in streams:
                    s_a = ss_a.open(item.path(), 'r')
                    s_b = ss_b.open(item.path(), 'w')
                    s_b.write(s_a.read())
            ss_b.close()

        # reopen the copy read-only and compare against the source
        with io.open(dst_path, 'rb') as f:
            ss = CompoundFileBinary(f)
            print(ss.root)
            for root, storage, streams in ss.walk():
                assert ss_a.exists(root.path())
                for item in storage:
                    assert ss_a.exists(item.path())
                for item in streams:
                    s_a = ss_a.open(item.path(), 'r')
                    s_b = ss.open(item.path(), 'r')
                    assert s_a.read() == s_b.read()
    finally:
        # BUG FIX: file_a was previously never closed (resource leak)
        file_a.close()
def write_and_ovewrite(self, filename, data_list1, data_list2, chunksize=61):
    """Write data_list1, overwrite each stream with data_list2, then reopen
    and verify only the second write survives.

    (Name keeps the historical 'ovewrite' spelling — callers depend on it.)

    filename   : basename for the compound file created under test_dir
    data_list1 : initial bytes blobs, one per stream
    data_list2 : replacement bytes blobs written to the same stream paths
    chunksize  : write granularity in bytes
    """
    path = os.path.join(test_dir, filename)
    with io.open(path, 'wb+', buffering=io.DEFAULT_BUFFER_SIZE) as f:
        ss = CompoundFileBinary(f, 'wb+')
        for i, data in enumerate(data_list1):
            s = ss.open("/test_stream%d" % i, 'w')
            while len(data):
                s.write(data[:chunksize])
                data = data[chunksize:]
            s.close()

        # second pass reopens the same stream paths in 'w' mode, replacing
        # the first pass's contents
        for i, data in enumerate(data_list2):
            s = ss.open("/test_stream%d" % i, 'w')
            while len(data):
                s.write(data[:chunksize])
                data = data[chunksize:]
            s.close()
        ss.close()

    with io.open(path, 'rb', buffering=io.DEFAULT_BUFFER_SIZE) as f:
        ss = CompoundFileBinary(f)
        for i, data in enumerate(data_list2):
            s = ss.open("/test_stream%d" % i, 'r')
            # BUG FIX: was `assert s.dir.byte_size, len(data)` — an assert
            # with a message; it never compared the size to len(data).
            assert s.dir.byte_size == len(data)
            result = s.read()
            assert result == data
def test_copy(self):
    """Copy a complete storage/stream tree between compound files and
    verify it twice: while the destination is still open for writing, and
    again after closing and reopening it read-only."""
    src_path = os.path.join(test_files, "test_file_01.aaf")
    dst_path = os.path.join(test_dir, "test_copy.aaf")
    with io.open(src_path, 'rb') as file_a:
        ss_a = CompoundFileBinary(file_a, 'rb')
        with io.open(dst_path, 'wb+') as file_b:
            ss_b = CompoundFileBinary(file_b, 'wb+')

            # copy everything: directories (with class_id) then stream data
            for root, storage, streams in ss_a.walk():
                for item in storage:
                    entry = ss_b.makedir(item.path(), class_id=item.class_id)
                for item in streams:
                    s_a = ss_a.open(item.path(), 'r')
                    s_b = ss_b.open(item.path(), 'w')
                    s_b.write(s_a.read())

            # check everything exists while file live
            for root, storage, streams in ss_a.walk():
                assert ss_b.exists(root.path())
                for item in storage:
                    assert ss_b.exists(item.path())
                    s_b = ss_b.find(item.path())
                    assert s_b.class_id == item.class_id
                for item in streams:
                    s_a = ss_a.open(item.path(), 'r')
                    s_b = ss_b.open(item.path(), 'r')
                    assert s_a.read() == s_b.read()

            ss_b.close()

        # reopen file and check everything exists
        with io.open(dst_path, 'rb') as f:
            ss_b = CompoundFileBinary(f, 'rb')
            # check everything exists (same assertions against the
            # persisted, read-only copy)
            for root, storage, streams in ss_a.walk():
                assert ss_b.exists(root.path())
                for item in storage:
                    assert ss_b.exists(item.path())
                    s_b = ss_b.find(item.path())
                    assert s_b.class_id == item.class_id
                for item in streams:
                    s_a = ss_a.open(item.path(), 'r')
                    s_b = ss_b.open(item.path(), 'r')
                    assert s_a.read() == s_b.read()
def test_move(self):
    """Exercise move(): relocating a storage subtree, moving a stream into
    a directory (trailing-slash form), and rejecting invalid paths."""
    path = os.path.join(test_dir, "move_test.aaf")
    src = "/path/to/item"
    dst = "/dest/path/moved_item"
    stream_data = b'move stream data'
    stream_paths = [
        "/path/to/item/child1/sub/sub/stream",
        "/path/to/item/stream",
        "/path/to/item2/stream",
    ]
    dir_paths = [
        "/path/to/item/child1/sub/sub",
        "/path/to/item/child2",
        "/path/to/item/child3/more_stuff",
        "/path/to/item1/child1",
        "/path/to/item2/child2/sub/sub",
        "/path/to/item2/child2",
        "/dest/path/",
    ]
    result_dirs = [
        "/dest",
        "/dest/path",
        "/dest/path/moved_item",
        "/dest/path/moved_item/child1",
        "/dest/path/moved_item/child1/sub",
        "/dest/path/moved_item/child1/sub/sub",
        "/dest/path/moved_item/child3",
        "/path",
        "/path/to",
        "/path/to/item1",
        "/path/to/item2",
        "/path/to/item2/child2",
        "/path/to/item2/child2/sub",
    ]
    result_streams = [
        "/dest/path/moved_item/child1/sub/sub/stream",
        "/dest/path/moved_item/stream",
        '/dest/path/moved_item/child3/stream',
    ]

    def verify_layout(cfb):
        # every expected directory and stream must exist with the right kind
        for p in result_dirs:
            assert cfb.exists(p)
            entry = cfb.find(p)
            assert entry.isdir()
        for p in result_streams:
            assert cfb.exists(p)
            entry = cfb.find(p)
            assert entry.isfile()
            assert entry.open('r').read() == stream_data

    with open(path, 'wb+') as f:
        cfb = CompoundFileBinary(f, 'wb+')
        for p in dir_paths:
            cfb.makedirs(p)
        for p in stream_paths:
            s = cfb.open(p, 'w')
            s.write(stream_data)

        cfb.move(src, dst)

        # moving a stream onto an existing directory path must fail...
        with self.assertRaises(ValueError):
            cfb.move('/path/to/item2/stream', '/dest/path/moved_item/child3')
        # ...but the trailing-slash form means "into this directory"
        cfb.move('/path/to/item2/stream', '/dest/path/moved_item/child3/')
        # nonexistent source and nonexistent destination both raise
        with self.assertRaises(ValueError):
            cfb.move('/path/that/doesnt/exist', '/dest/path/moved_item/')
        with self.assertRaises(ValueError):
            cfb.move('/path/to/item2/child2', '/path/that/doesnt/exist')

        verify_layout(cfb)
        cfb.close()

    with open(path, 'rb') as f:
        cfb = CompoundFileBinary(f, 'rb')
        # walking the reopened tree must not raise
        for root, storage_items, streams in cfb.walk():
            pass
        verify_layout(cfb)