def test_parallel_write(self):
    """Copy several large files into the FUSE mount point concurrently,
    then verify each resulting data object's checksum via iget.
    """
    num_files = 4
    files = [
        tempfile.NamedTemporaryFile(prefix='test_fuse.test_parallel_copy')
        for _ in range(num_files)
    ]
    # contextlib.nested was deprecated in 2.7 and removed in Python 3;
    # ExitStack is the supported equivalent and also cleans up correctly
    # if entering one of the files raises.
    with contextlib.ExitStack() as stack:
        for f in files:
            stack.enter_context(f)
        hashes = []
        for f in files:
            lib.make_file(f.name, pow(10, 8), 'arbitrary')
            hashes.append(lib.md5_hex_file(f.name))
        proc_pool = multiprocessing.Pool(len(files))
        try:
            # One concurrent copy per file into the mount point.
            proc_pool_results = [
                proc_pool.apply_async(
                    shutil.copyfile,
                    (f.name,
                     os.path.join(self.mount_point, os.path.basename(f.name))))
                for f in files
            ]
            # .get() re-raises any exception from the worker.
            for r in proc_pool_results:
                r.get()
        finally:
            # The original leaked the pool's worker processes.
            proc_pool.close()
            proc_pool.join()
        for f, h in zip(files, hashes):
            self.admin.assert_icommand(
                ['ils', '-L'], 'STDOUT_SINGLELINE', os.path.basename(f.name))
            with tempfile.NamedTemporaryFile(
                    prefix='test_fuse.test_parallel_copy_get') as fget:
                self.admin.assert_icommand(
                    ['iget', '-f', os.path.basename(f.name), fget.name])
                assert lib.md5_hex_file(fget.name) == h
def helper_irodsFs_iput_to_mv(self, filesize):
    """iput a file of *filesize* bytes, move it out of the FUSE mount,
    and confirm the data object is gone while the moved copy's MD5
    matches the original.
    """
    frame_name = sys._getframe().f_code.co_name
    with tempfile.NamedTemporaryFile(prefix=frame_name + '_0') as source:
        lib.make_file(source.name, filesize, 'arbitrary')
        original_hash = lib.md5_hex_file(source.name)
        self.admin.assert_icommand(['iput', source.name])
        basename = os.path.basename(source.name)
        self.helper_irodsFs_stat(basename, self.mount_point,
                                 self.admin.session_collection, filesize)
        with tempfile.NamedTemporaryFile(prefix=frame_name + '_1') as dest:
            # Moving out of the mount should unregister the data object.
            shutil.move(os.path.join(self.mount_point, basename), dest.name)
            assert basename not in os.listdir(self.mount_point)
            self.admin.assert_icommand_fail(
                ['ils'], 'STDOUT_SINGLELINE', basename)
            moved_hash = lib.md5_hex_file(dest.name)
            assert original_hash == moved_hash
def helper_irodsFs_iget_and_hash(self, data_object_path):
    """iget *data_object_path* into a temporary file and return the
    fetched file's MD5 hex digest.
    """
    # The original computed os.path.basename(data_object_path) into an
    # unused local; removed.
    with tempfile.NamedTemporaryFile(
            prefix=sys._getframe().f_code.co_name) as f:
        self.admin.assert_icommand(
            ['iget', '-f', data_object_path, f.name])
        return lib.md5_hex_file(f.name)
def helper_irodsFs_cp_into_mount_point(self, target_dir, filesize):
    """Create a file of *filesize* bytes, copy it into *target_dir*
    (inside the mount point), check the copy's size, and return a
    (fullpath, md5_hex) tuple for the copied file.
    """
    with tempfile.NamedTemporaryFile(
            prefix=sys._getframe().f_code.co_name) as source:
        lib.make_file(source.name, filesize, 'arbitrary')
        digest = lib.md5_hex_file(source.name)
        shutil.copy(source.name, target_dir)
        copied_path = os.path.join(target_dir,
                                   os.path.basename(source.name))
        # The copy must have made it through the mount at full size.
        assert os.stat(copied_path).st_size == filesize
        return copied_path, digest
def helper_irodsFs_cp_into_mount_point(self, target_dir, filesize):
    """Copy a freshly created *filesize*-byte file into *target_dir*,
    verify its size there, and return the copied file's path together
    with its MD5 hex digest.
    """
    prefix = sys._getframe().f_code.co_name
    with tempfile.NamedTemporaryFile(prefix=prefix) as scratch:
        lib.make_file(scratch.name, filesize, 'arbitrary')
        checksum = lib.md5_hex_file(scratch.name)
        shutil.copy(scratch.name, target_dir)
        destination = os.path.join(target_dir,
                                   os.path.basename(scratch.name))
        # Size check catches truncated writes through the mount.
        assert os.stat(destination).st_size == filesize
        return destination, checksum
def test_parallel_write(self):
    """Write several 100 MB files into the mount point in parallel and
    verify each registered data object's checksum.
    """
    num_files = 4
    files = [
        tempfile.NamedTemporaryFile(prefix='test_fuse.test_parallel_copy')
        for _ in range(num_files)
    ]
    # Replaces contextlib.nested (removed in Python 3) with ExitStack.
    with contextlib.ExitStack() as stack:
        for f in files:
            stack.enter_context(f)
        hashes = []
        for f in files:
            lib.make_file(f.name, pow(10, 8), 'arbitrary')
            hashes.append(lib.md5_hex_file(f.name))
        proc_pool = multiprocessing.Pool(len(files))
        try:
            proc_pool_results = [
                proc_pool.apply_async(
                    shutil.copyfile,
                    (f.name,
                     os.path.join(self.mount_point, os.path.basename(f.name))))
                for f in files
            ]
            for r in proc_pool_results:
                # Propagates any worker exception.
                r.get()
        finally:
            # Fix: the pool was never shut down, leaking worker processes.
            proc_pool.close()
            proc_pool.join()
        for f, h in zip(files, hashes):
            self.admin.assert_icommand(
                ['ils', '-L'], 'STDOUT_SINGLELINE', os.path.basename(f.name))
            with tempfile.NamedTemporaryFile(
                    prefix='test_fuse.test_parallel_copy_get') as fget:
                self.admin.assert_icommand(
                    ['iget', '-f', os.path.basename(f.name), fget.name])
                assert lib.md5_hex_file(fget.name) == h
def helper_irodsFs_iget_and_hash(self, data_object_path):
    """Fetch *data_object_path* with iget and return the MD5 hex digest
    of the downloaded file.
    """
    # Fix: dropped the unused `basename` local the original computed.
    with tempfile.NamedTemporaryFile(
            prefix=sys._getframe().f_code.co_name) as f:
        self.admin.assert_icommand(
            ['iget', '-f', data_object_path, f.name])
        hash_ = lib.md5_hex_file(f.name)
    return hash_