def test_makedirs(self):
    """makedirs() creates nested directory trees; rmtree() removes them."""
    with NamedTemporaryDirectory() as tmpdir:
        base = Path(tmpdir)
        # Placeholder file so that when removedirs() is called,
        # it doesn't remove the temporary directory itself.
        placeholder = base / 'temp.txt'
        with placeholder.open('w') as handle:
            handle.write('blah')
        try:
            outer = base / 'foo'
            deepest = outer / 'bar' / 'baz' / 'boz'

            deepest.makedirs()
            try:
                assert deepest.isdir()
            finally:
                outer.rmtree()
            assert not outer.exists()
            assert base.exists()

            # Same round-trip, but with explicit permission modes.
            outer.mkdir(0o750)
            deepest.makedirs(0o700)
            try:
                assert deepest.isdir()
            finally:
                outer.rmtree()
            assert not outer.exists()
            assert base.exists()
        finally:
            os.remove(placeholder)
def setUp(self):
    # Swift integration tests require live credentials supplied through the
    # environment; skip the whole test case when they are absent.
    super(SwiftIntegrationTest, self).setUp()
    if not os.environ.get('SWIFT_TEST_USERNAME'):
        raise unittest.SkipTest(
            'SWIFT_TEST_USERNAME env var not set. Skipping integration test')

    # Disable loggers so nose output wont be trashed
    logging.getLogger('requests').setLevel(logging.CRITICAL)
    logging.getLogger('swiftclient').setLevel(logging.CRITICAL)
    logging.getLogger('keystoneclient').setLevel(logging.CRITICAL)

    settings.update({
        'swift': {
            'username': os.environ.get('SWIFT_TEST_USERNAME'),
            'password': os.environ.get('SWIFT_TEST_PASSWORD'),
            'num_retries': 5
        }})

    # fall back on to swiftstack auth for tenant
    tenant = os.environ.get('SWIFT_TEST_TENANT',
                            'AUTH_%s' % os.environ['SWIFT_TEST_USERNAME'])
    # uuid4 container name avoids collisions between concurrent test runs.
    self.test_container = Path('swift://%s/%s' % (tenant, uuid.uuid4()))
    if self.test_container.exists():
        raise ValueError('test container %s already exists.'
                         % self.test_container)

    try:
        self.test_container.post()
    except BaseException:
        # Clean up any partially-created container, then re-raise.
        self.test_container.rmtree()
        raise

    self.test_dir = self.test_container / 'test'
def test_copytree_to_from_dir_w_manifest(self):
    """Round-trips a directory with use_manifest=True and checks that a
    missing remote object makes a re-download fail the manifest condition."""
    num_test_objs = 10
    test_obj_size = 100
    with NamedTemporaryDirectory(change_dir=True) as tmp_d:
        self.create_dataset(tmp_d, num_test_objs, test_obj_size)
        # Make a nested file and an empty directory for testing purposes
        tmp_d = Path(tmp_d)
        os.mkdir(tmp_d / 'my_dir')
        open(tmp_d / 'my_dir' / 'empty_file', 'w').close()
        os.mkdir(tmp_d / 'my_dir' / 'empty_dir')

        stor.copytree('.', self.test_dir, use_manifest=True)

        # Validate the contents of the manifest file
        manifest_contents = utils.get_data_manifest_contents(self.test_dir)
        expected_contents = self.get_dataset_obj_names(num_test_objs)
        expected_contents.extend(['my_dir/empty_file',
                                  'my_dir/empty_dir/'])
        expected_contents = [Path('test') / c for c in expected_contents]
        self.assertEquals(set(manifest_contents), set(expected_contents))

    with NamedTemporaryDirectory(change_dir=True) as tmp_d:
        # Download the results successfully
        Path(self.test_dir).copytree('test', use_manifest=True)

        # Now delete one of the objects from s3. A second download
        # will fail with a condition error
        Path(self.test_dir / 'my_dir' / 'empty_dir/').remove()
        with self.assertRaises(exceptions.ConditionNotMetError):
            Path(self.test_dir).copytree('test', use_manifest=True,
                                         num_retries=0)
def test_listdir_other_encoding(self):
    """
    Some filesystems allow non-character sequences in path names.
    ``.listdir`` should still function in this case. See issue #61 for
    details.
    """
    raise unittest.SkipTest('test copied over was broken, need to fix')
    # NOTE: everything below is unreachable until the skip above is removed;
    # it is kept as a starting point for the eventual fix.
    with NamedTemporaryDirectory() as tmpdir:
        assert Path(tmpdir).listdir() == []
        tmpdir_bytes = str(tmpdir).encode('ascii')

        # latin-1 bytes that are not valid UTF-8, to exercise raw filenames
        filename = 'r\xe9\xf1emi'.encode('latin-1')
        pathname = os.path.join(tmpdir_bytes, filename)
        with open(pathname, 'w'):
            pass
        # first demonstrate that os.listdir works
        assert os.listdir(tmpdir_bytes)

        # now try with path.py
        results = Path(tmpdir).listdir()
        assert len(results) == 1
        res, = results
        assert isinstance(res, Path)
        # OS X seems to encode the bytes in the filename as %XX characters.
        if platform.system() == 'Darwin':
            assert res.basename() == 'r%E9%F1emi'
            return
        assert len(res.basename()) == len(filename)
def test_list_methods(self):
    """list()/listdir() behavior for missing buckets, empty prefixes,
    starts_with filtering, and nested pseudo-directories."""
    fake_bucket = Path('s3://stor-test-bucket2')
    with self.assertRaises(exceptions.NotFoundError):
        fake_bucket.list()
    fake_folder = self.test_bucket / 'not_a_dir'
    self.assertEquals([], fake_folder.list())

    with NamedTemporaryDirectory(change_dir=True):
        # Two top-level files plus one file nested two levels deep.
        for top_name in ('file1.txt', 'file2.txt'):
            open(top_name, 'w').close()
        os.makedirs('nested_dir/dir')
        open('nested_dir/dir/file3.txt', 'w').close()
        self.test_dir.upload(['.'])

        listed = self.test_dir.list()
        prefixed = self.test_bucket.list(starts_with='test')
        self.assertEquals(set(listed), set(prefixed))
        self.assertEquals(
            set(listed),
            {self.test_dir / 'file1.txt',
             self.test_dir / 'file2.txt',
             self.test_dir / 'nested_dir/dir/file3.txt'})

        self.assertEquals(
            set(self.test_dir.listdir()),
            {self.test_dir / 'file1.txt',
             self.test_dir / 'file2.txt',
             self.test_dir / 'nested_dir/'})
        # A trailing slash must not change listdir results.
        self.assertTrue(self.test_dir.listdir() == (self.test_dir + '/').listdir())
def test_swift_destination(self, mock_upload):
    """copy() to a swift object path uploads into the parent container dir."""
    dest = Path('swift://tenant/container/file.txt')
    with tempfile.NamedTemporaryFile() as tmp_f:
        Path(tmp_f.name).copy(dest)

        call_args = mock_upload.call_args_list[0][0]
        self.assertEquals(call_args[0], dest.parent)
        uploaded = call_args[1][0]
        self.assertEquals(uploaded.source, tmp_f.name)
        self.assertEquals(uploaded.object_name, 'file.txt')
def test_s3_destination(self, mock_upload):
    """copy() to an s3 key uploads into the parent dir with the full key name."""
    dest = Path('s3://bucket/key/file.txt')
    with tempfile.NamedTemporaryFile() as tmp_f:
        Path(tmp_f.name).copy(dest)

        call_args = mock_upload.call_args_list[0][0]
        self.assertEquals(call_args[0], dest.parent)
        uploaded = call_args[1][0]
        self.assertEquals(uploaded.source, tmp_f.name)
        self.assertEquals(uploaded.object_name, 'key/file.txt')
def setup_posix_files(self, files):
    """Sets up posix files for testing

    Args:
        files (List[Str]): list of relative posix files to be created.
    """
    for i, curr_file in enumerate(files):
        posix_p = Path('./{test_folder}/{path}'.format(
            test_folder=self.project, path=curr_file))
        # Bug fix: use a context manager so the handle is closed
        # deterministically; the original leaked the object returned by
        # open() (and on some platforms an unflushed write could be lost).
        with posix_p.open(mode='w') as f:
            f.write('data' + str(i))
    self.addCleanup(self.teardown_posix_files)
def setUp(self):
    # S3 integration tests require live AWS credentials; skip otherwise.
    super(S3IntegrationTest, self).setUp()
    # NOTE(review): this checks AWS_ACCESS_KEY_ID (not a *_TEST_* secret) —
    # presumably boto3 reads credentials straight from the environment here
    # since no settings.update() follows; confirm against the CI env setup.
    if not (os.environ.get('AWS_TEST_ACCESS_KEY_ID') and
            os.environ.get('AWS_ACCESS_KEY_ID')):
        raise unittest.SkipTest(
            'AWS_TEST_ACCESS_KEY_ID env var not set. Skipping integration test'
        )

    # Disable loggers so nose output is clean
    logging.getLogger('botocore').setLevel(logging.CRITICAL)

    self.test_bucket = Path('s3://stor-test-bucket')
    self.test_dir = self.test_bucket / 'test'
def _env_chdir(pth):
    """Sets the new current working directory."""
    parser = _get_env()
    # Only OBS (swift/s3/...) paths may be stored as a service cwd.
    if not utils.is_obs_path(pth):
        raise ValueError('%s is an invalid path' % pth)
    service = Path(pth).drive.rstrip(':/')
    # A bare drive is accepted as-is; anything deeper must be an existing
    # directory and is normalized without its trailing slash.
    if pth != Path(pth).drive:
        if not Path(pth).isdir():
            raise ValueError('%s is not a directory' % pth)
        pth = utils.remove_trailing_slash(pth)
    parser.set('env', service, pth)
    with open(ENV_FILE, 'w') as outfile:
        parser.write(outfile)
def test_cached_auth_and_auth_invalidation(self):
    """Verifies keystone auth results are cached across stat() calls, that a
    poisoned cache triggers exactly one retry-and-reauth cycle, and that
    keystone Unauthorized errors surface as swift.AuthenticationError."""
    from swiftclient.client import get_auth_keystone as real_get_keystone
    swift._clear_cached_auth_credentials()
    tenant = self.test_container.tenant
    with mock.patch('swiftclient.client.get_auth_keystone', autospec=True) as mock_get_ks:
        mock_get_ks.side_effect = real_get_keystone
        s = Path(self.test_container).stat()
        self.assertEquals(s['Account'], tenant)
        self.assertEquals(len(mock_get_ks.call_args_list), 1)

        # The keystone auth should not be called on another stat
        mock_get_ks.reset_mock()
        s = Path(self.test_container).stat()
        self.assertEquals(s['Account'], tenant)
        self.assertEquals(len(mock_get_ks.call_args_list), 0)

        # Set the auth cache to something bad. The auth keystone should
        # be called twice on another stat. It's first called by the swiftclient
        # when retrying auth (with the bad token) and then called by us without
        # a token after the swiftclient raises an authorization error.
        mock_get_ks.reset_mock()
        swift._cached_auth_token_map[tenant]['creds'][
            'os_auth_token'] = 'bad_auth'
        s = Path(self.test_container).stat()
        self.assertEquals(s['Account'], tenant)
        self.assertEquals(len(mock_get_ks.call_args_list), 2)
        # Note that the auth_token is passed into the keystone client but then popped
        # from the kwargs. Assert that an auth token is no longer part of the retry calls
        self.assertTrue(
            'auth_token' not in mock_get_ks.call_args_list[0][0][3])
        self.assertTrue(
            'auth_token' not in mock_get_ks.call_args_list[1][0][3])

        # Now make the auth always be invalid and verify that an auth error is thrown
        # This also tests that keystone auth errors are propagated as swift
        # AuthenticationErrors
        mock_get_ks.reset_mock()
        swift._clear_cached_auth_credentials()
        with mock.patch(
                'keystoneclient.v2_0.client.Client') as mock_ks_client:
            from keystoneclient.exceptions import Unauthorized
            mock_ks_client.side_effect = Unauthorized
            with self.assertRaises(swift.AuthenticationError):
                Path(self.test_container).stat()

            # Verify that getting the auth was called two more times because of retry
            # logic
            self.assertEquals(len(mock_get_ks.call_args_list), 2)
def create_dataset(self, directory, num_objects, min_object_size):
    """Creates a test dataset with predictable names and contents

    Files are named from 0 to num_objects (exclusive), and their
    contents is file_name * min_object_size. Note that the actual
    object size is dependent on the object name and should be taken
    into consideration when testing.
    """
    target = Path(directory)
    target.makedirs_p()
    with target:
        for obj_name in self.get_dataset_obj_names(num_objects):
            contents = self.get_dataset_obj_contents(obj_name, min_object_size)
            with open(obj_name, 'w') as out:
                out.write(contents)
def test_upload_multiple_dirs(self):
    """upload() with a mix of whole dirs, single nested files, and an empty
    dir — only the explicitly listed entries should end up remote."""
    with NamedTemporaryDirectory(change_dir=True) as tmp_d:
        num_test_objs = 10
        tmp_d = Path(tmp_d)

        # Create files filled with random data.
        path1 = tmp_d / 'dir1'
        os.mkdir(path1)
        self.create_dataset(path1, num_test_objs, 10)

        # Create empty dir and file.
        path2 = tmp_d / 'dir2'
        os.mkdir(path2)
        os.mkdir(path2 / 'my_dir')
        open(path2 / 'my_dir' / 'included_file', 'w').close()
        open(path2 / 'my_dir' / 'excluded_file', 'w').close()
        os.mkdir(path2 / 'my_dir' / 'included_dir')
        os.mkdir(path2 / 'my_dir' / 'excluded_dir')

        # Create file in the top level directory.
        open(tmp_d / 'top_level_file', 'w').close()

        # Note: the 'excluded_*' entries are deliberately NOT in this list.
        to_upload = [
            'dir1',
            'dir2/my_dir/included_file',
            'dir2/my_dir/included_dir',
            'top_level_file',
        ]
        with tmp_d:
            dx_p = self.test_dir / 'subdir'
            dx_p.upload(to_upload)

        # Validate the contents of the manifest file
        uploaded_contents = stor.list(dx_p)
        expected_contents = [
            Path('dir1') / name
            for name in self.get_dataset_obj_names(num_test_objs)
        ]
        expected_contents.extend([
            'dir2/my_dir/included_file',
            'top_level_file',
        ])
        expected_contents = [dx_p / c for c in expected_contents]
        self.assertEquals(set(uploaded_contents), set(expected_contents))

        # Uploading an empty directory entry should create a remote folder.
        empty_dir = dx_p / 'dir2/my_dir/included_dir'
        self.assertTrue(stor.isdir(empty_dir))
def upload(self, to_upload, **kwargs):
    """Upload a list of files and directories to a directory.

    This is not a batch level operation. If some file errors, the files
    uploaded before will remain present.

    Args:
        to_upload (List[Union[str, OBSUploadObject]]): A list of posix
            file names, directory names, or OBSUploadObject objects to
            upload.

    Raises:
        ValueError: When source path is not a directory
        TargetExistsError: When destination directory already exists
    """
    # Keep caller-supplied OBSUploadObjects as-is; bare path names are
    # expanded into every file/dir beneath them.
    dx_upload_objects = [
        name for name in to_upload
        if isinstance(name, OBSUploadObject)
    ]
    all_files_to_upload = utils.walk_files_and_dirs([
        name for name in to_upload
        if not isinstance(name, OBSUploadObject)
    ])
    # Derive each object name relative to this path's resource (if any).
    dx_upload_objects.extend([
        OBSUploadObject(
            f,
            object_name=('/' + self.resource
                         if self.resource
                         else Path('')) / utils.file_name_to_object_name(f))
        for f in all_files_to_upload
    ])

    for upload_obj in dx_upload_objects:
        upload_obj.object_name = Path(upload_obj.object_name)
        upload_obj.source = Path(upload_obj.source)
        dest_file = Path('{drive}{project}:{path}'.format(
            drive=self.drive, project=self.canonical_project,
            path=upload_obj.object_name))

        if upload_obj.source.isfile():
            dest_is_file = dest_file.isfile()
            if dest_is_file:  # only occurs if upload is called directly with existing objects
                logger.warning(
                    'Destination path ({}) already exists, will not cause '
                    'duplicate file objects on the platform. Skipping...'.format(dest_file))
            else:
                with _wrap_dx_calls():
                    # parents=True creates intermediate remote folders.
                    dxpy.upload_local_file(
                        filename=upload_obj.source,
                        project=self.canonical_project,
                        folder='/' + (dest_file.parent.resource or ''),
                        parents=True,
                        name=dest_file.name)
        elif upload_obj.source.isdir():
            # Directories (possibly empty) become remote folders.
            dest_file.makedirs_p()
        else:
            raise stor_exceptions.NotFoundError(
                'Source path ({}) does not exist. Please provide a valid source'
                .format(upload_obj.source))
def test_condition_failures(self):
    """A never-satisfiable list condition must retry num_retries times
    (sleeping between attempts) and then raise ConditionNotMetError."""
    num_test_objs = 20
    test_obj_size = 100
    test_dir = self.test_container / 'test'
    with NamedTemporaryDirectory(change_dir=True) as tmp_d:
        self.create_dataset(tmp_d, num_test_objs, test_obj_size)
        Path('.').copytree(test_dir)

    # Verify a ConditionNotMet exception is thrown when attempting to list
    # a file that hasn't been uploaded
    expected_objs = {
        test_dir / which_obj
        for which_obj in self.get_dataset_obj_names(num_test_objs + 1)
    }

    num_retries = settings.get()['swift']['num_retries']
    with mock.patch('time.sleep') as mock_sleep:
        with self.assertRaises(swift.ConditionNotMetError):
            test_dir.list(condition=lambda results: expected_objs == set(results))
        # One sleep per retry; time.sleep is patched to keep the test fast.
        self.assertTrue(num_retries > 0)
        self.assertEquals(len(mock_sleep.call_args_list), num_retries)

    # Verify that the condition passes when excluding the non-extant file
    expected_objs = {
        test_dir / which_obj
        for which_obj in self.get_dataset_obj_names(num_test_objs)
    }
    objs = test_dir.list(condition=lambda results: expected_objs == set(results))
    self.assertEquals(expected_objs, set(objs))
def test_properties(self):
    """Exercises parent/name/namebase/ext/drive on a platform-specific path."""
    # Create sample path object.
    sample = Path(p(nt='C:\\Program Files\\Python\\Lib\\xyzzy.py',
                    posix='/usr/local/python/lib/xyzzy.py'))

    # .parent
    assert sample.parent == p(nt='C:\\Program Files\\Python\\Lib',
                              posix='/usr/local/python/lib')

    # .name
    assert sample.name == 'xyzzy.py'
    assert sample.parent.name == p(nt='Lib', posix='lib')

    # .namebase
    assert sample.namebase == 'xyzzy'

    # .ext
    assert sample.ext == '.py'
    assert sample.parent.ext == ''

    # .drive
    assert sample.drive == p(nt='C:', posix='')
def test_condition(self):
    """list(condition=...) raises until the predicate over results holds."""
    obj_count = 20
    obj_size = 100
    with NamedTemporaryDirectory(change_dir=True) as tmp_d:
        self.create_dataset(tmp_d, obj_count, obj_size)
        Path('.').copytree(self.test_dir)

    # Expecting one object more than was uploaded can never succeed.
    missing_one = {
        self.test_dir / name
        for name in self.get_dataset_obj_names(obj_count + 1)
    }
    with self.assertRaises(exceptions.ConditionNotMetError):
        self.test_dir.list(
            condition=lambda results: missing_one == set(results))

    # Excluding the non-existent object, the condition passes.
    uploaded = {
        self.test_dir / name
        for name in self.get_dataset_obj_names(obj_count)
    }
    objs = self.test_dir.list(
        condition=lambda results: uploaded == set(results))
    self.assertEquals(uploaded, set(objs))
def test_disable_backoff(self):
    """num_retries=0 should be forwarded to copy as swift_retry_options."""
    target = Path('swift://AUTH_stor_test/container/test/')
    retry_opts = {'num_retries': 0}
    utils.is_writeable(target, retry_opts)
    self.mock_copy.assert_called_with(
        self.filename, target, swift_retry_options=retry_opts)
def test_list_glob(self):
    """glob('1*') returns exactly the uploaded objects starting with '1'."""
    obj_count = 20
    obj_size = 100
    test_dir = self.test_container / 'test'
    with NamedTemporaryDirectory(change_dir=True) as tmp_d:
        self.create_dataset(tmp_d, obj_count, obj_size)
        Path('.').copytree(test_dir)

    all_names = self.get_dataset_obj_names(obj_count)
    expected_objs = {test_dir / name for name in all_names}
    listed = set(
        test_dir.list(condition=lambda results: len(results) == obj_count))
    self.assertEquals(len(listed), obj_count)
    self.assertEquals(listed, expected_objs)

    expected_glob = {
        test_dir / name for name in all_names if name.startswith('1')
    }
    self.assertTrue(len(expected_glob) > 1)
    matched = set(test_dir.glob(
        '1*',
        condition=lambda results: len(results) == len(expected_glob)))
    self.assertEquals(matched, expected_glob)
def test_rmtree(self):
    """rmtree() on a pseudo-dir removes only its objects; on the container,
    removes everything (eventually, once deletion replicates)."""
    with NamedTemporaryDirectory(change_dir=True) as tmp_d:
        # Make a couple empty test files and nested files
        tmp_d = Path(tmp_d)
        os.mkdir(tmp_d / 'my_dir')
        open(tmp_d / 'my_dir' / 'dir_file1', 'w').close()
        open(tmp_d / 'my_dir' / 'dir_file2', 'w').close()
        open(tmp_d / 'base_file1', 'w').close()
        open(tmp_d / 'base_file2', 'w').close()

        stor.copytree('.', self.test_container, use_manifest=True)

        swift_dir = self.test_container / 'my_dir'
        self.assertEquals(len(swift_dir.list()), 2)
        swift_dir.rmtree()
        self.assertEquals(len(swift_dir.list()), 0)

        base_contents = self.test_container.list()
        self.assertTrue((self.test_container / 'base_file1') in base_contents)
        # Bug fix: this previously re-checked base_file1 twice, leaving
        # base_file2 unverified.
        self.assertTrue((self.test_container / 'base_file2') in base_contents)

        self.test_container.rmtree()

        # TODO figure out a better way to test that the container no longer exists.
        with self.assertRaises(swift.NotFoundError):
            # Replication may have not happened yet for container deletion. Keep
            # listing in intervals until a NotFoundError is thrown
            for i in (0, 1, 3):
                time.sleep(i)
                self.test_container.list()
def process_args(args):
    """Dispatch a parsed CLI namespace to its handler function.

    Pops the bookkeeping entries (``config``, ``func``, ``path``, ``cmd``)
    off the namespace, applies an optional config file to settings, and
    invokes ``func`` with the remaining truthy options.  Errors that should
    be user-facing are printed via ``perror`` instead of propagating.

    Args:
        args (argparse.Namespace): parsed command-line arguments.

    Returns:
        The handler's return value on success, else None after printing
        an error message.
    """
    args_copy = copy.copy(vars(args))
    config = args_copy.pop('config', None)
    func = args_copy.pop('func', None)
    pth = args_copy.pop('path', None)
    cmd = args_copy.pop('cmd', None)
    if config:
        settings.update(settings.parse_config_file(config))
    # Forward only truthy options; TempPath markers become real Paths.
    func_kwargs = {
        key: Path(val) if type(val) is TempPath else val
        for key, val in args_copy.items() if val
    }
    try:
        if pth:
            return func(pth, **func_kwargs)
        return func(**func_kwargs)
    except NotImplementedError:
        if pth:
            value = pth
        elif func_kwargs:
            value = list(func_kwargs.values())[0]
        else:
            # Bug fix: previously this branch printed its message and then
            # fell through to the perror below, which referenced the unbound
            # name ``value`` and raised NameError.
            perror('%s is not a valid command for the given input\n' % cmd)
            return
        perror('%s is not a valid command for %s\n' % (cmd, value))
    except ValueError as exc:
        perror('Error: %s\n' % str(exc))
    except exceptions.RemoteError as exc:
        if type(exc) is exceptions.NotFoundError and pth:
            perror('Not Found: %s' % pth)
        perror('%s: %s\n' % (exc.__class__.__name__, str(exc)))
def file_name_to_object_name(p):
    """Given a file path, construct its object name.

    Any relative or absolute directory markers at the beginning of the path
    will be stripped, for example::

        ../../my_file -> my_file
        ./my_dir -> my_dir
        .hidden_dir/file -> .hidden_dir/file
        /absolute_dir -> absolute_dir

    Note that windows paths will have their back slashes changed to
    forward slashes::

        C:\\my\\windows\\file -> my/windows/file

    Args:
        p (str): The input path

    Returns:
        PosixPath: The object name. An empty path will be returned in the
            case of the input path only consisting of absolute
            or relative directory markers (i.e. '/' -> '', './' -> '')
    """
    # Imported here to avoid a circular import with the stor package.
    from stor import Path
    from stor.posix import PosixPath

    # Drop the drive (windows), expand user/env vars, and split on the
    # local separator so back slashes become components rejoined with '/'.
    p_parts = Path(p).expand().splitdrive()[1].split(os.path.sep)
    # Index of the first "real" component, skipping '', '..' and '.' markers;
    # None when the path consists solely of such markers.
    obj_start = next(
        (i for i, part in enumerate(p_parts) if part not in ('', '..', '.')),
        None)
    return PosixPath.parts_class(
        '/'.join(p_parts[obj_start:]) if obj_start is not None else '')
def test_w_chdir(self):
    """change_dir=True: cwd is inside the tmp dir only for the block's span."""
    tmp_d = None
    with utils.NamedTemporaryDirectory(change_dir=True) as tmp_d:
        self.assertTrue(tmp_d.exists())
        cwd = Path('.').expand().abspath()
        self.assertTrue(tmp_d in cwd)
    # The directory is deleted once the context exits.
    self.assertFalse(tmp_d.exists())
def test_static_large_obj_copy_and_segment_container(self):
    """Uploading an object larger than segment_size must create a hidden
    .segments_* container that listdir can optionally ignore, and the
    object must round-trip intact."""
    with NamedTemporaryDirectory(change_dir=True) as tmp_d:
        # ~4 MB object split into 1 MB segments -> 5 segments (last partial).
        segment_size = 1048576
        obj_size = segment_size * 4 + 100
        self.create_dataset(tmp_d, 1, obj_size)
        obj_path = stor.join(tmp_d, self.get_dataset_obj_names(1)[0])
        options = {'swift:upload': {'segment_size': segment_size}}
        with settings.use(options):
            obj_path.copy(self.test_container / 'large_object.txt')

        # Verify there is a segment container and that it can be ignored when listing a dir
        segment_container = Path(self.test_container.parent) / ('.segments_%s' % self.test_container.name)  # noqa
        containers = Path(self.test_container.parent).listdir(ignore_segment_containers=False)
        self.assertTrue(segment_container in containers)
        self.assertTrue(self.test_container in containers)
        containers = Path(self.test_container.parent).listdir(ignore_segment_containers=True)
        self.assertFalse(segment_container in containers)
        self.assertTrue(self.test_container in containers)

        # Verify there are five segments
        objs = set(segment_container.list(condition=lambda results: len(results) == 5))
        self.assertEquals(len(objs), 5)

        # Copy back the large object and verify its contents
        obj_path = Path(tmp_d) / 'large_object.txt'
        Path(self.test_container / 'large_object.txt').copy(obj_path)
        self.assertCorrectObjectContents(obj_path, self.get_dataset_obj_names(1)[0], obj_size)
def test_context_manager(self):
    """Can be used as context manager for chdir."""
    with NamedTemporaryDirectory() as tmpdir:
        root = Path(tmpdir)
        child = root / 'subdir'
        child.makedirs()
        previous = os.getcwd()
        # Entering the path chdirs into it; exiting restores the old cwd.
        with child:
            assert os.getcwd() == os.path.realpath(child)
        assert os.getcwd() == previous
def test_construction_from_none(self):
    """Path() must reject non-string inputs such as ints and None."""
    for bad in (1, None):
        try:
            Path(bad)
        except TypeError:
            continue
        raise Exception("DID NOT RAISE on %s" % bad)
def test_chdir_or_cd(self):
    """ tests the chdir or cd method """
    starting_dir = os.getcwd()
    with NamedTemporaryDirectory() as tmpdir:
        here = Path(str(tmpdir)).expand()
        os.chdir(str(tmpdir))
        # Re-read the cwd so symlinked tmp dirs compare consistently.
        here = Path(os.getcwd())
        sub = here / 'subdir'
        sub.makedirs_p()

        # Explicit chdir() in both directions.
        sub.chdir()
        assert Path(os.getcwd()) == sub
        here.chdir()
        assert Path(os.getcwd()) == here

        # The context-manager form restores the cwd on exit.
        with sub:
            assert Path(os.getcwd()).expand() == sub
        assert Path(os.getcwd()).expand() == here
        os.chdir(starting_dir)
def setUp(self):
    # S3 integration tests require live AWS test credentials; skip otherwise.
    super(S3IntegrationTest, self).setUp()
    if not (os.environ.get('AWS_TEST_ACCESS_KEY_ID') and
            os.environ.get('AWS_TEST_SECRET_ACCESS_KEY')):
        raise unittest.SkipTest(
            'AWS_TEST_ACCESS_KEY_ID / AWS_TEST_SECRET_ACCESS_KEY env var not set.'
            ' Skipping integration test')

    # Disable loggers so nose output is clean
    logging.getLogger('botocore').setLevel(logging.CRITICAL)

    self.test_bucket = Path('s3://stor-test-bucket')
    self.test_dir = self.test_bucket / 'test'
    # Feed the test credentials into stor's s3 settings so boto uses them.
    stor.settings.update({
        's3': {
            'aws_access_key_id': os.environ['AWS_TEST_ACCESS_KEY_ID'],
            'aws_secret_access_key': os.environ['AWS_TEST_SECRET_ACCESS_KEY']
        }
    })
def get_path(pth, mode=None):
    """Convert string to a Path type.

    The string ``-`` is a special string depending on mode.
    With mode 'r', it represents stdin and a temporary file is created and
    returned.
    """
    # Non-relative OBS paths (and local paths) pass through unchanged.
    service = _obs_relpath_service(pth)
    if not service:
        return Path(pth)
    relprefix = service + ':'

    # Relative OBS paths are resolved against the saved per-service cwd.
    pwd = Path(_get_pwd(service=service))
    if pwd == pwd.drive:
        raise ValueError(
            'No current directory specified for relative path \'%s\'' % pth)

    pwd = utils.remove_trailing_slash(pwd)
    path_part = pth[len(relprefix):]
    split_parts = path_part.split('/')
    rel_part = split_parts[0]

    prefix = pwd
    depth = 1
    if rel_part == '..':
        # remove trailing slash otherwise we won't find the right parent
        prefix = utils.remove_trailing_slash(prefix)
        # Count how many consecutive leading '..' components there are.
        while len(split_parts) > depth and split_parts[depth] == '..':
            depth += 1
        # Only ascend if the cwd is deep enough to have that many parents.
        if len(pwd[len(pwd.drive):].split('/')) > depth:
            for i in range(0, depth):
                prefix = prefix.parent
        else:
            raise ValueError(
                'Relative path \'%s\' is invalid for current directory \'%s\''
                % (pth, pwd))
    elif rel_part != '.':
        # Plain relative path: simply join onto the cwd.
        return prefix / path_part
    # '.' / '..' prefix: drop the marker components and join the remainder.
    return prefix / path_part.split(rel_part, depth)[depth].lstrip('/')
def NamedTemporaryDirectory(suffix='', prefix='tmp', dir=None,
                            change_dir=False):
    """Context manager for creating and deleting temporary directory.

    Mimics the behavior of tempfile.NamedTemporaryFile.

    Arguments:
        suffix (str): If specified, the dir name will end with it.
        prefix (str): If specified, the dir name will start with it,
            otherwise 'tmp' is used.
        dir (str): If specified, the dir will be created in this
            directory.
        change_dir (bool): If specified, will change to the temporary
            directory.

    Yields:
        Path: The temporary directory.

    Note:
        Name is CamelCase to match tempfile.NamedTemporaryFile.

    Examples:
        >>> from stor import NamedTemporaryDirectory
        >>> with NamedTemporaryDirectory() as d:
        >>>     # Do operations within "d", which will be deleted afterwards
    """
    # NOTE(review): this generator is used via ``with`` — presumably it is
    # decorated with ``@contextlib.contextmanager`` at the definition site;
    # confirm the decorator wasn't lost.
    from stor import Path

    tempdir = Path(tempfile.mkdtemp(suffix, prefix, dir))
    try:
        if change_dir:
            # Path's context manager chdirs into tempdir and restores the
            # previous cwd on exit.
            with tempdir:
                yield tempdir
        else:
            yield tempdir
    finally:
        # Always remove the directory, even if the body raised.
        tempdir.rmtree()