def test_full_copy(self):
    """Exercise file->dir and dir->dir copies; copying a dir over a file must fail."""
    source_dir = self.get_test_path('test_full_copy_src')
    target_dir = self.get_test_path('test_full_copy_dst')
    source_file = join(source_dir, 'file.txt')
    target_file = join(target_dir, 'file.txt')

    with self.cleaning(source_dir, target_dir):
        self.fs.mkdir(source_dir)
        self.fs.mkdir(target_dir)
        payload = "To boldly go where no one has gone before\n" * 2000
        self.fs.create(source_file, data=payload)

        # Copying a file into a directory drops it inside that directory.
        self.fs.copy(source_file, target_dir)
        assert_true(self.fs.exists(target_file))

        # Recursive directory copy nests the source folder under the target.
        folder_name = parse_uri(source_dir)[2]
        self.fs.copy(source_dir, target_dir, True)
        copied_dir = join(target_dir, folder_name)
        assert_true(self.fs.exists(copied_dir))
        assert_true(self.fs.exists(join(copied_dir, 'file.txt')))

        # Copying a directory onto an existing file is an error.
        assert_raises(S3FileSystemException, self.fs.copy, source_dir, target_file, True)
def test_rename_dir(self):
    """Rename a directory with children and verify the move is complete."""
    source_dir = self.get_test_path('test_rename_dir_src')
    target_dir = self.get_test_path('test_rename_dir_dst')

    with self.cleaning(source_dir, target_dir):
        self.fs.mkdir(source_dir)
        self.fs.create(join(source_dir, 'file_one.txt'), data='foo')
        self.fs.create(join(source_dir, 'file_two.txt'), data='bar')

        before = self.fs.listdir(source_dir)
        eq_(2, len(before))
        assert_true('file_one.txt' in before)
        assert_true('file_two.txt' in before)

        # The destination must not exist before the rename.
        assert_false(self.fs.exists(target_dir))

        self.fs.rename(source_dir, target_dir)
        assert_true(self.fs.exists(target_dir))
        assert_false(self.fs.exists(source_dir))

        after = self.fs.listdir(target_dir)
        eq_(2, len(after))
        assert_true('file_one.txt' in after)
        assert_true('file_two.txt' in after)

        # Only the renamed directory key should remain — no stray empty file.
        wanted = self.get_key(target_dir).name.strip('/')
        matches = [key for key in self.bucket.list() if key.name.strip('/') == wanted]
        assert_equal(1, len(matches))
def test_rename_dir(self):
    """Rename a directory and verify no duplicate or stray keys remain in the bucket.

    Fix: ``self.bucket.list()`` yields boto ``Key`` objects, so the original
    ``'file_one.txt' in bucket_ls`` compared a str against Key objects and was
    vacuously False — those assertions never tested anything. We now compare
    against the key *names*, and materialize the listing once instead of
    iterating the boto ResultSet twice.
    """
    src_dir = self.get_test_path('test_rename_dir_src')
    dst_dir = self.get_test_path('test_rename_dir_dst')

    with self.cleaning(src_dir, dst_dir):
        self.fs.mkdir(src_dir)
        self.fs.create(join(src_dir, 'file_one.txt'), data='foo')
        self.fs.create(join(src_dir, 'file_two.txt'), data='bar')

        src_ls = self.fs.listdir(src_dir)
        eq_(2, len(src_ls))
        assert_true('file_one.txt' in src_ls)
        assert_true('file_two.txt' in src_ls)

        # Assert that no directories with dst_dir name exist yet
        assert_false(self.fs.exists(dst_dir))

        # Rename src to dst
        self.fs.rename(src_dir, dst_dir)
        assert_true(self.fs.exists(dst_dir))
        assert_false(self.fs.exists(src_dir))

        dst_ls = self.fs.listdir(dst_dir)
        eq_(2, len(dst_ls))
        assert_true('file_one.txt' in dst_ls)
        assert_true('file_two.txt' in dst_ls)

        # Assert that the children files are not duplicated at top-level destination
        key_names = [key.name for key in self.bucket.list()]
        assert_false('file_one.txt' in key_names)
        assert_false('file_two.txt' in key_names)

        # Assert that only the renamed directory, and not an empty file, exists
        dst_key_name = self.get_key(dst_dir).name.strip('/')
        assert_equal(1, len([name for name in key_names if name.strip('/') == dst_key_name]))
def test_full_copy(self):
    """Cover the three copy shapes: file->dir, dir->dir (recursive), dir->file (error)."""
    src_path = self.get_test_path('test_full_copy_src')
    dst_path = self.get_test_path('test_full_copy_dst')
    src_file_path = join(src_path, 'file.txt')
    dst_file_path = join(dst_path, 'file.txt')

    with self.cleaning(src_path, dst_path):
        for directory in (src_path, dst_path):
            self.fs.mkdir(directory)
        self.fs.create(src_file_path, data="To boldly go where no one has gone before\n" * 2000)

        # File to directory copy.
        self.fs.copy(src_file_path, dst_path)
        assert_true(self.fs.exists(dst_file_path))

        # Directory to directory copy: the source dir is nested under dst.
        self.fs.copy(src_path, dst_path, True)
        nested = join(dst_path, parse_uri(src_path)[2])
        assert_true(self.fs.exists(nested))
        assert_true(self.fs.exists(join(nested, 'file.txt')))

        # Copy directory to file should fail.
        assert_raises(IOError, self.fs.copy, src_path, dst_file_path, True)
def test_rename_star(self):
    """rename_star should move every child of src into an existing dst directory."""
    src_dir = self.get_test_path('test_rename_star_src')
    dst_dir = self.get_test_path('test_rename_star_dst')

    with self.cleaning(src_dir, dst_dir):
        self.fs.mkdir(src_dir)
        for name, content in (('file_one.txt', 'foo'), ('file_two.txt', 'bar')):
            self.fs.create(join(src_dir, name), data=content)

        listing = self.fs.listdir(src_dir)
        eq_(2, len(listing))
        assert_true('file_one.txt' in listing)
        assert_true('file_two.txt' in listing)

        # Snapshot the source stats, then move everything into dst.
        src_stats = self.fs.listdir_stats(src_dir)
        self.fs.mkdir(dst_dir)
        self.fs.rename_star(src_dir, dst_dir)

        expected = {stat.name for stat in src_stats}
        actual = {stat.name for stat in self.fs.listdir_stats(dst_dir)}
        assert_true(expected)
        eq_(expected, actual)
def rename_star(self, old_dir, new_dir):
    """Move every entry of old_dir into new_dir.

    Raises S3FileSystemException when old_dir is not a directory or when
    new_dir is an existing file.
    """
    if not self.isdir(old_dir):
        raise S3FileSystemException("'%s' is not a directory" % old_dir)
    if self.isfile(new_dir):
        raise S3FileSystemException("'%s' is not a directory" % new_dir)
    # Rename each child individually under the new parent.
    for child in self.listdir(old_dir):
        self.rename(s3.join(old_dir, child), s3.join(new_dir, child))
def rename_star(self, old_dir, new_dir):
    """Rename all children of old_dir into new_dir.

    Raises IOError(ENOTDIR) when old_dir is not a directory or when new_dir
    already exists as a file.
    """
    if not self.isdir(old_dir):
        raise IOError(errno.ENOTDIR, "'%s' is not a directory" % old_dir)
    if self.isfile(new_dir):
        raise IOError(errno.ENOTDIR, "'%s' is not a directory" % new_dir)
    children = self.listdir(old_dir)
    for child in children:
        # Each child keeps its name, only the parent changes.
        self.rename(s3.join(old_dir, child), s3.join(new_dir, child))
def _copy(self, src, dst, recursive, use_src_basename):
    """Server-side copy of a key (or key tree, when recursive) from src to dst.

    When use_src_basename is true and dst is an existing directory, the
    source directory itself is nested under dst; otherwise only the
    contents of src are copied.
    """
    src_stat = self.stats(src)
    if src_stat.isDir and not recursive:
        return None  # directories are skipped unless a recursive copy was asked for

    dst = s3.abspath(src, dst)
    dst_stat = self._stats(dst)
    if src_stat.isDir and dst_stat and not dst_stat.isDir:
        raise IOError(errno.EEXIST, "Cannot overwrite non-directory '%s' with directory '%s'" % (dst, src))

    src_bucket_name, src_key = s3.parse_uri(src)[:2]
    dst_bucket_name, dst_key = s3.parse_uri(dst)[:2]
    keep_src_basename = use_src_basename and dst_stat and dst_stat.isDir
    src_bucket = self._get_bucket(src_bucket_name)
    dst_bucket = self._get_bucket(dst_bucket_name)

    # 'cut' = number of leading characters to strip from every source key
    # name when computing its destination name.
    if keep_src_basename:
        # Strip only the parent directory prefix (keep the source basename).
        cut = len(posixpath.dirname(src_key))
        if cut:
            cut += 1  # also drop the separator after the parent
    else:
        # Strip the whole source prefix plus its (possibly implicit) separator.
        cut = len(src_key)
        if not src_key.endswith('/'):
            cut += 1

    for key in src_bucket.list(prefix=src_key):
        if not key.name.startswith(src_key):
            raise RuntimeError("Invalid key to transform: %s" % key.name)
        dst_name = posixpath.normpath(s3.join(dst_key, key.name[cut:]))
        key.copy(dst_bucket, dst_name)
def _copy(self, src, dst, recursive, use_src_basename):
    """Copy a key (or, when recursive, a whole key tree) from src to dst.

    src/dst are S3 URIs. When use_src_basename is true and dst is an
    existing directory, the source directory itself is nested under dst;
    otherwise only the children of src are copied into dst.

    Raises S3FileSystemException when attempting to overwrite a
    non-directory with a directory, or when a listed key falls outside
    the source prefix.
    """
    src_st = self.stats(src)
    if src_st.isDir and not recursive:
        return # omitting directory
    dst = s3.abspath(src, dst)
    dst_st = self._stats(dst)
    if src_st.isDir and dst_st and not dst_st.isDir:
        raise S3FileSystemException("Cannot overwrite non-directory '%s' with directory '%s'" % (dst, src))
    src_bucket, src_key = s3.parse_uri(src)[:2]
    dst_bucket, dst_key = s3.parse_uri(dst)[:2]
    keep_src_basename = use_src_basename and dst_st and dst_st.isDir
    # Rebind the bucket-name strings to actual bucket objects.
    src_bucket = self._get_bucket(src_bucket)
    dst_bucket = self._get_bucket(dst_bucket)
    # 'cut' = number of leading characters to strip from each source key
    # name when building its destination name.
    if keep_src_basename:
        # Strip only the parent directory prefix, keeping the source basename.
        cut = len(posixpath.dirname(src_key))
        if cut:
            cut += 1  # also drop the separator after the parent
    else:
        # Strip the full source prefix, including a trailing separator.
        cut = len(src_key)
        if not src_key.endswith('/'):
            cut += 1
    for key in src_bucket.list(prefix=src_key):
        if not key.name.startswith(src_key):
            raise S3FileSystemException(_("Invalid key to transform: %s") % key.name)
        dst_name = posixpath.normpath(s3.join(dst_key, key.name[cut:]))
        # Directory keys need a trailing separator so they remain
        # directories at the destination.
        if self.isdir(normpath(self.join(S3A_ROOT, key.bucket.name, key.name))):
            dst_name = self._append_separator(dst_name)
        key.copy(dst_bucket, dst_name)
def _copy(self, src, dst, recursive, use_src_basename):
    """Copy src to dst using server-side S3 key copies.

    Directories are only copied when recursive is set. When
    use_src_basename is true and dst is an existing directory, the source
    directory is nested under dst instead of merging its contents.
    """
    source_stat = self.stats(src)
    if source_stat.isDir and not recursive:
        return None  # omitting directory

    dst = s3.abspath(src, dst)
    dest_stat = self._stats(dst)
    if source_stat.isDir and dest_stat and not dest_stat.isDir:
        raise IOError(errno.EEXIST, "Cannot overwrite non-directory '%s' with directory '%s'" % (dst, src))

    source_bucket_name, source_key = s3.parse_uri(src)[:2]
    dest_bucket_name, dest_key = s3.parse_uri(dst)[:2]
    keep_src_basename = use_src_basename and dest_stat and dest_stat.isDir
    source_bucket = self._get_bucket(source_bucket_name)
    dest_bucket = self._get_bucket(dest_bucket_name)

    if keep_src_basename:
        # Keep the source basename: strip only the parent prefix (+ separator).
        offset = len(posixpath.dirname(source_key))
        offset += 1 if offset else 0
    else:
        # Strip the whole source key (+ a separator when not already present).
        offset = len(source_key)
        offset += 0 if source_key.endswith('/') else 1

    for key in source_bucket.list(prefix=source_key):
        if not key.name.startswith(source_key):
            raise RuntimeError(_("Invalid key to transform: %s") % key.name)
        key.copy(dest_bucket, posixpath.normpath(s3.join(dest_key, key.name[offset:])))
def test_rmtree(self):
    """rmtree must reject trash mode and recursively delete with skipTrash."""
    assert_raises(NotImplementedError, self.fs.rmtree, 'universe', skipTrash=False)

    directory = self.get_test_path('test_rmtree')
    with self.cleaning(directory):
        self.fs.mkdir(directory)
        nested_dir = join(directory, 'nested_dir')
        self.fs.mkdir(nested_dir)
        file_path = join(nested_dir, 'file')
        # Write the leaf file directly through boto, bypassing self.fs.
        self.get_key(file_path).set_contents_from_string('Some content')

        self.fs.rmtree(directory, skipTrash=True)

        # Everything under (and including) the root must be gone.
        for path in (file_path, nested_dir, directory):
            assert_false(self.fs.exists(path))
def test_exists(self):
    """exists() covers files, implicit parent dirs, real buckets and the root."""
    parent_dir = self.get_test_path('test_exists')
    child_file = join(parent_dir, 'file')

    assert_false(self.fs.exists(parent_dir))
    assert_false(self.fs.exists(child_file))

    self.fs.create(child_file)

    # Creating the file also materializes its parent directory.
    assert_true(self.fs.exists(parent_dir))
    assert_true(self.fs.exists(child_file))
    assert_true(self.fs.exists('s3a://%s' % self.bucket_name))
    assert_true(self.fs.exists('s3a://'))

    # A random bucket name must not exist.
    missing_bucket = 'fake%s' % generate_id(8, string.ascii_lowercase + string.digits)
    assert_false(self.fs.exists('s3a://%s' % missing_bucket))
def test_exists(self):
    """Check exists() for files, their implicit parents, buckets and the root."""
    dir_path = self.get_test_path('test_exists')
    file_path = join(dir_path, 'file')

    # Nothing exists before the file is created.
    assert_false(self.fs.exists(dir_path))
    assert_false(self.fs.exists(file_path))

    self.fs.create(file_path)

    # The file and its implicit parent directory now both exist.
    assert_true(self.fs.exists(file_path))
    assert_true(self.fs.exists(dir_path))
    assert_true(self.fs.exists('s3://%s' % self.bucket_name))
    assert_true(self.fs.exists('s3://'))

    # A randomly-named bucket should not exist.
    fake_bucket = 'fake%s' % generate_id(8, string.ascii_lowercase + string.digits)
    assert_false(self.fs.exists('s3://%s' % fake_bucket))
def join(*comp_list):
    """Module-level convenience wrapper delegating to s3.join for path components."""
    return s3.join(*comp_list)
def get_test_path(cls, path=None):
    """Build a test path under the class's bucket/prefix, optionally appending path."""
    base_path = join('s3a://', cls.bucket_name, cls.path_prefix)
    return join(base_path, path) if path else base_path