def upload_dir(self, dirname, blob, container_name=None, use_basename=True):
    """Uploads a local directory to Azure Blob Storage.

    Args:
        dirname: `str`. name of the directory to upload.
        blob: `str`. blob to upload to.
        container_name: `str`. the name of the container.
        use_basename: `bool`. whether or not to use the basename of the directory.
    """
    if not container_name:
        # No explicit container: treat `blob` as a full wasbs:// URL and
        # extract the container name and the blob path from it.
        container_name, _, blob = self.parse_wasbs_url(blob)

    if use_basename:
        blob = append_basename(blob, dirname)

    # Turn the path to absolute paths
    dirname = os.path.abspath(dirname)
    with get_files_in_current_directory(dirname) as files:
        for f in files:
            # Mirror the local directory layout under the destination prefix.
            file_blob = os.path.join(blob, os.path.relpath(f, dirname))
            self.upload_file(filename=f,
                             blob=file_blob,
                             container_name=container_name,
                             use_basename=False)
def test_get_files_in_current_directory(self):
    """The listing covers every file in the tree, including nested ones."""
    dirname = tempfile.mkdtemp()
    expected = set()

    # Two files directly under the root directory.
    for name, content in (('test1.txt', 'data1'), ('test2.txt', 'data2')):
        path = dirname + '/' + name
        with open(path, 'w') as handle:
            handle.write(content)
        expected.add(path)

    # One file inside a nested temporary subdirectory.
    dirname2 = tempfile.mkdtemp(prefix=dirname + '/')
    nested_path = dirname2 + '/test3.txt'
    with open(nested_path, 'w') as handle:
        handle.write('data3')
    expected.add(nested_path)

    with get_files_in_current_directory(dirname) as files:
        assert len(files) == 3
        assert set(files) == expected
def upload_dir(self, dirname, key, bucket_name=None, overwrite=False, encrypt=False, acl=None, use_basename=True):
    """Uploads a local directory to S3.

    Args:
        dirname: `str`. name of the directory to upload.
        key: `str`. S3 key that will point to the file.
        bucket_name: `str`. Name of the bucket in which to store the file.
        overwrite: `bool`. A flag to decide whether or not to overwrite the key
            if it already exists. If replace is False and the key exists, an
            error will be raised.
        encrypt: `bool`. If True, the file will be encrypted on the server-side
            by S3 and will be stored in an encrypted form while at rest in S3.
        acl: `str`. ACL to use for uploading, e.g. "public-read".
        use_basename: `bool`. whether or not to use the basename of the directory.
    """
    if not bucket_name:
        # Without an explicit bucket, `key` is a full s3:// URL to split.
        bucket_name, key = self.parse_s3_url(key)

    if use_basename:
        key = append_basename(key, dirname)

    # Work with an absolute local path so relpath below is stable.
    dirname = os.path.abspath(dirname)
    with get_files_in_current_directory(dirname) as files:
        for filepath in files:
            # Reproduce the local layout under the destination key prefix.
            target_key = os.path.join(key, os.path.relpath(filepath, dirname))
            self.upload_file(filename=filepath,
                             key=target_key,
                             bucket_name=bucket_name,
                             overwrite=overwrite,
                             encrypt=encrypt,
                             acl=acl,
                             use_basename=False)