def download_dir_contents(self, prefix, local_dir):
    util.mkdir_p(local_dir)
    prefix = util.ensure_suffix(prefix, "/")
    for key, _ in self._get_matching_s3_keys_generator(prefix):
        if key.endswith("/"):
            # skip directory placeholder objects
            continue
        rel_path = util.trim_prefix(key, prefix)
        local_dest_path = os.path.join(local_dir, rel_path)
        self.download_file(key, local_dest_path)

def download_dir_contents(self, prefix: str, local_dir: str):
    util.mkdir_p(local_dir)
    prefix = util.ensure_suffix(prefix, "/")
    for blob in self.gcs.list_blobs(prefix=prefix):
        if blob.name.endswith("/"):
            # skip directory placeholder objects
            continue
        relative_path = util.trim_prefix(blob.name, prefix)
        local_dest_path = os.path.join(local_dir, relative_path)
        self.download_file(blob.name, local_dest_path)

def download_dir_contents(self, prefix, local_dir):
    util.mkdir_p(local_dir)
    prefix = util.ensure_suffix(prefix, "/")
    for key, _ in self._get_matching_s3_keys_generator(prefix, include_dir_objects=True):
        rel_path = util.trim_prefix(key, prefix)
        local_dest_path = os.path.join(local_dir, rel_path)
        if not local_dest_path.endswith("/"):
            self.download_file(key, local_dest_path)
        else:
            # directory placeholder object: create the local directory instead of downloading
            util.mkdir_p(os.path.dirname(local_dest_path))

def download_dir_contents(self, prefix: str, local_dir: str):
    util.mkdir_p(local_dir)
    prefix = util.ensure_suffix(prefix, "/")
    for blob in self._gcs_matching_blobs_generator(prefix=prefix, include_dir_objects=True):
        relative_path = util.trim_prefix(blob.name, prefix)
        local_dest_path = os.path.join(local_dir, relative_path)
        if not local_dest_path.endswith("/"):
            self.download_file(blob.name, local_dest_path)
        else:
            # directory placeholder object: create the local directory instead of downloading
            util.mkdir_p(os.path.dirname(local_dest_path))

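# Standalone sketch of the same prefix-download pattern using boto3 directly.
# This is an illustration under assumptions, not the implementation above:
# download_s3_prefix is a hypothetical helper name, and the bucket/prefix values
# passed to it would be placeholders. It shows roughly what the key listing,
# prefix trimming, and per-object download amount to.
import os

import boto3


def download_s3_prefix(bucket: str, prefix: str, local_dir: str) -> None:
    s3 = boto3.client("s3")
    if not prefix.endswith("/"):
        prefix += "/"
    paginator = s3.get_paginator("list_objects_v2")
    for page in paginator.paginate(Bucket=bucket, Prefix=prefix):
        for obj in page.get("Contents", []):
            key = obj["Key"]
            if key.endswith("/"):
                continue  # skip directory placeholder objects
            rel_path = key[len(prefix):]
            dest = os.path.join(local_dir, rel_path)
            os.makedirs(os.path.dirname(dest), exist_ok=True)
            s3.download_file(bucket, key, dest)
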
def deconstruct_s3_path(s3_path: str) -> Tuple[str, str]:
    path = util.trim_prefix(s3_path, "s3://")
    bucket = path.split("/")[0]
    key = os.path.join(*path.split("/")[1:])
    return bucket, key
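# Illustrative usage of deconstruct_s3_path (the path below is a placeholder):
#
#     bucket, key = deconstruct_s3_path("s3://my-bucket/models/iris/model.zip")
#     # bucket == "my-bucket"
#     # key == "models/iris/model.zip" (the key segments are rejoined with os.path.join)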