Esempio n. 1
0
def delete_object_version(s3: S3,
                          allversion: bool = False,
                          mfa: str = "") -> None:
    """Delete versions of an object.

    :param s3: S3 instance
    :type s3: S3
    :param allversion: skip version selection and select all versions
    :type allversion: bool, optional
    :param mfa: mfa serial number and code separated by space to use mfa privilege
    :type mfa: str, optional
    """
    # mfa requires a fresh code for every request, so multi-select is only
    # enabled when no mfa is involved
    obj_versions = s3.get_object_version(
        delete=True,
        select_all=allversion,
        multi_select=not mfa)

    # dryrun pass first so the user can review exactly what will be deleted
    for obj_version in obj_versions:
        print("(dryrun) delete: s3://%s/%s with version %s" %
              (s3.bucket_name, obj_version.get("Key"),
               obj_version.get("VersionId")))
    if get_confirmation("Confirm?"):
        for obj_version in obj_versions:
            print("delete: s3://%s/%s with version %s" % (
                s3.bucket_name,
                obj_version.get("Key"),
                obj_version.get("VersionId"),
            ))
            s3.client.delete_object(
                Bucket=s3.bucket_name,
                Key=obj_version.get("Key"),
                MFA=mfa,
                VersionId=obj_version.get("VersionId"),
            )
Esempio n. 2
0
def download_version(s3: S3, obj_versions: List[Dict[str, str]],
                     local_path: str) -> None:
    """Download versions of an object.

    :param s3: instance of S3
    :type s3: S3
    :param obj_versions: list of object and their versions to download
    :type obj_versions: List[Dict[str, str]]
    :param local_path: local directory to download
    :type local_path: str
    """
    # dryrun pass first so the user can review exactly what will be downloaded
    for obj_version in obj_versions:
        destination_path = os.path.join(
            local_path, os.path.basename(obj_version.get("Key", "")))
        print("(dryrun) download: s3://%s/%s to %s with version %s" % (
            s3.bucket_name,
            obj_version.get("Key"),
            destination_path,
            obj_version.get("VersionId"),
        ))

    if get_confirmation("Confirm"):
        # the transfer wrapper only wraps the client, so one instance can be
        # reused for every download instead of rebuilding it per iteration
        transfer = S3TransferWrapper(s3.client)
        for obj_version in obj_versions:
            destination_path = os.path.join(
                local_path, os.path.basename(obj_version.get("Key", "")))
            print("download: s3://%s/%s to %s with version %s" % (
                s3.bucket_name,
                obj_version.get("Key"),
                destination_path,
                obj_version.get("VersionId"),
            ))
            transfer.s3transfer.download_file(
                s3.bucket_name,
                obj_version.get("Key"),
                destination_path,
                extra_args={"VersionId": obj_version.get("VersionId")},
                callback=S3Progress(
                    obj_version.get("Key", ""),
                    s3.bucket_name,
                    s3.client,
                    obj_version.get("VersionId"),
                ),
            )
Esempio n. 3
0
def download_recusive(s3: S3, exclude: List[str], include: List[str],
                      local_path: str) -> None:
    """Download s3 recursive.

    :param s3: S3 instance
    :type s3: S3
    :param exclude: glob pattern to exclude
    :type exclude: List[str]
    :param include: glob pattern to include
    :type include: List[str]
    :param local_path: local directory to download
    :type local_path: str
    """
    # walk_s3_folder performs the dryrun printing and returns the list of
    # (s3_key, destination) pairs to transfer
    download_list = walk_s3_folder(
        s3.client,
        s3.bucket_name,
        s3.path_list[0],
        s3.path_list[0],
        [],
        exclude,
        include,
        "download",
        local_path,
    )

    if get_confirmation("Confirm?"):
        # the transfer wrapper only wraps the client, so one instance can be
        # reused for every download instead of rebuilding it per iteration
        transfer = S3TransferWrapper(s3.client)
        for s3_key, dest_pathname in download_list:
            # create missing parent directories; exist_ok avoids the
            # check-then-create race of the exists()/makedirs() pattern
            os.makedirs(os.path.dirname(dest_pathname), exist_ok=True)
            print("download: s3://%s/%s to %s" %
                  (s3.bucket_name, s3_key, dest_pathname))
            transfer.s3transfer.download_file(
                s3.bucket_name,
                s3_key,
                dest_pathname,
                callback=S3Progress(s3_key, s3.bucket_name, s3.client),
            )
Esempio n. 4
0
def delete_s3(
    profile: Union[str, bool] = False,
    bucket: Optional[str] = None,
    recursive: bool = False,
    exclude: Optional[List[str]] = None,
    include: Optional[List[str]] = None,
    mfa: str = "",
    version: bool = False,
    allversion: bool = False,
    deletemark: bool = False,
    clean: bool = False,
) -> None:
    """Delete file/directory on the selected s3 bucket.

    :param profile: use a different profile for this operation
    :type profile: Union[str, bool], optional
    :param bucket: specify a bucket to operate
    :type bucket: str, optional
    :param recursive: recursive delete
    :type recursive: bool, optional
    :param exclude: glob pattern to exclude
    :type exclude: List[str], optional
    :param include: glob pattern to include
    :type include: List[str], optional
    :param mfa: specify mfa information to operate MFA delete
    :type mfa: str, optional
    :param version: delete selected version
    :type version: bool, optional
    :param allversion: skip selection of version, delete all versions
    :type allversion: bool, optional
    :param deletemark: only display files with delete mark
    :type deletemark: bool, optional
    :param clean: recursive delete all older versions but leave the current version
    :type clean: bool, optional
    """
    # defaults are None (never mutable []) to avoid the shared mutable
    # default-argument pitfall; normalize here
    if exclude is None:
        exclude = []
    if include is None:
        include = []

    s3 = S3(profile)

    # deletemark/allversion/clean all imply version-aware deletion
    if deletemark:
        version = True
    if allversion:
        version = True
    if clean:
        version = True
        allversion = True
        recursive = True
    if mfa:
        # mfa operation can only operate on one object
        # because each time, it will require a new mfa code
        recursive = False
        allversion = False
        clean = False

    s3.set_bucket_and_path(bucket)
    if not s3.bucket_name:
        s3.set_s3_bucket()
    if recursive:
        if not s3.path_list[0]:
            s3.set_s3_path()
    else:
        if not s3.path_list[0]:
            s3.set_s3_object(
                version=version,
                multi_select=not mfa,
                deletemark=deletemark,
            )

    if recursive:
        delete_object_recursive(s3, exclude, include, deletemark, clean,
                                allversion)

    elif version:
        delete_object_version(s3, allversion, mfa)

    else:
        # due the fact without recursive flag s3.bucket_path is set by s3.set_s3_object
        # the bucket_path is the valid s3 key so we don't need to call s3.get_s3_destination_key
        for s3_path in s3.path_list:
            print("(dryrun) delete: s3://%s/%s" % (s3.bucket_name, s3_path))
        if get_confirmation("Confirm?"):
            for s3_path in s3.path_list:
                print("delete: s3://%s/%s" % (s3.bucket_name, s3_path))
                s3.client.delete_object(
                    Bucket=s3.bucket_name,
                    Key=s3_path,
                )
Esempio n. 5
0
def delete_object_recursive(
    s3: S3,
    exclude: Optional[List[str]] = None,
    include: Optional[List[str]] = None,
    deletemark: bool = False,
    clean: bool = False,
    allversion: bool = False,
) -> None:
    """Recursively delete objects and, when requested, their versions.

    :param s3: S3 instance
    :type s3: S3
    :param exclude: glob pattern to exclude
    :type exclude: List[str], optional
    :param include: glob pattern to include
    :type include: List[str], optional
    :param deletemark: only delete deletemarkers
    :type deletemark: bool, optional
    :param clean: delete all versions except the current version
    :type clean: bool, optional
    :param allversion: delete allversions, use to nuke the entire bucket or folder
    :type allversion: bool, optional
    """
    if not allversion:
        # plain recursive delete: walk the folder (which also prints the
        # dryrun output) and remove every matched key
        file_list = walk_s3_folder(
            s3.client,
            s3.bucket_name,
            s3.path_list[0],
            s3.path_list[0],
            [],
            exclude,
            include,
            "delete",
        )
        if get_confirmation("Confirm?"):
            for s3_key, _ in file_list:
                print("delete: s3://%s/%s" % (s3.bucket_name, s3_key))
                s3.client.delete_object(
                    Bucket=s3.bucket_name,
                    Key=s3_key,
                )
        return

    # version-aware path: walk_s3_folder cannot see deleted versions or
    # delete markers, so list files through find_all_version_files instead
    file_list = find_all_version_files(
        s3.client,
        s3.bucket_name,
        s3.path_list[0],
        [],
        exclude,
        include,
        deletemark,
    )

    # collect every version of every matched file up front
    obj_versions: List[Dict[str, str]] = []
    for file in file_list:
        versions = s3.get_object_version(key=file,
                                         delete=True,
                                         select_all=True,
                                         non_current=clean)
        obj_versions.extend(versions)
        scope = "all non-current versions" if clean else "with all versions"
        print("(dryrun) delete: s3://%s/%s %s" % (
            s3.bucket_name,
            file,
            scope,
        ))

    prompt_scope = ("all non-current versions"
                    if clean else "all of their versions")
    if get_confirmation("Delete %s?" % prompt_scope):
        for obj_version in obj_versions:
            key = obj_version.get("Key")
            version_id = obj_version.get("VersionId")
            print("delete: s3://%s/%s with version %s" % (
                s3.bucket_name,
                key,
                version_id,
            ))
            s3.client.delete_object(
                Bucket=s3.bucket_name,
                Key=key,
                VersionId=version_id,
            )
Esempio n. 6
0
def download_s3(
    profile: Union[str, bool] = False,
    bucket: Optional[str] = None,
    local_path: Optional[str] = None,
    recursive: bool = False,
    search_from_root: bool = False,
    sync: bool = False,
    exclude: Optional[List[str]] = None,
    include: Optional[List[str]] = None,
    hidden: bool = False,
    version: bool = False,
) -> None:
    """Download files/'directory' from s3.

    Handles sync, download file and download recursive from a s3 bucket.
    Glob pattern are first handled through exclude list and then include list.

    :param profile: profile to use for this operation
    :type profile: Union[str, bool], optional
    :param bucket: specify bucket to download
    :type bucket: str, optional
    :param local_path: local file path for download
    :type local_path: str, optional
    :param recursive: download s3 directory
    :type recursive: bool, optional
    :param search_from_root: search from root
    :type search_from_root: bool, optional
    :param sync: use aws cli s3 sync
    :type sync: bool, optional
    :param exclude: glob patterns to exclude
    :type exclude: List[str], optional
    :param include: glob patterns to include
    :type include: List[str], optional
    :param hidden: include hidden files during search
    :type hidden: bool, optional
    :param version: download version object
    :type version: bool, optional
    """
    if not exclude:
        exclude = []
    if not include:
        include = []

    s3 = S3(profile)
    s3.set_bucket_and_path(bucket)
    if not s3.bucket_name:
        s3.set_s3_bucket()
    if recursive or sync:
        if not s3.path_list[0]:
            s3.set_s3_path(download=True)
    else:
        if not s3.path_list[0]:
            s3.set_s3_object(multi_select=True, version=version)

    obj_versions: List[Dict[str, str]] = []
    if version:
        obj_versions = s3.get_object_version()

    # no destination given: let the user pick a local directory with fzf
    if not local_path:
        fzf = Pyfzf()
        local_path = str(
            fzf.get_local_file(search_from_root, directory=True,
                               hidden=hidden))

    if sync:
        sync_s3(
            exclude=exclude,
            include=include,
            from_path="s3://%s/%s" % (s3.bucket_name, s3.path_list[0]),
            to_path=local_path,
        )
    elif recursive:
        download_recusive(s3, exclude, include, local_path)

    elif version:
        download_version(s3, obj_versions, local_path)

    else:
        for s3_path in s3.path_list:
            destination_path = os.path.join(local_path,
                                            os.path.basename(s3_path))
            # due the fact without recursive flag s3.path_list[0] is set by s3.set_s3_object
            # the bucket_path is the valid s3 key so we don't need to call s3.get_s3_destination_key
            print("(dryrun) download: s3://%s/%s to %s" %
                  (s3.bucket_name, s3_path, destination_path))
        if get_confirmation("Confirm?"):
            # the transfer wrapper only wraps the client, so one instance can
            # be reused for every download instead of rebuilding it per file
            transfer = S3TransferWrapper(s3.client)
            for s3_path in s3.path_list:
                destination_path = os.path.join(local_path,
                                                os.path.basename(s3_path))
                print("download: s3://%s/%s to %s" %
                      (s3.bucket_name, s3_path, destination_path))
                transfer.s3transfer.download_file(
                    s3.bucket_name,
                    s3_path,
                    destination_path,
                    callback=S3Progress(s3_path, s3.bucket_name, s3.client),
                )