Example #1
    def test_extra(self, mocked_popen, mocked_confirmation):
        mocked_confirmation.return_value = True
        attrs = {"communicate.return_value": ("output", "error")}
        mocked_popen.configure_mock(**attrs)
        sync_s3(["lol"], ["foo", "boo"], "tmp", "s3://yes")
        mocked_popen.assert_called_with([
            "aws",
            "s3",
            "sync",
            "tmp",
            "s3://yes",
            "--exclude",
            "lol",
            "--include",
            "foo",
            "boo",
        ])

        sync_s3(["lol"], [], "tmp", "s3://yes")
        mocked_popen.assert_called_with([
            "aws",
            "s3",
            "sync",
            "tmp",
            "s3://yes",
            "--exclude",
            "lol",
        ])
Example #2
    def test_no_extra(self, mocked_popen, mocked_confirmation):
        mocked_confirmation.return_value = True
        attrs = {"communicate.return_value": ("output", "error")}
        mocked_popen.configure_mock(**attrs)
        sync_s3(from_path="hello/world", to_path="s3://hello")
        mocked_popen.assert_called_with([
            "aws",
            "s3",
            "sync",
            "hello/world",
            "s3://hello",
        ])

        self.assertRaises(InvalidS3PathPattern, sync_s3)
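Both tests above exercise a sync_s3 helper that is not shown on this page. Below is a minimal sketch of what the assertions imply, not the real implementation: the validation behind InvalidS3PathPattern and the confirmation prompt are assumptions inferred solely from the names the tests patch.

import subprocess
from typing import List, Optional


class InvalidS3PathPattern(Exception):
    """Raised when the given paths don't form a valid transfer pattern."""


def get_confirmation(message: str) -> bool:
    # stand-in for the confirmation helper the tests patch as mocked_confirmation
    return input("%s (y/n): " % message) == "y"


def sync_s3(
    exclude: Optional[List[str]] = None,
    include: Optional[List[str]] = None,
    from_path: str = "",
    to_path: str = "",
) -> None:
    # calling sync_s3() with no paths must raise, per test_no_extra
    if not from_path or not to_path:
        raise InvalidS3PathPattern(
            "both from_path and to_path are required for sync")
    cmd: List[str] = ["aws", "s3", "sync", from_path, to_path]
    # exclude flags come before include flags, matching the argument
    # order asserted in test_extra; empty lists add no flags at all
    if exclude:
        cmd.extend(["--exclude"] + exclude)
    if include:
        cmd.extend(["--include"] + include)
    if get_confirmation("Confirm?"):
        process = subprocess.Popen(cmd)
        process.communicate()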
Example #3
def bucket_s3(
    profile: Union[str, bool] = False,
    from_bucket: Optional[str] = None,
    to_bucket: Optional[str] = None,
    recursive: bool = False,
    sync: bool = False,
    exclude: Optional[List[str]] = None,
    include: Optional[List[str]] = None,
    version: bool = False,
    preserve: bool = False,
) -> None:
    """Transfer file between buckets.

    Handle transfer file between buckets or even within the same bucket.
    Handle glob pattern through exclude list first than it will process the include to explicit include files.

    :param profile: use a different profile for this operation
    :type profile: Union[str, bool], optional
    :param from_bucket: source bucket
    :type from_bucket: str, optional
    :param to_bucket: destination bucket
    :type to_bucket: str, optional
    :param recursive: recursive copy a folder
    :type recursive: bool, optional
    :param sync: sync s3 buckets
    :type sync: bool, optional
    :param exclude: glob patterns to exclude
    :type exclude: List[str], optional
    :param include: glob patterns to include
    :type include: List[str], optional
    :param version: move object versions
    :type version: bool, optional
    :param preserve: preserve all of the objects' configuration instead of using the destination bucket's settings
    :type preserve: bool, optional
    """
    if exclude is None:
        exclude = []
    if include is None:
        include = []

    s3 = S3(profile)

    # initialise local variables rather than using the s3 instance directly, since two buckets are processed
    target_bucket: str = ""
    target_path: str = ""
    target_path_list: List[str] = []
    dest_bucket: str = ""
    dest_path = ""
    obj_versions: List[Dict[str, str]] = []

    search_folder: bool = recursive or sync

    if from_bucket:
        target_bucket, target_path, target_path_list = process_path_param(
            from_bucket, s3, search_folder, version=version)
    else:
        s3.set_s3_bucket(
            header="set the source bucket which contains the file to transfer")
        target_bucket = s3.bucket_name
        if search_folder:
            s3.set_s3_path()
            target_path = s3.path_list[0]
        else:
            s3.set_s3_object(multi_select=True, version=version)
            target_path_list = s3.path_list[:]
    if version and not search_folder:
        obj_versions = s3.get_object_version()
    # clean up the s3 attributes for next operation
    s3.bucket_name = ""
    s3.path_list[0] = ""

    if to_bucket:
        dest_bucket, dest_path, _ = process_path_param(to_bucket, s3, True)
    else:
        s3.set_s3_bucket(
            header="set the destination bucket where the file should be transferred")
        s3.set_s3_path()
        dest_bucket = s3.bucket_name
        dest_path = s3.path_list[0]

    if sync:
        sync_s3(
            exclude,
            include,
            "s3://%s/%s" % (target_bucket, target_path),
            "s3://%s/%s" % (dest_bucket, dest_path),
        )
    elif recursive:
        recursive_copy(
            s3,
            target_bucket,
            target_path,
            dest_bucket,
            dest_path,
            exclude,
            include,
            preserve,
        )

    elif version:
        copy_version(
            s3,
            dest_bucket,
            dest_path,
            obj_versions,
            target_bucket,
            target_path,
            preserve,
        )

    else:
        # point the s3 instance's bucket name and path at the destination bucket
        s3.bucket_name = dest_bucket
        s3.path_list[0] = dest_path
        for target_path in target_path_list:
            # process the target key path and get the destination key path
            s3_key = s3.get_s3_destination_key(target_path)
            print("(dryrun) copy: s3://%s/%s to s3://%s/%s" %
                  (target_bucket, target_path, dest_bucket, s3_key))
        if get_confirmation("Confirm?"):
            for target_path in target_path_list:
                s3_key = s3.get_s3_destination_key(target_path)
                print("copy: s3://%s/%s to s3://%s/%s" %
                      (target_bucket, target_path, dest_bucket, s3_key))
                copy_source = {"Bucket": target_bucket, "Key": target_path}
                if not preserve:
                    s3transferwrapper = S3TransferWrapper()
                    s3.client.copy(
                        copy_source,
                        dest_bucket,
                        s3_key,
                        Callback=S3Progress(target_path, target_bucket,
                                            s3.client),
                        Config=s3transferwrapper.transfer_config,
                    )
                else:
                    s3.bucket_name = target_bucket
                    copy_and_preserve(s3, target_bucket, target_path,
                                      dest_bucket, s3_key)
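For context, a hypothetical invocation of bucket_s3 follows; the bucket names are placeholders and the "bucket/prefix/" argument format is an assumption inferred from the process_path_param calls above.

# hypothetical usage sketch, not taken from the project's documentation
bucket_s3(
    from_bucket="source-bucket/logs/",
    to_bucket="archive-bucket/logs/",
    sync=True,
    exclude=["*"],       # exclude everything first ...
    include=["*.gz"],    # ... then explicitly re-include gzip files
)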
Example #4
def upload_s3(
    profile: bool = False,
    bucket: Optional[str] = None,
    local_paths: Optional[Union[str, list]] = None,
    recursive: bool = False,
    hidden: bool = False,
    search_root: bool = False,
    sync: bool = False,
    exclude: Optional[List[str]] = None,
    include: Optional[List[str]] = None,
    extra_config: bool = False,
) -> None:
    """Upload local files/directories to s3.

    Uploads through the boto3 s3 client.
    Glob patterns in the exclude list are handled first, then the include list.

    :param profile: profile to use for this operation
    :type profile: bool, optional
    :param bucket: specify bucket to upload
    :type bucket: str, optional
    :param local_paths: local file paths to upload
    :type local_paths: list, optional
    :param recursive: upload directory
    :type recursive: bool, optional
    :param hidden: include hidden files during search
    :type hidden: bool, optional
    :param search_root: search from root
    :type search_root: bool, optional
    :param sync: use aws cli s3 sync
    :type sync: bool, optional
    :param exclude: glob patterns to exclude
    :type exclude: List[str], optional
    :param include: glob patterns to include
    :type include: List[str], optional
    :param extra_config: configure extra settings during upload
    :type extra_config: bool, optional
    """
    if not local_paths:
        local_paths = []
    if not exclude:
        exclude = []
    if not include:
        include = []

    s3 = S3(profile)
    s3.set_bucket_and_path(bucket)
    if not s3.bucket_name:
        s3.set_s3_bucket()
    if not s3.path_list[0]:
        s3.set_s3_path()

    if not local_paths:
        fzf = Pyfzf()
        recursive = recursive or sync
        # don't allow multi_select for recursive operations
        multi_select = not recursive
        local_paths = fzf.get_local_file(
            search_from_root=search_root,
            directory=recursive,
            hidden=hidden,
            multi_select=multi_select,
        )

    # get the first item from the array since recursive operation doesn't support multi_select
    # local_path is used for sync and recursive operation
    # local_paths is used for single file operation
    if isinstance(local_paths, list):
        local_path = str(local_paths[0])
    else:
        local_path = str(local_paths)

    # construct extra argument
    extra_args = S3Args(s3)
    if extra_config:
        extra_args.set_extra_args(upload=True)

    if sync:
        sync_s3(
            exclude=exclude,
            include=include,
            from_path=local_path,
            to_path="s3://%s/%s" % (s3.bucket_name, s3.path_list[0]),
        )

    elif recursive:
        recursive_upload(s3, local_path, exclude, include, extra_args)

    else:
        for filepath in local_paths:
            # get the formatted s3 destination key
            destination_key = s3.get_s3_destination_key(filepath)
            print("(dryrun) upload: %s to s3://%s/%s" %
                  (filepath, s3.bucket_name, destination_key))

        if get_confirmation("Confirm?"):
            for filepath in local_paths:
                destination_key = s3.get_s3_destination_key(filepath)
                print("upload: %s to s3://%s/%s" %
                      (filepath, s3.bucket_name, destination_key))
                transfer = S3TransferWrapper(s3.client)
                transfer.s3transfer.upload_file(
                    filepath,
                    s3.bucket_name,
                    destination_key,
                    callback=S3Progress(filepath),
                    extra_args=extra_args.extra_args,
                )
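A hypothetical invocation of upload_s3 for illustration; the bucket name and local path are placeholders, and passing a single string for local_paths relies on the str branch shown above.

# hypothetical usage sketch, not taken from the project's documentation
upload_s3(
    bucket="my-bucket/src/",
    local_paths="./app/",
    recursive=True,
    exclude=["*"],       # exclude everything first ...
    include=["*.py"],    # ... then explicitly re-include python files
)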
Example #5
def download_s3(
    profile: Union[str, bool] = False,
    bucket: Optional[str] = None,
    local_path: Optional[str] = None,
    recursive: bool = False,
    search_from_root: bool = False,
    sync: bool = False,
    exclude: Optional[List[str]] = None,
    include: Optional[List[str]] = None,
    hidden: bool = False,
    version: bool = False,
) -> None:
    """Download files/'directory' from s3.

    Handles sync, single-file download and recursive download from an s3 bucket.
    Glob patterns in the exclude list are handled first, then the include list.

    :param profile: profile to use for this operation
    :type profile: Union[str, bool], optional
    :param bucket: specify bucket to download
    :type bucket: str, optional
    :param local_path: local directory path for the download
    :type local_path: str, optional
    :param recursive: download s3 directory
    :type recursive: bool, optional
    :param search_from_root: search from root
    :type search_from_root: bool, optional
    :param sync: use aws cli s3 sync
    :type sync: bool, optional
    :param exclude: glob patterns to exclude
    :type exclude: List[str], optional
    :param include: glob patterns to include
    :type include: List[str], optional
    :param hidden: include hidden files during search
    :type hidden: bool, optional
    :param version: download version object
    :type version: bool, optional
    """
    if not exclude:
        exclude = []
    if not include:
        include = []

    s3 = S3(profile)
    s3.set_bucket_and_path(bucket)
    if not s3.bucket_name:
        s3.set_s3_bucket()
    if recursive or sync:
        if not s3.path_list[0]:
            s3.set_s3_path(download=True)
    else:
        if not s3.path_list[0]:
            s3.set_s3_object(multi_select=True, version=version)

    obj_versions: List[Dict[str, str]] = []
    if version:
        obj_versions = s3.get_object_version()

    if not local_path:
        fzf = Pyfzf()
        local_path = str(
            fzf.get_local_file(search_from_root, directory=True,
                               hidden=hidden))

    if sync:
        sync_s3(
            exclude=exclude,
            include=include,
            from_path="s3://%s/%s" % (s3.bucket_name, s3.path_list[0]),
            to_path=local_path,
        )
    elif recursive:
        download_recusive(s3, exclude, include, local_path)

    elif version:
        download_version(s3, obj_versions, local_path)

    else:
        for s3_path in s3.path_list:
            destination_path = os.path.join(local_path,
                                            os.path.basename(s3_path))
            # without the recursive flag, s3.path_list is populated by s3.set_s3_object,
            # so each s3_path is already a valid s3 key; no need to call s3.get_s3_destination_key
            print("(dryrun) download: s3://%s/%s to %s" %
                  (s3.bucket_name, s3_path, destination_path))
        if get_confirmation("Confirm?"):
            for s3_path in s3.path_list:
                destination_path = os.path.join(local_path,
                                                os.path.basename(s3_path))
                print("download: s3://%s/%s to %s" %
                      (s3.bucket_name, s3_path, destination_path))
                transfer = S3TransferWrapper(s3.client)
                transfer.s3transfer.download_file(
                    s3.bucket_name,
                    s3_path,
                    destination_path,
                    callback=S3Progress(s3_path, s3.bucket_name, s3.client),
                )
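Finally, a hypothetical invocation of download_s3; the bucket name and local path are placeholders.

# hypothetical usage sketch, not taken from the project's documentation
download_s3(
    bucket="my-bucket/reports/",
    local_path="/tmp/reports/",
    sync=True,
    exclude=["*.tmp"],   # skip temp files during the sync
)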