Example #1
    def test_process_path_param(self, mocked_path, mocked_object):
        s3 = S3()
        result = process_path_param(bucket="kazhala-lol/",
                                    s3=s3,
                                    search_folder=True,
                                    version=False)
        self.assertEqual(result, ("kazhala-lol", "", [""]))
        mocked_path.assert_called_with()

        s3 = S3()
        result = process_path_param(bucket="kazhala-lol/hello.txt",
                                    s3=s3,
                                    search_folder=False,
                                    version=False)
        self.assertEqual(result, ("kazhala-lol", "hello.txt", ["hello.txt"]))
        mocked_object.assert_not_called()

        s3 = S3()
        s3.bucket_name = "lol"
        result = process_path_param(bucket="",
                                    s3=s3,
                                    search_folder=False,
                                    version=True)
        self.assertEqual(result, ("lol", "", [""]))
        mocked_object.assert_called_with(multi_select=True, version=True)
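The patch decorators are cut off in this snippet. Given the asserted mocks, they presumably look something like the following (the patch targets are assumptions, not confirmed by the source):

    @patch.object(S3, "set_s3_object")
    @patch.object(S3, "set_s3_path")
    def test_process_path_param(self, mocked_path, mocked_object):
        ...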
Example #2
    def setUp(self):
        self.capturedOutput = io.StringIO()
        sys.stdout = self.capturedOutput
        s3 = S3()
        s3.bucket_name = "hello"
        s3.path_list = ["hello.json"]
        self.s3_args = S3Args(s3)
Example #3
def update_object_version(
    s3: S3,
    allversion: bool = False,
    acl: bool = False,
    tagging: bool = False,
) -> None:
    """Update versions of object's attributes.

    Note: this operation only allow update of acl and tagging, because
    this won't introduce new objects.

    :param s3: S3 instance
    :type s3: S3
    :param allversion: update all versions?
    :type allversion: bool, optional
    :param acl: update acl
    :type acl: bool, optional
    :param tagging: update tagging
    :type tagging: bool, optional
    """
    obj_versions = s3.get_object_version(select_all=allversion)
    s3_args = S3Args(s3)
    s3_args.set_extra_args(acl, tagging, version=obj_versions)
    # check if only tags or acl is being updated
    # this way it won't create extra versions on the object
    check_result = s3_args.check_tag_acl()

    for obj_version in obj_versions:
        print(
            "(dryrun) update: s3://%s/%s with version %s"
            % (s3.bucket_name, obj_version.get("Key"), obj_version.get("VersionId"))
        )
    if get_confirmation("Confirm?"):
        for obj_version in obj_versions:
            print(
                "update: s3://%s/%s with version %s"
                % (
                    s3.bucket_name,
                    obj_version.get("Key"),
                    obj_version.get("VersionId"),
                )
            )
            if check_result:
                if check_result.get("Tags"):
                    s3.client.put_object_tagging(
                        Bucket=s3.bucket_name,
                        Key=obj_version.get("Key"),
                        VersionId=obj_version.get("VersionId"),
                        Tagging={"TagSet": check_result.get("Tags")},
                    )
                if check_result.get("Grants"):
                    grant_args = {
                        "Bucket": s3.bucket_name,
                        "Key": obj_version.get("Key"),
                        "VersionId": obj_version.get("VersionId"),
                    }
                    grant_args.update(check_result.get("Grants", {}))
                    s3.client.put_object_acl(**grant_args)
            else:
                print("Nothing to update")
Example #4
    def setUp(self):
        self.capturedOutput = io.StringIO()
        sys.stdout = self.capturedOutput
        fileloader = FileLoader()
        config_path = Path(__file__).resolve().parent.joinpath(
            "../data/fzfaws.yml")
        fileloader.load_config_file(config_path=str(config_path))
        self.s3 = S3()
Example #5
    def test_constructor(self):
        self.assertEqual(self.s3.profile, "default")
        self.assertEqual(self.s3.region, "us-east-1")
        self.assertEqual(self.s3.bucket_name, "")
        self.assertEqual(self.s3.path_list, [""])

        s3 = S3(profile="root", region="us-east-1")
        self.assertEqual(s3.profile, "root")
        self.assertEqual(s3.region, "us-east-1")
        self.assertEqual(s3.bucket_name, "")
        self.assertEqual(s3.path_list, [""])
Example #6
def recursive_upload(s3: S3, local_path: str, exclude: List[str],
                     include: List[str], extra_args: S3Args) -> None:
    """Recursive upload local directory to s3.

    Perform a os.walk to upload everyfile under a directory.

    :param s3: S3 instance
    :type s3: S3
    :param local_path: local directory
    :type local_path: str
    :param exclude: glob pattern to exclude
    :type exclude: List[str]
    :param include: glob pattern to include
    :type include: List[str]
    :param extra_args: S3Args instance to set extra argument
    :type extra_args: S3Args
    """
    upload_list: List[Dict[str, str]] = []
    for root, _, files in os.walk(local_path):
        for filename in files:
            full_path = os.path.join(root, filename)
            relative_path = os.path.relpath(full_path, local_path)

            if not exclude_file(exclude, include, relative_path):
                destination_key = s3.get_s3_destination_key(relative_path,
                                                            recursive=True)
                print("(dryrun) upload: %s to s3://%s/%s" %
                      (relative_path, s3.bucket_name, destination_key))
                upload_list.append({
                    "local_path": full_path,
                    "bucket": s3.bucket_name,
                    "key": destination_key,
                    "relative": relative_path,
                })

    if get_confirmation("Confirm?"):
        for item in upload_list:
            print("upload: %s to s3://%s/%s" %
                  (item["relative"], item["bucket"], item["key"]))
            transfer = S3TransferWrapper(s3.client)
            transfer.s3transfer.upload_file(
                item["local_path"],
                item["bucket"],
                item["key"],
                callback=S3Progress(item["local_path"]),
                extra_args=extra_args.extra_args,
            )
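recursive_upload delegates filtering to exclude_file. Below is a hedged sketch of the exclude-first, include-second semantics it is assumed to implement (the real helper may differ in detail):

from fnmatch import fnmatch
from typing import List

def exclude_file_sketch(exclude: List[str], include: List[str], path: str) -> bool:
    """Return True when the file should be skipped."""
    if any(fnmatch(path, pattern) for pattern in exclude):
        # an include pattern can re-admit a previously excluded file
        return not any(fnmatch(path, pattern) for pattern in include)
    return False

# exclude everything, then re-include python files
assert exclude_file_sketch(["*"], ["*.py"], "src/main.py") is False
assert exclude_file_sketch(["*"], ["*.py"], "notes.txt") is True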
Example #7
    def test_get_copy_args_with_version(self):
        data_path1 = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                  "../data/s3_obj.json")
        with open(data_path1, "r") as file:
            response1 = json.load(file)
        data_path2 = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                  "../data/s3_acl.json")
        with open(data_path2, "r") as file:
            response2 = json.load(file)

        # with version
        s3_client = boto3.client("s3")
        stubber = Stubber(s3_client)
        stubber.add_response("get_object", response1)
        stubber.add_response("get_object_acl", response2)
        stubber.activate()
        s3 = S3()
        s3._client = s3_client
        s3.bucket_name = "hello"
        s3_args = S3Args(s3)
        result = get_copy_args(s3, "hello.json", s3_args, False)
        self.assertEqual(
            result,
            {
                "Bucket": "hello",
                "Key": "hello.json",
                "CopySource": {
                    "Bucket": "hello",
                    "Key": "hello.json"
                },
                "StorageClass": "REDUCED_REDUNDANCY",
                "ServerSideEncryption": "aws:kms",
                "SSEKMSKeyId":
                "arn:aws:kms:ap-southeast-2:11111111:key/11111111-f48d-48b8-90d4-d5bd03a603d4",
                "GrantRead":
                "uri=http://acs.amazonaws.com/groups/global/AllUsers",
            },
        )
Example #8
def construct_s3_creation_args(cloudformation: Cloudformation,
                               bucket: Optional[str],
                               version: Union[str, bool]) -> Dict[str, Any]:
    """Construct cloudformation argument for template in s3.

    Retrieve the template from s3 bucket and validate and process the content in it
    then return the ready to use cloudformation argument for boto3.

    :param cloudformation: Cloudformation instance
    :type cloudformation: Cloudformation
    :param bucket: bucket name
    :type bucket: 
    :return: return the formated cloudformation argument thats ready to use by boto3
    :rtype: Dict[str, Any]
    """
    s3 = S3(cloudformation.profile, cloudformation.region)
    s3.set_bucket_and_path(bucket)
    if not s3.bucket_name:
        s3.set_s3_bucket(header="select a bucket which contains the template")
    if not s3.path_list[0]:
        s3.set_s3_object()

    # check file type is yaml or json
    check_is_valid(s3.path_list[0])

    # if version is requested but not set through cmd line, get user to select a version
    if version is True:
        version = s3.get_object_version(s3.bucket_name,
                                        s3.path_list[0])[0].get(
                                            "VersionId", False)

    # validate the template through boto3
    validate_stack(
        cloudformation.profile,
        cloudformation.region,
        bucket="%s/%s" % (s3.bucket_name, s3.path_list[0]),
        version=version if version else False,
        no_print=True,
    )

    file_type: str = ""
    if is_yaml(s3.path_list[0]):
        file_type = "yaml"
    elif is_json(s3.path_list[0]):
        file_type = "json"

    stack_name: str = input("StackName: ")
    if not stack_name:
        raise NoNameEntered("No stack name specified")

    file_data: dict = s3.get_object_data(file_type)
    if "Parameters" in file_data:
        paramprocessor = ParamProcessor(cloudformation.profile,
                                        cloudformation.region,
                                        file_data["Parameters"])
        paramprocessor.process_stack_params()
        create_parameters = paramprocessor.processed_params
    else:
        create_parameters = []

    template_body_location: str = s3.get_object_url(
        version="" if not version else str(version))
    cloudformation_args = {
        "cloudformation_action": cloudformation.client.create_stack,
        "StackName": stack_name,
        "TemplateURL": template_body_loacation,
        "Parameters": create_parameters,
    }

    return cloudformation_args
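A hedged sketch of how construct_s3_creation_args might be driven (the bucket path is hypothetical; note that the "cloudformation_action" entry has to be popped before the remaining keys are passed to boto3):

cloudformation = Cloudformation(profile="default", region="us-east-1")
args = construct_s3_creation_args(
    cloudformation, bucket="my-bucket/template.yml", version=False)
action = args.pop("cloudformation_action")  # cloudformation.client.create_stack
response = action(**args)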
Example #9
def validate_stack(
    profile: Optional[Union[str, bool]] = False,
    region: Optional[Union[str, bool]] = False,
    local_path: Union[str, bool] = False,
    root: bool = False,
    bucket: Optional[str] = None,
    version: Union[str, bool] = False,
    no_print: bool = False,
) -> None:
    """Validate the selected cloudformation template using boto3 api.

    This is also used internally by create_stack and update_stack
    operations.

    :param profile: Use a different profile for this operation
    :type profile: Union[bool, str], optional
    :param region: Use a different region for this operation
    :type region: Union[bool, str], optional
    :param local_path: Select a template from local machine
    :type local_path: Union[bool, str], optional
    :param root: Search local file from root directory
    :type root: bool, optional
    :param bucket: specify a bucket/bucketpath to skip s3 selection
    :type bucket: str, optional
    :param version: use a previous version of the template
    :type version: Union[bool, str], optional
    :param no_print: Don't print the response, only check for exceptions
    :type no_print: bool, optional
    """
    cloudformation = Cloudformation(profile, region)

    if local_path:
        if not isinstance(local_path, str):
            fzf = Pyfzf()
            local_path = str(
                fzf.get_local_file(
                    search_from_root=root,
                    cloudformation=True,
                    header="select a cloudformation template to validate",
                ))
        check_is_valid(local_path)
        with open(local_path, "r") as file_body:
            response = cloudformation.client.validate_template(
                TemplateBody=file_body.read())
    else:
        s3 = S3(profile, region)
        s3.set_bucket_and_path(bucket)
        if not s3.bucket_name:
            s3.set_s3_bucket(
                header="select a bucket which contains the template")
        if not s3.path_list[0]:
            s3.set_s3_object()

        check_is_valid(s3.path_list[0])

        if version is True:
            version = s3.get_object_version(s3.bucket_name,
                                            s3.path_list[0])[0].get(
                                                "VersionId", False)

        template_body_location = s3.get_object_url(
            "" if not version else str(version))
        response = cloudformation.client.validate_template(
            TemplateURL=template_body_location)

    if not no_print:
        response.pop("ResponseMetadata", None)
        print(json.dumps(response, indent=4, default=str))
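Two hedged invocation examples for validate_stack (the template path and bucket are hypothetical):

# validate a template on the local machine
validate_stack(profile="default", region="us-east-1", local_path="template.yml")

# validate a template stored in s3, prompting the user to pick a version
validate_stack(bucket="my-bucket/template.yml", version=True)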
Example #10
def update_object_name(s3: S3, version: bool = False) -> None:
    """Update object name.

    :param s3: S3 class instance
    :type s3: S3
    :param version: whether to rename a specific version; this will create a new object
    :type version: bool, optional
    """
    print(
        "Enter the new name below (format: newname or path/newname for a new path)"
    )
    new_name = input("Name(Original: %s): " % s3.path_list[0])

    if not version:
        print("(dryrun) rename: s3://%s/%s to s3://%s/%s" %
              (s3.bucket_name, s3.path_list[0], s3.bucket_name, new_name))
        if get_confirmation("Confirm?"):
            print("rename: s3://%s/%s to s3://%s/%s" %
                  (s3.bucket_name, s3.path_list[0], s3.bucket_name, new_name))
            # initialise empty s3_args so that get_copy_args will use all the original values
            s3_args = S3Args(s3)
            copy_object_args = get_copy_args(s3,
                                             s3.path_list[0],
                                             s3_args,
                                             extra_args=True)
            copy_source = {
                "Bucket": s3.bucket_name,
                "Key": s3.path_list[0],
            }
            s3transferwrapper = S3TransferWrapper()
            s3.client.copy(
                copy_source,
                s3.bucket_name,
                new_name,
                Callback=S3Progress(s3.path_list[0], s3.bucket_name,
                                    s3.client),
                ExtraArgs=copy_object_args,
                Config=s3transferwrapper.transfer_config,
            )
            s3.client.delete_object(
                Bucket=s3.bucket_name,
                Key=s3.path_list[0],
            )

    else:
        # get version
        obj_version = s3.get_object_version(key=s3.path_list[0])[0]
        print("(dryrun) rename: s3://%s/%s to s3://%s/%s with version %s" % (
            s3.bucket_name,
            obj_version.get("Key"),
            s3.bucket_name,
            new_name,
            obj_version.get("VersionId"),
        ))
        if get_confirmation("Confirm?"):
            print("rename: s3://%s/%s to s3://%s/%s with version %s" % (
                s3.bucket_name,
                obj_version.get("Key"),
                s3.bucket_name,
                new_name,
                obj_version.get("VersionId"),
            ))
            # initialise empty s3_args so that get_copy_args will use all the original values
            s3_args = S3Args(s3)
            copy_object_args = get_copy_args(
                s3,
                s3.path_list[0],
                s3_args,
                extra_args=True,
                version=obj_version.get("VersionId"),
            )
            copy_source = {
                "Bucket": s3.bucket_name,
                "Key": obj_version.get("Key"),
                "VersionId": obj_version.get("VersionId"),
            }
            s3transferwrapper = S3TransferWrapper()
            s3.client.copy(
                copy_source,
                s3.bucket_name,
                new_name,
                Callback=S3Progress(
                    obj_version.get("Key", ""),
                    s3.bucket_name,
                    s3.client,
                    version_id=obj_version.get("VersionId"),
                ),
                ExtraArgs=copy_object_args,
                Config=s3transferwrapper.transfer_config,
            )
Example #11
def object_s3(
    profile: Union[str, bool] = False,
    bucket: Optional[str] = None,
    recursive: bool = False,
    version: bool = False,
    allversion: bool = False,
    exclude: Optional[List[str]] = None,
    include: Optional[List[str]] = None,
    name: bool = False,
    storage: bool = False,
    encryption: bool = False,
    metadata: bool = False,
    tagging: bool = False,
    acl: bool = False,
) -> None:
    """Update selected object settings.

    Display a menu based on recursive and version requirement.
    If name is true, only handle rename.

    :param profile: use a different profile for this operation
    :type profile: Union[str, bool], optional
    :param bucket: bucket name that contains the object
    :type bucket: str, optional
    :param recursive: recursively update object attributes
    :type recursive: bool, optional
    :param version: select a version of the object to update
    :type version: bool, optional
    :param allversion: update all versions of an object
    :type allversion: bool, optional
    :param exclude: glob pattern to exclude
    :type exclude: List[str], optional
    :param include: glob pattern to include
    :type include: List[str], optional
    :param name: update name
    :type name: bool, optional
    :param storage: update storage
    :type storage: bool, optional
    :param encryption: update encryption
    :type encryption: bool, optional
    :param metadata: update metadata
    :type metadata: bool, optional
    :param tagging: update tagging
    :type tagging: bool, optional
    :param acl: update acl
    :type acl: bool, optional
    """
    if exclude is None:
        exclude = []
    if include is None:
        include = []

    if allversion:
        version = True

    s3 = S3(profile)
    s3.set_bucket_and_path(bucket)
    if not s3.bucket_name:
        s3.set_s3_bucket()
    if recursive and not s3.path_list[0]:
        s3.set_s3_path()
    elif name and not s3.path_list[0]:
        s3.set_s3_object(version)
    elif not s3.path_list[0]:
        s3.set_s3_object(version, multi_select=True)

    # handle rename
    if name:
        update_object_name(s3, version)

    elif recursive:
        update_object_recursive(s3, storage, acl, metadata, encryption,
                                tagging, exclude, include)

    elif version:
        update_object_version(s3, allversion, acl, tagging)

    else:
        # update single object
        s3_args = S3Args(s3)
        s3_args.set_extra_args(storage, acl, metadata, encryption, tagging)
        # check if only tags or acl is being updated
        # this way it won't create extra versions on the object, if versioning is enabled
        check_result = s3_args.check_tag_acl()

        for s3_key in s3.path_list:
            print("(dryrun) update: s3://%s/%s" % (s3.bucket_name, s3_key))
        if get_confirmation("Confirm?"):
            for s3_key in s3.path_list:
                print("update: s3://%s/%s" % (s3.bucket_name, s3_key))
                if check_result:
                    if check_result.get("Tags"):
                        s3.client.put_object_tagging(
                            Bucket=s3.bucket_name,
                            Key=s3_key,
                            Tagging={"TagSet": check_result.get("Tags")},
                        )
                    if check_result.get("Grants"):
                        grant_args = {"Bucket": s3.bucket_name, "Key": s3_key}
                        grant_args.update(check_result.get("Grants", {}))
                        s3.client.put_object_acl(**grant_args)

                else:
                    # Note: this will create new version if version is enabled
                    copy_object_args = get_copy_args(s3,
                                                     s3_key,
                                                     s3_args,
                                                     extra_args=True)
                    copy_source = {"Bucket": s3.bucket_name, "Key": s3_key}
                    s3transferwrapper = S3TransferWrapper()
                    s3.client.copy(
                        copy_source,
                        s3.bucket_name,
                        s3_key,
                        Callback=S3Progress(s3_key, s3.bucket_name, s3.client),
                        ExtraArgs=copy_object_args,
                        Config=s3transferwrapper.transfer_config,
                    )
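object_s3 is the user-facing entry point; two hedged invocations (the bucket paths are hypothetical):

# rename a single object, prompting for the new key
object_s3(bucket="my-bucket/hello.txt", name=True)

# update tagging across all versions of the selected objects
object_s3(bucket="my-bucket/", allversion=True, tagging=True)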
Example #12
def upload_s3(
    profile: Union[str, bool] = False,
    bucket: Optional[str] = None,
    local_paths: Optional[Union[str, list]] = None,
    recursive: bool = False,
    hidden: bool = False,
    search_root: bool = False,
    sync: bool = False,
    exclude: Optional[List[str]] = None,
    include: Optional[List[str]] = None,
    extra_config: bool = False,
) -> None:
    """Upload local files/directories to s3.

    Upload through boto3 s3 client.
    The glob pattern exclude list is handled first, then the include list.

    :param profile: profile to use for this operation
    :type profile: Union[str, bool], optional
    :param bucket: specify bucket to upload
    :type bucket: str, optional
    :param local_paths: local file paths to upload
    :type local_paths: list, optional
    :param recursive: upload directory
    :type recursive: bool, optional
    :param hidden: include hidden files during search
    :type hidden: bool, optional
    :param search_root: search from root
    :type search_root: bool, optional
    :param sync: use aws cli s3 sync
    :type sync: bool, optional
    :param exclude: glob patterns to exclude
    :type exclude: List[str], optional
    :param include: glob patterns to include
    :type include: List[str], optional
    :param extra_config: configure extra settings during upload
    :type extra_config: bool, optional
    """
    if not local_paths:
        local_paths = []
    if not exclude:
        exclude = []
    if not include:
        include = []

    s3 = S3(profile)
    s3.set_bucket_and_path(bucket)
    if not s3.bucket_name:
        s3.set_s3_bucket()
    if not s3.path_list[0]:
        s3.set_s3_path()

    if not local_paths:
        fzf = Pyfzf()
        recursive = bool(recursive or sync)
        # don't allow multi_select for recursive operation
        multi_select = not recursive
        local_paths = fzf.get_local_file(
            search_from_root=search_root,
            directory=recursive,
            hidden=hidden,
            multi_select=multi_select,
        )

    # get the first item from the array since recursive operation doesn't support multi_select
    # local_path is used for sync and recursive operation
    # local_paths is used for single file operation
    if isinstance(local_paths, list):
        local_path = str(local_paths[0])
    else:
        local_path = str(local_paths)

    # construct extra argument
    extra_args = S3Args(s3)
    if extra_config:
        extra_args.set_extra_args(upload=True)

    if sync:
        sync_s3(
            exclude=exclude,
            include=include,
            from_path=local_path,
            to_path="s3://%s/%s" % (s3.bucket_name, s3.path_list[0]),
        )

    elif recursive:
        recursive_upload(s3, local_path, exclude, include, extra_args)

    else:
        for filepath in local_paths:
            # get the formatted s3 destination
            destination_key = s3.get_s3_destination_key(filepath)
            print("(dryrun) upload: %s to s3://%s/%s" %
                  (filepath, s3.bucket_name, destination_key))

        if get_confirmation("Confirm?"):
            for filepath in local_paths:
                destination_key = s3.get_s3_destination_key(filepath)
                print("upload: %s to s3://%s/%s" %
                      (filepath, s3.bucket_name, destination_key))
                transfer = S3TransferWrapper(s3.client)
                transfer.s3transfer.upload_file(
                    filepath,
                    s3.bucket_name,
                    destination_key,
                    callback=S3Progress(filepath),
                    extra_args=extra_args.extra_args,
                )
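A hedged upload_s3 example combining the exclude/include behaviour described above (the paths are hypothetical):

# recursively upload ./src, skipping everything except python files
upload_s3(bucket="my-bucket/src/", local_paths="./src", recursive=True,
          exclude=["*"], include=["*.py"])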
Example #13
def s3_replacing_update(cloudformation: Cloudformation, bucket: Optional[str],
                        version: Union[str, bool]) -> Dict[str, Any]:
    """Format argument for a replacing updating through providing template on s3.

    Read the template from s3, comparing parameter names with the original stack
    to provide a preview of value if possible.

    :param cloudformation: Cloudformation instance
    :type cloudformation: Cloudformation
    :param bucket: bucket path, if set, skip fzf selection
    :type bucket: str, optional
    :param version: whether to use a versioned template in s3
    :type version: Union[str, bool]
    :return: formatted arguments that are ready to be used by boto3
    :rtype: Dict[str, Any]
    """
    s3 = S3(profile=cloudformation.profile, region=cloudformation.region)
    s3.set_bucket_and_path(bucket)
    if not s3.bucket_name:
        s3.set_s3_bucket()
    if not s3.path_list[0]:
        s3.set_s3_object()

    check_is_valid(s3.path_list[0])

    if version is True:
        version = s3.get_object_version(s3.bucket_name,
                                        s3.path_list[0])[0].get(
                                            "VersionId", False)

    validate_stack(
        cloudformation.profile,
        cloudformation.region,
        bucket="%s/%s" % (s3.bucket_name, s3.path_list[0]),
        version=version if version else False,
        no_print=True,
    )

    file_type: str = ""
    if is_yaml(s3.path_list[0]):
        file_type = "yaml"
    elif is_json(s3.path_list[0]):
        file_type = "json"

    file_data: Dict[str, Any] = s3.get_object_data(file_type)
    if "Parameters" in file_data:
        paramprocessor = ParamProcessor(
            cloudformation.profile,
            cloudformation.region,
            file_data["Parameters"],
            cloudformation.stack_details.get("Parameters"),
        )
        paramprocessor.process_stack_params()
        updated_parameters = paramprocessor.processed_params
    else:
        updated_parameters = []

    template_body_location = s3.get_object_url(
        "" if not version else str(version))

    cloudformation_args = {
        "cloudformation_action": cloudformation.client.update_stack,
        "StackName": cloudformation.stack_name,
        "TemplateURL": template_body_loacation,
        "UsePreviousTemplate": False,
        "Parameters": updated_parameters,
    }

    return cloudformation_args