def _set_policy(self, update: bool = False, search_from_root: bool = False) -> None:
    """Set the stack_policy for the stack.

    Used to prevent update on certain resources.

    :param update: determine if stack is updating, if true, set different args
        aws cloudformation takes StackPolicyBody for creation and
        StackPolicyDuringUpdateBody for update overwrite
    :type update: bool, optional
    :param search_from_root: search files from root
    :type search_from_root: bool, optional
    """
    print(80 * "-")
    fzf = Pyfzf()
    file_path: str = str(
        fzf.get_local_file(
            search_from_root=search_from_root,
            cloudformation=True,
            empty_allow=True,
            header="select the policy document you would like to use",
        ))
    # empty_allow=True means the user may skip selection; only set the
    # policy when a file was actually chosen
    if file_path:
        # boto3 takes a different keyword for create vs update operations
        arg_name = "StackPolicyDuringUpdateBody" if update else "StackPolicyBody"
        with open(file_path, "r") as policy_file:
            self._extra_args[arg_name] = policy_file.read()
def create_stack(
    profile: Union[str, bool] = False,
    region: Union[str, bool] = False,
    local_path: Union[str, bool] = False,
    root: bool = False,
    wait: bool = False,
    extra: bool = False,
    bucket: str = None,
    version: Union[str, bool] = False,
) -> None:
    """Handle the creation of the cloudformation stack.

    :param profile: use a different profile for this operation
    :type profile: Union[bool, str], optional
    :param region: use a different region for this operation
    :type region: Union[bool, str], optional
    :param local_path: Select a template from local machine
    :type local_path: Union[bool, str], optional
    :param root: Search local file from root directory
    :type root: bool, optional
    :param wait: wait for stack to be completed before exiting the program
    :type wait: bool, optional
    :param extra: configure extra options for the stack, (tags, IAM, termination protection etc..)
    :type extra: bool, optional
    :param bucket: specify a bucket/bucketpath to skip s3 selection
    :type bucket: str, optional
    :param version: use a previous version of the template
    :type version: Union[bool, str], optional
    :raises NoNameEntered: when the new stack receive empty string as stack_name
    """
    cloudformation = Cloudformation(profile, region)

    if local_path:
        # a bare True flag means "pick a local file interactively"
        if not isinstance(local_path, str):
            fzf = Pyfzf()
            local_path = str(
                fzf.get_local_file(search_from_root=root, cloudformation=True))
        cloudformation_args = construct_local_creation_args(
            cloudformation, str(local_path))
    else:
        cloudformation_args = construct_s3_creation_args(
            cloudformation, bucket, version)

    if extra:
        extra_args = CloudformationArgs(cloudformation)
        extra_args.set_extra_args(search_from_root=root)
        cloudformation_args.update(extra_args.extra_args)

    response = cloudformation.execute_with_capabilities(**cloudformation_args)
    # ResponseMetadata is boto3 transport noise; drop it before display
    response.pop("ResponseMetadata", None)
    print(json.dumps(response, indent=4, default=str))
    print(80 * "-")
    print("Stack creation initiated")

    if wait:
        cloudformation.stack_name = cloudformation_args["StackName"]
        cloudformation.wait("stack_create_complete",
                            "Waiting for stack to be ready ...")
        print("Stack created")
def validate_stack(
    profile: Optional[Union[str, bool]] = False,
    region: Optional[Union[str, bool]] = False,
    local_path: Union[str, bool] = False,
    root: bool = False,
    bucket: str = None,
    version: Union[str, bool] = False,
    no_print: bool = False,
) -> None:
    """Validate the selected cloudformation template using boto3 api.

    This is also used internally by create_stack and update_stack
    operations.

    :param profile: Use a different profile for this operation
    :type profile: Union[bool, str], optional
    :param region: Use a different region for this operation
    :type region: Union[bool, str], optional
    :param local_path: Select a template from local machine
    :type local_path: Union[bool, str], optional
    :param root: Search local file from root directory
    :type root: bool, optional
    :param bucket: specify a bucket/bucketpath to skip s3 selection
    :type bucket: str, optional
    :param version: use a previous version of the template
    :type version: Union[bool, str], optional
    :param no_print: Don't print the response, only check exception
    :type no_print: bool, optional
    """
    cloudformation = Cloudformation(profile, region)

    if local_path:
        # a bare True flag means "pick a local file interactively"
        if not isinstance(local_path, str):
            fzf = Pyfzf()
            local_path = str(
                fzf.get_local_file(
                    search_from_root=root,
                    cloudformation=True,
                    header="select a cloudformation template to validate",
                ))
        check_is_valid(local_path)
        with open(local_path, "r") as file_body:
            response = cloudformation.client.validate_template(
                TemplateBody=file_body.read())
    else:
        s3 = S3(profile, region)
        s3.set_bucket_and_path(bucket)
        if not s3.bucket_name:
            s3.set_s3_bucket(
                header="select a bucket which contains the template")
        if not s3.path_list[0]:
            s3.set_s3_object()
        check_is_valid(s3.path_list[0])
        # a bare True flag means "resolve the latest version id"
        if version is True:
            version = s3.get_object_version(
                s3.bucket_name, s3.path_list[0])[0].get("VersionId", False)
        template_body_location = s3.get_object_url(
            "" if not version else str(version))
        response = cloudformation.client.validate_template(
            TemplateURL=template_body_location)

    if not no_print:
        # ResponseMetadata is boto3 transport noise; drop it before display
        response.pop("ResponseMetadata", None)
        print(json.dumps(response, indent=4, default=str))
class TestPyfzf(unittest.TestCase):
    """Unit tests for the Pyfzf helper class.

    stdout is redirected in setUp so tests can assert on printed output;
    subprocess calls are mocked throughout, no real fzf binary is launched.
    """

    def setUp(self):
        # load the shared test config fixture so Pyfzf picks up known settings
        fileloader = FileLoader()
        config_path = Path(__file__).resolve().parent.joinpath(
            "../data/fzfaws.yml")
        fileloader.load_config_file(config_path=str(config_path))
        # capture stdout for assertions against printed messages
        self.capturedOutput = io.StringIO()
        sys.stdout = self.capturedOutput
        self.fzf = Pyfzf()

    def tearDown(self):
        # restore the real stdout replaced in setUp
        sys.stdout = sys.__stdout__

    @patch("fzfaws.utils.pyfzf.sys")
    def test_constructor(self, mocked_sys):
        # default construction resolves a bundled fzf binary for this platform
        self.assertRegex(
            self.fzf.fzf_path,
            r".*/fzfaws.*/libs/fzf-[0-9]\.[0-9]+\.[0-9]-(linux|darwin)_(386|amd64)",
        )
        self.assertEqual("", self.fzf.fzf_string)

        # 32bit linux (maxsize fits in 32 bits)
        mocked_sys.maxsize = 4294967295
        mocked_sys.platform = "linux"
        fzf = Pyfzf()
        self.assertRegex(
            fzf.fzf_path, r".*/fzfaws.*/libs/fzf-[0-9]\.[0-9]+\.[0-9]-linux_386")

        # 64bit darwin
        mocked_sys.maxsize = 42949672951
        mocked_sys.platform = "darwin"
        fzf = Pyfzf()
        self.assertRegex(
            fzf.fzf_path, r".*/fzfaws.*/libs/fzf-[0-9]\.[0-9]+\.[0-9]-darwin_amd64")

        # unsupported platform should exit with an explanatory message
        mocked_sys.maxsize = 42949672951
        mocked_sys.platform = "windows"
        mocked_sys.exit.side_effect = sys.exit
        self.assertRaises(SystemExit, Pyfzf)
        self.assertEqual(
            self.capturedOutput.getvalue(),
            "fzfaws currently is only compatible with python3.6+ on MacOS or Linux\n",
        )

    def test_append_fzf(self):
        # append_fzf accumulates onto fzf_string
        self.fzf.fzf_string = ""
        self.fzf.append_fzf("hello\n")
        self.fzf.append_fzf("world\n")
        self.assertEqual("hello\nworld\n", self.fzf.fzf_string)

    def test_construct_fzf_command(self):
        # cmd_list[0] is the machine-dependent binary path; compare the rest
        cmd_list = self.fzf._construct_fzf_cmd()
        self.assertEqual(
            cmd_list[1:],
            [
                "--ansi",
                "--expect=ctrl-c",
                "--color=dark",
                "--color=fg:-1,bg:-1,hl:#c678dd,fg+:#ffffff,bg+:-1,hl+:#c678dd",
                "--color=info:#98c379,prompt:#61afef,pointer:#e06c75,marker:#e5c07b,spinner:#61afef,header:#61afef",
                "--height",
                "100%",
                "--layout=reverse",
                "--border",
                "--cycle",
                "--bind=alt-a:toggle-all,alt-j:jump,alt-0:top,alt-s:toggle-sort",
            ],
        )

    @patch.object(subprocess, "Popen")
    @patch.object(subprocess, "check_output")
    def test_execute_fzf(self, mocked_output, mocked_popen):
        mocked_output.return_value = b"hello"
        result = self.fzf.execute_fzf(print_col=1)
        self.assertEqual(result, "hello")
        mocked_output.assert_called_once()

        # empty selection raises unless empty_allow is set
        mocked_output.return_value = b""
        self.assertRaises(NoSelectionMade, self.fzf.execute_fzf)

        mocked_output.return_value = b""
        result = self.fzf.execute_fzf(empty_allow=True)
        self.assertEqual("", result)

        # multi_select returns a list even for a single selection
        mocked_output.return_value = b"hello"
        result = self.fzf.execute_fzf(multi_select=True, print_col=1)
        self.assertEqual(result, ["hello"])

        mocked_output.return_value = b"hello\nworld"
        result = self.fzf.execute_fzf(multi_select=True,
                                      print_col=1,
                                      preview="hello",
                                      header="foo boo")
        self.assertEqual(result, ["hello", "world"])

        # print_col=0 returns the entire line rather than a single column
        mocked_output.return_value = b"hello world\nfoo boo"
        result = self.fzf.execute_fzf(multi_select=True, print_col=0)
        self.assertEqual(result, ["hello world", "foo boo"])

    @patch.object(subprocess, "Popen")
    @patch.object(subprocess, "check_output")
    def test_check_ctrl_c(self, mocked_output, mocked_popen):
        # fzf reports ctrl-c via its --expect output; should surface as
        # KeyboardInterrupt
        mocked_output.return_value = b"ctrl-c"
        self.assertRaises(KeyboardInterrupt, self.fzf.execute_fzf)

        mocked_output.return_value = b"hello world"
        try:
            result = self.fzf.execute_fzf()
            self.assertEqual(result, "world")
        except:
            self.fail("ctrl-c test failed, unexpected exception raise")

    @patch("fzfaws.utils.Pyfzf._check_fd")
    @patch.object(subprocess, "Popen")
    @patch.object(subprocess, "check_output")
    def test_get_local_file(self, mocked_output, mocked_popen, mocked_check):
        # _check_fd False -> fall back to the "find" command
        mocked_check.return_value = False
        mocked_output.return_value = b""
        self.assertRaises(NoSelectionMade, self.fzf.get_local_file)
        mocked_popen.assert_called_with("find * -type f",
                                        shell=True,
                                        stderr=ANY,
                                        stdout=ANY)

        mocked_output.return_value = b"hello"
        result = self.fzf.get_local_file()
        self.assertEqual("hello", result)

        mocked_output.return_value = b"hello"
        result = self.fzf.get_local_file(multi_select=True)
        self.assertEqual(result, ["hello"])

        mocked_output.return_value = b"hello\nworld\n"
        result = self.fzf.get_local_file(multi_select=True)
        self.assertEqual(result, ["hello", "world"])

        # directory mode lists directories and offers "./" first
        result = self.fzf.get_local_file(directory=True, search_from_root=True)
        mocked_popen.assert_called_with(
            "echo \033[33m./\033[0m; find * -type d",
            shell=True,
            stderr=ANY,
            stdout=ANY)

        # cloudformation mode restricts to template file extensions
        result = self.fzf.get_local_file(cloudformation=True)
        mocked_popen.assert_called_with(
            'find * -type f -name "*.json" -o -name "*.yaml" -o -name "*.yml"',
            shell=True,
            stderr=ANY,
            stdout=ANY,
        )

        # _check_fd True -> use "fd" commands instead of "find"
        mocked_output.reset_mock()
        mocked_check.return_value = True
        result = self.fzf.get_local_file(cloudformation=True, header="hello")
        mocked_popen.assert_called_with(
            "fd --type f --regex '(yaml|yml|json)$'",
            shell=True,
            stderr=ANY,
            stdout=ANY,
        )
        mocked_output.assert_called_once()

        result = self.fzf.get_local_file(directory=True)
        mocked_popen.assert_called_with(
            "echo \033[33m./\033[0m; fd --type d",
            shell=True,
            stderr=ANY,
            stdout=ANY,
        )

        result = self.fzf.get_local_file()
        mocked_popen.assert_called_with(
            "fd --type f",
            shell=True,
            stderr=ANY,
            stdout=ANY,
        )

    @patch("fzfaws.utils.pyfzf.subprocess")
    def test_check_fd(self, mocked_subprocess):
        mocked_subprocess.run.return_value = True
        result = self.fzf._check_fd()
        self.assertEqual(result, True)

        # any failure running "fd" means it is not available on this system
        mocked_subprocess.run.side_effect = Exception(
            subprocess.CalledProcessError)
        result = self.fzf._check_fd()
        self.assertEqual(result, False)

    def test_process_list(self):
        # empty list raises unless empty_allow is set
        self.fzf.fzf_string = ""
        self.assertRaises(EmptyList, self.fzf.process_list, [], "123")

        self.fzf.fzf_string = ""
        self.fzf.process_list([], "123", "asfasd", "bbbb", empty_allow=True)

        # missing keys render as "None"
        test_list = [{"foo": 1, "boo": 2}, {"foo": "b"}]
        self.fzf.process_list(test_list, "foo")
        self.assertEqual(self.fzf.fzf_string, "foo: 1\nfoo: b\n")

        self.fzf.fzf_string = ""
        self.fzf.process_list(test_list, "boo")
        self.assertEqual(self.fzf.fzf_string, "boo: 2\nboo: None\n")

        self.fzf.fzf_string = ""
        self.fzf.process_list(test_list, "www")
        self.assertEqual(self.fzf.fzf_string, "www: None\nwww: None\n")

        # multiple keys are joined with " | "
        self.fzf.fzf_string = ""
        self.fzf.process_list(test_list, "foo", "boo")
        self.assertEqual(self.fzf.fzf_string,
                         "foo: 1 | boo: 2\nfoo: b | boo: None\n")

    @patch.object(Pyfzf, "execute_fzf")
    def test_format_selected_to_dict(self, mocked_execute):
        # "key: value | key: value" strings parse into a dict; "None" -> None
        mocked_execute.return_value = "foo: 1 | boo: 2 | wtf: None"
        result = str(self.fzf.execute_fzf(print_col=0))
        result = self.fzf.format_selected_to_dict(result)
        self.assertEqual(result, {"boo": "2", "foo": "1", "wtf": None})
def upload_s3(
    profile: bool = False,
    bucket: str = None,
    local_paths: Optional[Union[str, list]] = None,
    recursive: bool = False,
    hidden: bool = False,
    search_root: bool = False,
    sync: bool = False,
    exclude: Optional[List[str]] = None,
    include: Optional[List[str]] = None,
    extra_config: bool = False,
) -> None:
    """Upload local files/directories to s3.

    Upload through boto3 s3 client.
    Glob pattern exclude list are handled first then handle the include list.

    :param profile: profile to use for this operation
    :type profile: bool, optional
    :param bucket: specify bucket to upload
    :type bucket: str, optional
    :param local_paths: local file paths to upload
    :type local_paths: list, optional
    :param recursive: upload directory
    :type recursive: bool, optional
    :param hidden: include hidden files during search
    :type hidden: bool, optional
    :param search_root: search from root
    :type search_root: bool, optional
    :param sync: use aws cli s3 sync
    :type sync: bool, optional
    :param exclude: glob patterns to exclude
    :type exclude: List[str], optional
    :param include: glob patterns to include
    :type include: List[str], optional
    :param extra_config: configure extra settings during upload
    :type extra_config: bool, optional
    """
    # normalize the None defaults (avoids mutable default arguments)
    if not local_paths:
        local_paths = []
    if not exclude:
        exclude = []
    if not include:
        include = []

    s3 = S3(profile)
    s3.set_bucket_and_path(bucket)
    if not s3.bucket_name:
        s3.set_s3_bucket()
    if not s3.path_list[0]:
        s3.set_s3_path()

    if not local_paths:
        fzf = Pyfzf()
        # sync implies a directory upload, hence recursive
        recursive = bool(recursive or sync)
        # don't allow multi_select for recursive operation
        multi_select = not recursive
        local_paths = fzf.get_local_file(
            search_from_root=search_root,
            directory=recursive,
            hidden=hidden,
            multi_select=multi_select,
        )

    # get the first item from the array since recursive operation doesn't support multi_select
    # local_path is used for sync and recursive operation
    # local_paths is used for single file operation
    if isinstance(local_paths, list):
        local_path = str(local_paths[0])
    else:
        local_path = str(local_paths)

    # construct extra argument
    extra_args = S3Args(s3)
    if extra_config:
        extra_args.set_extra_args(upload=True)

    if sync:
        sync_s3(
            exclude=exclude,
            include=include,
            from_path=local_path,
            to_path="s3://%s/%s" % (s3.bucket_name, s3.path_list[0]),
        )
    elif recursive:
        recursive_upload(s3, local_path, exclude, include, extra_args)
    else:
        # dryrun preview of every destination before asking for confirmation
        for filepath in local_paths:
            # get the formated s3 destination
            destination_key = s3.get_s3_destination_key(filepath)
            print("(dryrun) upload: %s to s3://%s/%s" %
                  (filepath, s3.bucket_name, destination_key))

        if get_confirmation("Confirm?"):
            # the transfer wrapper is loop-invariant; build it once
            transfer = S3TransferWrapper(s3.client)
            for filepath in local_paths:
                destination_key = s3.get_s3_destination_key(filepath)
                print("upload: %s to s3://%s/%s" %
                      (filepath, s3.bucket_name, destination_key))
                transfer.s3transfer.upload_file(
                    filepath,
                    s3.bucket_name,
                    destination_key,
                    callback=S3Progress(filepath),
                    extra_args=extra_args.extra_args,
                )
def update_stack(
    profile: Optional[Union[str, bool]] = False,
    region: Optional[Union[str, bool]] = False,
    replace: bool = False,
    local_path: Union[str, bool] = False,
    root: bool = False,
    wait: bool = False,
    extra: bool = False,
    bucket: str = None,
    version: Union[str, bool] = False,
    dryrun: bool = False,
    cloudformation: Optional[Cloudformation] = None,
) -> Union[None, dict]:
    """Handle the update of cloudformation stacks.

    This is also used by changeset_stack to create its argument.
    The dryrun and cloudformation argument in the function is only
    used by changeset_stack.

    :param profile: use a different profile for this operation
    :type profile: Union[bool, str], optional
    :param region: use a different region for this operation
    :type region: Union[bool, str], optional
    :param replace: replace the template during update
    :type replace: bool, optional
    :param local_path: Select a template from local machine
    :type local_path: Union[bool, str], optional
    :param root: Search local file from root directory
    :type root: bool, optional
    :param wait: wait for stack to be completed before exiting the program
    :type wait: bool, optional
    :param extra: configure extra options for the stack, (tags, IAM, termination protection etc..)
    :type extra: bool, optional
    :param bucket: specify a bucket/bucketpath to skip s3 selection
    :type bucket: str, optional
    :param version: use a previous version of the template on s3 bucket
    :type version: Union[str, bool], optional
    :param dryrun: don't update, rather return update information, used for changeset_stack()
    :type dryrun: bool, optional
    :param cloudformation: a cloudformation instance, when calling from changeset_stack(), pass cloudformation in
    :type cloudformation: Cloudformation, optional
    :return: If dryrun is set, return all the update details as dict {'Parameters': value, 'Tags': value...}
    :rtype: Union[None, dict]
    """
    # changeset_stack passes in a cloudformation with a stack already selected
    if not cloudformation:
        cloudformation = Cloudformation(profile, region)
        cloudformation.set_stack()

    extra_args = CloudformationArgs(cloudformation)

    if not replace:
        # non replacing update, just update the parameter
        cloudformation_args = non_replacing_update(cloudformation)
    else:
        # replace existing template
        if local_path:
            # template provided in local machine; a bare True flag means
            # "pick a local file interactively"
            if not isinstance(local_path, str):
                fzf = Pyfzf()
                local_path = str(
                    fzf.get_local_file(search_from_root=root,
                                       cloudformation=True))
            cloudformation_args = local_replacing_update(
                cloudformation, str(local_path))
        else:
            # template provided in s3
            cloudformation_args = s3_replacing_update(cloudformation, bucket,
                                                      version)

    if extra:
        extra_args.set_extra_args(update=True,
                                  search_from_root=root,
                                  dryrun=dryrun)
        cloudformation_args.update(extra_args.extra_args)

    if dryrun:
        return cloudformation_args

    response = cloudformation.execute_with_capabilities(**cloudformation_args)
    # ResponseMetadata is boto3 transport noise; drop it before display
    response.pop("ResponseMetadata", None)
    print(json.dumps(response, indent=4, default=str))
    print(80 * "-")
    print("Stack update initiated")

    if wait:
        cloudformation.wait("stack_update_complete",
                            "Waiting for stack to be updated ...")
        print("Stack updated")