def get_object_data(self, file_type: str = "") -> Dict[str, Any]:
    """Read the s3 object.

    Read the s3 object file and, if file_type is yaml/json, load the file
    into a dict. Currently only used for cloudformation.

    :param file_type: type of file to process, supported values: yaml/json
    :type file_type: str
    :return: processed dict of json or yaml
    :raises InvalidFileType: when the file_type is invalid
    :rtype: Dict[str, Any]
    """
    with Spinner.spin(message="Reading file from s3 ..."):
        s3_object = self.resource.Object(self.bucket_name, self.path_list[0])
        body = s3_object.get()["Body"].read()
        body_dict: Dict[str, Any] = {}
        fileloader = FileLoader(body=body)
        if file_type == "yaml":
            body_dict = fileloader.process_yaml_body()
        elif file_type == "json":
            body_dict = fileloader.process_json_body()
        else:
            raise InvalidFileType
    return body_dict
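# Usage sketch (illustrative, not taken from the source): shows how
# get_object_data might be driven. The import path below and the manual
# bucket_name/path_list assignment are assumptions; in fzfaws these
# attributes are normally populated by the wrapper's interactive helpers.
from fzfaws.s3.s3 import S3  # assumed import path

s3 = S3()
s3.bucket_name = "kazhala-lol"  # hypothetical bucket
s3.path_list = ["hello.yaml"]  # hypothetical object key
template_dict = s3.get_object_data("yaml")  # parsed template as a dict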
def setUp(self):
    self.fileloader = FileLoader()
    curr_path = os.path.dirname(os.path.abspath(__file__))
    self.test_json = tempfile.NamedTemporaryFile()
    with open(self.test_json.name, "w") as file:
        file.write(json.dumps({"hello": "world", "foo": "boo"}))
    self.test_yaml = os.path.join(curr_path, "../data/fzfaws.yml")
def setUp(self):
    fileloader = FileLoader()
    config_path = Path(__file__).resolve().parent.joinpath("../data/fzfaws.yml")
    fileloader.load_config_file(config_path=str(config_path))
    capturedOutput = io.StringIO()
    sys.stdout = capturedOutput
    self.kms = KMS()
def setUp(self):
    self.capturedOutput = io.StringIO()
    sys.stdout = self.capturedOutput
    self.cloudformation = Cloudformation()
    fileloader = FileLoader()
    config_path = Path(__file__).resolve().parent.joinpath("../data/fzfaws.yml")
    fileloader.load_config_file(config_path=str(config_path))
def test_constructor(self):
    fileloader = FileLoader()
    config_path = Path(__file__).resolve().parent.joinpath("../data/fzfaws.yml")
    fileloader.load_config_file(config_path=str(config_path))
    transfer = S3TransferWrapper(boto3.client("s3"))
    self.assertEqual(transfer.s3transfer._manager.config.num_download_attempts, 6)
    self.assertEqual(transfer.transfer_config.num_download_attempts, 6)
def construct_local_creation_args(
    cloudformation: Cloudformation, local_path: str
) -> Dict[str, Any]:
    """Construct cloudformation create argument for local file.

    Perform fzf search on local json/yaml files and then use validate_stack
    to validate the stack through the boto3 API before constructing the argument.

    :param cloudformation: Cloudformation instance
    :type cloudformation: Cloudformation
    :param local_path: local file path
    :type local_path: str
    :return: the constructed args that are ready for use with boto3
    :rtype: Dict[str, Any]
    """
    # validate file type, has to be either yaml or json
    check_is_valid(local_path)
    validate_stack(
        cloudformation.profile,
        cloudformation.region,
        local_path=local_path,
        no_print=True,
    )

    stack_name: str = input("StackName: ")
    if not stack_name:
        raise NoNameEntered("No stack name specified")

    fileloader = FileLoader(path=local_path)
    file_data: Dict[str, Any] = {}
    if is_yaml(local_path):
        file_data = fileloader.process_yaml_file()
    elif is_json(local_path):
        file_data = fileloader.process_json_file()

    # get params
    if "Parameters" in file_data["dictBody"]:
        paramprocessor = ParamProcessor(
            cloudformation.profile,
            cloudformation.region,
            file_data["dictBody"]["Parameters"],
        )
        paramprocessor.process_stack_params()
        create_parameters = paramprocessor.processed_params
    else:
        create_parameters = []

    cloudformation_args = {
        "cloudformation_action": cloudformation.client.create_stack,
        "StackName": stack_name,
        "TemplateBody": file_data["body"],
        "Parameters": create_parameters,
    }

    return cloudformation_args
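# Illustrative sketch (assumptions noted): construct_local_creation_args relies
# on FileLoader.process_yaml_file()/process_json_file() returning a dict with a
# raw "body" string and a parsed "dictBody" mapping, the same shape the tests
# in this section consume. The file path below is hypothetical.
fileloader = FileLoader(path="template.yaml")  # hypothetical local template
file_data = fileloader.process_yaml_file()
print(file_data["body"][:80])  # raw template text, later passed to boto3 as TemplateBody
print(file_data["dictBody"].get("Parameters", {}))  # parsed mapping used for parameter processing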
def local_replacing_update(
    cloudformation: Cloudformation, local_path: str
) -> Dict[str, Any]:
    """Format cloudformation argument for a local replacing update.

    A local replacing update uses a template on the local machine to perform
    the stack update. Process the new template and compare it with the previous
    parameter values to provide an old value preview.

    :param cloudformation: Cloudformation instance
    :type cloudformation: Cloudformation
    :param local_path: local file path to the template
    :type local_path: str
    :return: formatted argument that's ready to be used by boto3
    :rtype: Dict[str, Any]
    """
    check_is_valid(local_path)
    validate_stack(
        cloudformation.profile,
        cloudformation.region,
        local_path=local_path,
        no_print=True,
    )

    fileloader = FileLoader(path=local_path)
    file_data: Dict[str, Any] = {}
    if is_yaml(local_path):
        file_data = fileloader.process_yaml_file()
    elif is_json(local_path):
        file_data = fileloader.process_json_file()

    # process params
    if "Parameters" in file_data["dictBody"]:
        paramprocessor = ParamProcessor(
            cloudformation.profile,
            cloudformation.region,
            file_data["dictBody"]["Parameters"],
            cloudformation.stack_details.get("Parameters"),
        )
        paramprocessor.process_stack_params()
        updated_parameters = paramprocessor.processed_params
    else:
        updated_parameters = []

    cloudformation_args = {
        "cloudformation_action": cloudformation.client.update_stack,
        "StackName": cloudformation.stack_name,
        "TemplateBody": file_data["body"],
        "UsePreviousTemplate": False,
        "Parameters": updated_parameters,
    }

    return cloudformation_args
def setUp(self):
    self.capturedOutput = io.StringIO()
    self.data_path = os.path.join(
        os.path.dirname(os.path.abspath(__file__)),
        "../data/cloudformation_template.yaml",
    )
    sys.stdout = self.capturedOutput
    fileloader = FileLoader()
    config_path = Path(__file__).resolve().parent.joinpath("../data/fzfaws.yml")
    fileloader.load_config_file(config_path=str(config_path))
def test_s3_replacing_update(
    self,
    mocked_stack,
    mocked_version,
    mocked_validate,
    mocked_data,
    mocked_process,
    mocked_url,
    mocked_execute,
):
    fileloader = FileLoader(self.data_path)
    mocked_data.return_value = fileloader.process_yaml_file()
    mocked_version.return_value = [{"VersionId": "111111"}]
    mocked_url.return_value = "https://s3-ap-southeast-2.amazonaws.com/kazhala-lol/hello.yaml?versionId=111111"

    update_stack(replace=True, bucket="kazhala-lol/hello.yaml", version=True)
    mocked_version.assert_called_with("kazhala-lol", "hello.yaml")
    mocked_validate.assert_called_with(
        "default",
        "us-east-1",
        bucket="kazhala-lol/hello.yaml",
        version="111111",
        no_print=True,
    )
    mocked_execute.assert_called_with(
        Parameters=[],
        StackName="",
        TemplateURL="https://s3-ap-southeast-2.amazonaws.com/kazhala-lol/hello.yaml?versionId=111111",
        UsePreviousTemplate=False,
        cloudformation_action=ANY,
    )

    mocked_version.reset_mock()
    update_stack(replace=True, bucket="kazhala-lol/hello.yaml", version="111111")
    mocked_version.assert_not_called()
    mocked_validate.assert_called_with(
        "default",
        "us-east-1",
        bucket="kazhala-lol/hello.yaml",
        version="111111",
        no_print=True,
    )
    mocked_execute.assert_called_with(
        Parameters=[],
        StackName="",
        TemplateURL="https://s3-ap-southeast-2.amazonaws.com/kazhala-lol/hello.yaml?versionId=111111",
        UsePreviousTemplate=False,
        cloudformation_action=ANY,
    )
def setUp(self):
    data_path = os.path.join(
        os.path.dirname(os.path.abspath(__file__)),
        "../data/cloudformation_template.yaml",
    )
    fileloader = FileLoader(path=data_path)
    params = fileloader.process_yaml_file()["dictBody"].get("Parameters", {})
    config_path = Path(__file__).resolve().parent.joinpath("../data/fzfaws.yml")
    fileloader.load_config_file(config_path=str(config_path))
    self.paramprocessor = ParamProcessor(params=params)
    self.capturedOutput = io.StringIO()
    sys.stdout = self.capturedOutput
def test_constructor(self):
    self.assertEqual(self.fileloader.path, "")
    self.assertEqual(self.fileloader.body, "")

    fileloader = FileLoader(path=self.test_json.name)
    self.assertEqual(fileloader.path, self.test_json.name)
    self.assertEqual(fileloader.body, "")
def test_get_default_args(self):
    fileloader = FileLoader()
    config_path = Path(__file__).resolve().parent.joinpath("../data/fzfaws.yml")
    fileloader.load_config_file(config_path=str(config_path))

    result = get_default_args("ec2", ["start", "-e", "-m"])
    self.assertEqual(result, ["start", "--wait", "-e", "-m"])

    result = get_default_args("s3", ["upload", "-b", "-x"])
    self.assertEqual(result, ["upload", "--hidden", "-b", "-x"])

    result = get_default_args("s3", ["presign"])
    self.assertEqual(result, ["presign", "-e", "3600"])

    result = get_default_args("ec2", [])
    self.assertEqual(result, [])

    result = get_default_args("ec2", ["ls"])
    self.assertEqual(result, ["ls"])
def non_replacing_update(cloudformation: Cloudformation) -> Dict[str, Any]:
    """Format the required argument for a non-replacing update for boto3.

    Non-replacing update as in not replacing the template, only updating
    the parameters.

    :param cloudformation: Cloudformation instance
    :type cloudformation: Cloudformation
    :return: formatted argument that's ready to be used by boto3
    :rtype: Dict[str, Any]
    """
    template_response = cloudformation.client.get_template(
        StackName=cloudformation.stack_name
    )
    fileloader = FileLoader(body=template_response.get("TemplateBody", ""))
    template_data: Dict[str, Any] = {}
    try:
        template_data = fileloader.process_json_body()
    except json.JSONDecodeError:
        template_data = fileloader.process_yaml_body()

    updated_parameters: List[Dict[str, Any]] = []
    if template_data.get("Parameters"):
        paramprocessor = ParamProcessor(
            cloudformation.profile,
            cloudformation.region,
            template_data["Parameters"],
            cloudformation.stack_details.get("Parameters"),
        )
        paramprocessor.process_stack_params()
        updated_parameters = paramprocessor.processed_params
    else:
        updated_parameters = []

    cloudformation_args = {
        "cloudformation_action": cloudformation.client.update_stack,
        "StackName": cloudformation.stack_name,
        "UsePreviousTemplate": True,
        "Parameters": updated_parameters,
    }

    return cloudformation_args
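# Sketch (illustrative, not taken from the source): the argument-construction
# helpers above all return a dict whose "cloudformation_action" entry is the
# boto3 client method to invoke and whose remaining keys are the keyword
# arguments for that call; the mocked execute assertions in this section only
# confirm that shape (cloudformation_action=ANY). How the executor unpacks it
# is an assumption, and the cloudformation instance below is hypothetical.
cloudformation_args = non_replacing_update(cloudformation)
action = cloudformation_args.pop("cloudformation_action")  # e.g. client.update_stack
response = action(**cloudformation_args)  # boto3 call with the remaining kwargs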
def test_s3_creation(
    self,
    mocked_version,
    mocked_data,
    mocked_url,
    mocked_validate,
    mocked_input,
    mocked_execute,
    mocked_wait,
    mocked_process,
):
    mocked_input.return_value = "testing_stack"
    mocked_version.return_value = [{"VersionId": "111111"}]
    fileloader = FileLoader(self.data_path)
    mocked_data.return_value = fileloader.process_yaml_file()
    mocked_url.return_value = "https://s3-ap-southeast-2.amazonaws.com/kazhala-lol/hello.yaml?versionId=111111"

    create_stack(bucket="kazhala-lol/hello.yaml", version=True)
    mocked_version.assert_called_with("kazhala-lol", "hello.yaml")
    mocked_validate.assert_called_with(
        "default",
        "us-east-1",
        bucket="kazhala-lol/hello.yaml",
        version="111111",
        no_print=True,
    )
    mocked_data.assert_called_with("yaml")
    mocked_url.assert_called_with(version="111111")
    mocked_execute.assert_called_with(
        Parameters=[],
        StackName="testing_stack",
        TemplateURL="https://s3-ap-southeast-2.amazonaws.com/kazhala-lol/hello.yaml?versionId=111111",
        cloudformation_action=ANY,
    )
    mocked_wait.assert_not_called()

    mocked_version.reset_mock()
    create_stack(
        profile="root",
        region="us-east-1",
        bucket="kazhala-lol/hello.yaml",
        version="111111",
        wait=True,
    )
    mocked_version.assert_not_called()
    mocked_validate.assert_called_with(
        "root",
        "us-east-1",
        bucket="kazhala-lol/hello.yaml",
        version="111111",
        no_print=True,
    )
    mocked_data.assert_called_with("yaml")
    mocked_url.assert_called_with(version="111111")
    mocked_execute.assert_called_with(
        Parameters=[],
        StackName="testing_stack",
        TemplateURL="https://s3-ap-southeast-2.amazonaws.com/kazhala-lol/hello.yaml?versionId=111111",
        cloudformation_action=ANY,
    )
    mocked_wait.assert_called_with(
        "stack_create_complete", "Waiting for stack to be ready ..."
    )
class TestFileLoader(unittest.TestCase):
    def setUp(self):
        self.fileloader = FileLoader()
        curr_path = os.path.dirname(os.path.abspath(__file__))
        self.test_json = tempfile.NamedTemporaryFile()
        with open(self.test_json.name, "w") as file:
            file.write(json.dumps({"hello": "world", "foo": "boo"}))
        self.test_yaml = os.path.join(curr_path, "../data/fzfaws.yml")

    def tearDown(self):
        # reset cloudformation profile/region to align with test config file
        os.environ["FZFAWS_CLOUDFORMATION_PROFILE"] = ""
        os.environ["FZFAWS_CLOUDFORMATION_REGION"] = ""
        # reload config file
        self.fileloader.load_config_file(config_path=self.test_yaml)

    def test_constructor(self):
        self.assertEqual(self.fileloader.path, "")
        self.assertEqual(self.fileloader.body, "")

        fileloader = FileLoader(path=self.test_json.name)
        self.assertEqual(fileloader.path, self.test_json.name)
        self.assertEqual(fileloader.body, "")

    def test_process_yaml_file(self):
        self.fileloader.path = self.test_yaml
        result = self.fileloader.process_yaml_file()
        self.assertRegex(result["body"], r".*fzf:\n")
        if "executable" not in result["dictBody"]["fzf"]:
            self.fail("Yaml file is not read properly")

    def test_process_json_body(self):
        self.fileloader.path = self.test_json.name
        result = self.fileloader.process_json_file()
        self.assertRegex(result["body"], r".*hello.*foo")
        if "foo" not in result["dictBody"]:
            self.fail("Json file is not read properly")

    @patch.object(FileLoader, "_set_cloudformation_env")
    @patch.object(FileLoader, "_set_s3_env")
    @patch.object(FileLoader, "_set_ec2_env")
    @patch.object(FileLoader, "_set_gloable_env")
    @patch.object(FileLoader, "_set_fzf_env")
    def test_load_config_file(
        self,
        mocked_set_fzf,
        mocked_set_global,
        mocked_set_ec2,
        mocked_set_s3,
        mocked_set_cloudformation,
    ):
        self.fileloader.path = self.test_yaml
        self.fileloader.load_config_file(config_path=self.test_yaml)
        mocked_set_fzf.assert_called_once()
        mocked_set_global.assert_called_once()
        mocked_set_ec2.assert_called_once()
        mocked_set_s3.assert_called_once()
        mocked_set_cloudformation.assert_called_once()

    def test_set_cloudformation_env(self):
        # normal test
        self.fileloader.load_config_file(config_path=self.test_yaml)
        self.assertEqual(os.getenv("FZFAWS_CLOUDFORMATION_PROFILE", ""), "")
        self.assertEqual(os.getenv("FZFAWS_CLOUDFORMATION_REGION", ""), "")
        self.assertEqual(os.environ["FZFAWS_CLOUDFORMATION_CREATE"], "--wait --extra")
        self.assertEqual(os.environ["FZFAWS_CLOUDFORMATION_DELETE"], "--wait")
        self.assertEqual(os.environ["FZFAWS_CLOUDFORMATION_UPDATE"], "--wait --extra")
        self.assertEqual(
            os.environ["FZFAWS_CLOUDFORMATION_WAITER"],
            json.dumps({"delay": 30, "max_attempts": 120}),
        )

        # reset
        os.environ["FZFAWS_CLOUDFORMATION_CREATE"] = ""
        os.environ["FZFAWS_CLOUDFORMATION_DELETE"] = ""
        os.environ["FZFAWS_CLOUDFORMATION_UPDATE"] = ""
        os.environ["FZFAWS_CLOUDFORMATION_WAITER"] = ""

        # empty test
        self.fileloader._set_cloudformation_env({})
        self.assertEqual(os.getenv("FZFAWS_CLOUDFORMATION_PROFILE", ""), "")
        self.assertEqual(os.getenv("FZFAWS_CLOUDFORMATION_REGION", ""), "")
        self.assertEqual(os.environ["FZFAWS_CLOUDFORMATION_CREATE"], "")
        self.assertEqual(os.environ["FZFAWS_CLOUDFORMATION_DELETE"], "")
        self.assertEqual(os.environ["FZFAWS_CLOUDFORMATION_UPDATE"], "")
        self.assertEqual(os.environ["FZFAWS_CLOUDFORMATION_WAITER"], "")

        # custom settings
        self.fileloader._set_cloudformation_env(
            {"profile": "root", "region": "us-east-2", "default_args": {"create": "-l"}}
        )
        self.assertEqual(os.environ["FZFAWS_CLOUDFORMATION_PROFILE"], "root")
        self.assertEqual(os.environ["FZFAWS_CLOUDFORMATION_REGION"], "us-east-2")
        self.assertEqual(os.environ["FZFAWS_CLOUDFORMATION_CREATE"], "-l")

    def test_set_s3_env(self):
        # normal test
        self.fileloader.load_config_file(config_path=self.test_yaml)
        self.assertEqual(
            os.environ["FZFAWS_S3_TRANSFER"],
            json.dumps(
                {
                    "multipart_threshold": 8,
                    "multipart_chunksize": 8,
                    "max_concurrency": 10,
                    "max_io_queue": 100,
                    "num_download_attempts": 6,
                }
            ),
        )
        self.assertEqual(os.environ["FZFAWS_S3_PROFILE"], "default")
        self.assertEqual(os.environ["FZFAWS_S3_UPLOAD"], "--hidden")
        self.assertEqual(os.environ["FZFAWS_S3_DOWNLOAD"], "--hidden")
        self.assertEqual(os.environ["FZFAWS_S3_PRESIGN"], "-e 3600")
        self.assertEqual(os.getenv("FZFAWS_S3_LS", ""), "")

        # reset
        os.environ["FZFAWS_S3_TRANSFER"] = ""
        os.environ["FZFAWS_S3_PROFILE"] = ""
        os.environ["FZFAWS_S3_UPLOAD"] = ""
        os.environ["FZFAWS_S3_DOWNLOAD"] = ""
        os.environ["FZFAWS_S3_PRESIGN"] = ""

        # empty test
        self.fileloader._set_s3_env({})
        self.assertEqual(os.getenv("FZFAWS_S3_TRANSFER", ""), "")
        self.assertEqual(os.getenv("FZFAWS_S3_PROFILE", ""), "")
        self.assertEqual(os.getenv("FZFAWS_S3_UPLOAD", ""), "")
        self.assertEqual(os.getenv("FZFAWS_S3_DOWNLOAD", ""), "")
        self.assertEqual(os.getenv("FZFAWS_S3_PRESIGN", ""), "")

        # custom settings
        self.fileloader._set_s3_env(
            {
                "transfer_config": {"multipart_threshold": 1, "multipart_chunksize": 1},
                "profile": "root",
                "default_args": {"upload": "-R", "ls": "-b"},
            }
        )
        self.assertEqual(
            os.environ["FZFAWS_S3_TRANSFER"],
            json.dumps({"multipart_threshold": 1, "multipart_chunksize": 1}),
        )
        self.assertEqual(os.environ["FZFAWS_S3_UPLOAD"], "-R")
        self.assertEqual(os.environ["FZFAWS_S3_PROFILE"], "root")
        self.assertEqual(os.environ["FZFAWS_S3_LS"], "-b")
        self.assertEqual(os.getenv("FZFAWS_S3_DOWNLOAD", ""), "")
        self.assertEqual(os.getenv("FZFAWS_S3_PRESIGN", ""), "")

    def test_set_ec2_env(self):
        # normal test
        self.fileloader.load_config_file(config_path=self.test_yaml)
        self.assertEqual(os.environ["FZFAWS_EC2_KEYPAIRS"], "~/.ssh")
        self.assertEqual(
            os.environ["FZFAWS_EC2_WAITER"],
            json.dumps({"delay": 10, "max_attempts": 60}),
        )
        self.assertEqual(os.environ["FZFAWS_EC2_START"], "--wait")
        self.assertEqual(os.environ["FZFAWS_EC2_STOP"], "--wait")
        self.assertEqual(os.environ["FZFAWS_EC2_REGION"], "us-east-1")
        self.assertEqual(os.environ["FZFAWS_EC2_PROFILE"], "default")

        # reset
        os.environ["FZFAWS_EC2_WAITER"] = ""
        os.environ["FZFAWS_EC2_KEYPAIRS"] = ""
        os.environ["FZFAWS_EC2_START"] = ""
        os.environ["FZFAWS_EC2_STOP"] = ""
        os.environ["FZFAWS_EC2_PROFILE"] = ""
        os.environ["FZFAWS_EC2_REGION"] = ""

        # empty test
        self.fileloader._set_ec2_env({})
        self.assertEqual(os.getenv("FZFAWS_EC2_WAITER", ""), "")
        self.assertEqual(os.getenv("FZFAWS_EC2_START", ""), "")
        self.assertEqual(os.getenv("FZFAWS_EC2_STOP", ""), "")
        self.assertEqual(os.getenv("FZFAWS_EC2_KEYPAIRS", ""), "")
        self.assertEqual(os.getenv("FZFAWS_EC2_REGION", ""), "")
        self.assertEqual(os.getenv("FZFAWS_EC2_PROFILE", ""), "")

        # custom settings
        self.fileloader._set_ec2_env(
            {
                "keypair": "$HOME/Anywhere/aws",
                "waiter": {"max_attempts": 40},
                "default_args": {"ssh": "-A"},
                "region": "us-east-1",
                "profile": "root",
            }
        )
        self.assertEqual(os.environ["FZFAWS_EC2_KEYPAIRS"], "$HOME/Anywhere/aws")
        self.assertEqual(
            os.environ["FZFAWS_EC2_WAITER"],
            json.dumps({"max_attempts": 40}),
        )
        self.assertEqual(os.environ["FZFAWS_EC2_SSH"], "-A")
        self.assertEqual(os.environ["FZFAWS_EC2_REGION"], "us-east-1")
        self.assertEqual(os.environ["FZFAWS_EC2_PROFILE"], "root")

    def test_set_global_env(self):
        # normal test
        self.fileloader.load_config_file(config_path=self.test_yaml)
        self.assertEqual(
            os.environ["FZFAWS_GLOBAL_WAITER"],
            json.dumps({"delay": 15, "max_attempts": 40}),
        )
        self.assertEqual(os.environ["FZFAWS_GLOBAL_REGION"], "us-east-1")
        self.assertEqual(os.environ["FZFAWS_GLOBAL_PROFILE"], "default")

        # reset
        os.environ["FZFAWS_GLOBAL_WAITER"] = ""
        os.environ["FZFAWS_GLOBAL_PROFILE"] = ""
        os.environ["FZFAWS_GLOBAL_REGION"] = ""

        # empty test
        self.fileloader._set_gloable_env({})
        self.assertEqual(os.getenv("FZFAWS_GLOBAL_WAITER", ""), "")
        self.assertEqual(os.environ["FZFAWS_GLOBAL_REGION"], "")
        self.assertEqual(os.environ["FZFAWS_GLOBAL_PROFILE"], "")

        # custom settings
        self.fileloader._set_gloable_env(
            {"profile": "root", "region": "us-east-1", "waiter": {"delay": 10}}
        )
        self.assertEqual(os.environ["FZFAWS_GLOBAL_WAITER"], json.dumps({"delay": 10}))
        self.assertEqual(os.environ["FZFAWS_GLOBAL_REGION"], "us-east-1")
        self.assertEqual(os.environ["FZFAWS_GLOBAL_PROFILE"], "root")

        os.environ["FZFAWS_GLOBAL_WAITER"] = ""
        self.fileloader._set_gloable_env({"profile": "root", "region": "us-east-1"})
        self.assertEqual(os.environ["FZFAWS_GLOBAL_WAITER"], "")
        self.assertEqual(os.environ["FZFAWS_GLOBAL_REGION"], "us-east-1")
        self.assertEqual(os.environ["FZFAWS_GLOBAL_PROFILE"], "root")

    def test_set_fzf_env(self):
        # normal test
        self.fileloader.load_config_file(config_path=self.test_yaml)
        self.assertEqual(os.environ["FZFAWS_FZF_EXECUTABLE"], "binary")
        self.assertEqual(
            os.environ["FZFAWS_FZF_KEYS"],
            "--bind=alt-a:toggle-all,alt-j:jump,alt-0:top,alt-s:toggle-sort",
        )
        self.assertRegex(os.environ["FZFAWS_FZF_OPTS"], r"^--color=dark\s--color=.*")

        # reset
        os.environ["FZFAWS_FZF_EXECUTABLE"] = ""
        os.environ["FZFAWS_FZF_OPTS"] = ""
        os.environ["FZFAWS_FZF_KEYS"] = ""

        # empty test
        self.fileloader._set_fzf_env({})
        self.assertEqual(os.getenv("FZFAWS_FZF_KEYS", ""), "")
        self.assertEqual(os.getenv("FZFAWS_FZF_EXECUTABLE", ""), "")
        self.assertEqual(os.getenv("FZFAWS_FZF_OPTS", ""), "")

        # custom settings
        self.fileloader._set_fzf_env(
            {"executable": "system", "args": "hello", "keybinds": {"foo": "boo"}}
        )
        self.assertEqual(os.environ["FZFAWS_FZF_KEYS"], "--bind=boo:foo")
        self.assertEqual(os.environ["FZFAWS_FZF_OPTS"], "hello")
        self.assertEqual(os.environ["FZFAWS_FZF_EXECUTABLE"], "system")

    def test_set_spinner_env(self):
        self.fileloader.load_config_file(config_path=self.test_yaml)
        self.assertEqual(os.environ["FZFAWS_SPINNER_SPEED"], "0.1")
        self.assertEqual(os.environ["FZFAWS_SPINNER_MESSAGE"], "loading ...")
        self.assertEqual(os.environ["FZFAWS_SPINNER_PATTERN"], "|/-\\")

        # reset
        os.environ["FZFAWS_SPINNER_PATTERN"] = ""
        os.environ["FZFAWS_SPINNER_MESSAGE"] = ""
        os.environ["FZFAWS_SPINNER_SPEED"] = ""

        # empty test
        self.fileloader._set_spinner_env({})
        self.assertEqual(os.getenv("FZFAWS_SPINNER_SPEED", ""), "")
        self.assertEqual(os.getenv("FZFAWS_SPINNER_MESSAGE", ""), "")
        self.assertEqual(os.getenv("FZFAWS_SPINNER_PATTERN", ""), "")

        # custom settings
        self.fileloader._set_spinner_env(
            {"message": "hello", "speed": "0.8", "pattern": "xxx"}
        )
        self.assertEqual(os.environ["FZFAWS_SPINNER_SPEED"], "0.8")
        self.assertEqual(os.environ["FZFAWS_SPINNER_MESSAGE"], "hello")
        self.assertEqual(os.environ["FZFAWS_SPINNER_PATTERN"], "xxx")
def main() -> None:
    """Entry function of the fzf.aws module."""
    try:
        parser = argparse.ArgumentParser(
            description="An interactive aws cli experience powered by fzf.",
            prog="fzfaws",
        )
        parser.add_argument(
            "-v",
            "--version",
            action="store_true",
            default=False,
            help="display the current version",
        )
        parser.add_argument(
            "--copy-config",
            dest="copy_config",
            action="store_true",
            default=False,
            help="copy the configuration file to $XDG_CONFIG_HOME/fzfaws/ or $HOME/.config/fzfaws/",
        )
        subparsers = parser.add_subparsers(dest="subparser_name")
        subparsers.add_parser("cloudformation")
        subparsers.add_parser("ec2")
        subparsers.add_parser("s3")

        if len(sys.argv) < 2:
            parser.print_help()
            sys.exit(1)

        args = parser.parse_args([sys.argv[1]])
        if args.copy_config:
            copy_config()
            sys.exit(0)
        elif args.version:
            version = pkg_resources.require("fzfaws")[0].version
            print("Current fzfaws version: %s" % version)
            sys.exit(0)

        fileloader = FileLoader()
        fileloader.load_config_file()

        argument_list = get_default_args(args.subparser_name, sys.argv[2:])
        if args.subparser_name == "cloudformation":
            cloudformation(argument_list)
        elif args.subparser_name == "ec2":
            ec2(argument_list)
        elif args.subparser_name == "s3":
            s3(argument_list)
    except InvalidFileType:
        print("Selected file is not a valid file type")
        sys.exit(1)
    except SystemExit:
        raise
    except (KeyboardInterrupt, SystemError):
        sys.exit(1)
    except NoSelectionMade:
        print("No selection was made or the result was empty")
        sys.exit(1)
    except (ClientError, Exception) as e:
        print(e)
        sys.exit(1)