Example No. 1
    def __init__(self,
                 config,
                 save_failure,
                 defaults: CLIDefaults,
                 secrets_mgr: SecretsManager,
                 save_flow=False):
        """The Google object holds authentication state
        for a given session. You need to supply:
        username: FQDN Google username, eg [email protected]
        password: obvious
        idp_id: Google's assigned IdP identifier for your G-suite account
        sp_id: Google's assigned SP identifier for your AWS SAML app
        Optionally, you can supply:
        duration_seconds: number of seconds for the session to be active (max 43200)
        """

        self.version = VERSION
        self.config = config
        self.base_url = 'https://accounts.google.com'
        self.save_failure = save_failure
        self.session_state = None
        self._defaults = defaults
        self.save_flow = save_flow
        self._secrets_mgr = secrets_mgr
        self._out = Output(defaults.colors_enabled)

        if save_flow:
            self.save_flow_dict = {}
            self.save_flow_dir = "aws-google-auth-" + datetime.now().strftime(
                '%Y-%m-%dT%H%M%S')
            os.makedirs(self.save_flow_dir, exist_ok=True)
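
A minimal usage sketch for this constructor. The argument objects (google_config, cli_defaults, secrets_manager) are assumed stand-ins for dependencies built elsewhere in the CLI; only the constructor signature above and the do_login()/parse_saml() methods shown in Example No. 17 are taken from the source.

# Hedged sketch -- placeholder objects, not the project's real wiring.
google = Google(config=google_config,          # must expose username, password, idp_id, sp_id
                save_failure=False,            # True would dump failure.html on HTTP errors
                defaults=cli_defaults,         # CLIDefaults instance (provides colors_enabled)
                secrets_mgr=secrets_manager,   # SecretsManager instance
                save_flow=False)               # True persists every request/response to disk

google.do_login()                     # drives the interactive Google SAML login flow
saml_assertion = google.parse_saml()  # decoded SAMLResponse bytes (logs in first if needed)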
Example No. 2
    def __init__(self, ssm_init: SsmDao, audit_init: AuditDao,
                 config_completer_init: WordCompleter, colors_enabled: bool,
                 config_context: ConfigContext):
        super().__init__(audit, colors_enabled, config_context)
        self._ssm = ssm_init
        self._audit_dao = audit_init
        self._config_completer = config_completer_init
        self._utils = Utils(colors_enabled)
        self._out = Output(colors_enabled)
Example No. 3
    def __init__(self, source_ssm: SsmDao, config_completer_init: WordCompleter,
                 colors_enabled: bool, config_context: ConfigContext, session_mgr: SessionManager):
        super().__init__(promote, colors_enabled, config_context)
        self.config_context = config_context
        self._source_ssm = source_ssm
        self._session_mgr = session_mgr
        self._config_completer = config_completer_init
        self._utils = Utils(colors_enabled)
        self._out = Output(colors_enabled)
Example No. 4
    def __init__(self, ssm_init, repl_init: ReplicationDao,
                 config_completer_init, colors_enabled: bool,
                 config_context: ConfigContext):
        super().__init__(share, colors_enabled, config_context)

        self._ssm = ssm_init
        self._repl = repl_init
        self._config_completer = config_completer_init
        self._utils = Utils(colors_enabled)
        self._out = Output(colors_enabled)
Example No. 5
    def __init__(self, maintenance_context: MaintenanceContext,
                 config_service: Optional[ConfigService]):
        super().__init__(version, maintenance_context.defaults.colors_enabled,
                         maintenance_context)
        self.tracker = VersionTracker(self.context.defaults, config_service)
        self.upgrade_mgr = UpgradeManager(
            maintenance_context.defaults.colors_enabled)
        self._utils = Utils(
            colors_enabled=maintenance_context.defaults.colors_enabled)
        self._out = Output(
            colors_enabled=maintenance_context.defaults.colors_enabled)
Example No. 6
    def __init__(self, ssm_init: SsmDao, cfg_view: RBACLimitedConfigView,
                 config_init: ConfigDao, repl_init: ReplicationDao, context: ConfigContext, colors_enabled: bool,
                 config_completer: WordCompleter):
        super().__init__(delete, colors_enabled, context)
        self._ssm = ssm_init
        self._config = config_init
        self._repl = repl_init
        self._utils = Utils(colors_enabled)
        self._config_completer = config_completer
        self._out = Output(colors_enabled)
        self._cfg_view = cfg_view
Example No. 7
    def __init__(self, help_context: HelpContext, figgy_setup: FiggySetup,
                 figgy_context: FiggyContext):
        super().__init__(login, Utils.not_windows(), help_context)
        self._setup = figgy_setup
        self._defaults: CLIDefaults = figgy_setup.get_defaults()
        self._figgy_context = figgy_context
        self._utils = Utils(self._defaults.colors_enabled)
        self._aws_cfg = AWSConfig(color=self.c)
        self._out = Output(self._defaults.colors_enabled)

        self.example = f"\n\n{self.c.fg_bl}{CLI_NAME} {login.name} \n" \
                       f"{self.c.rs}{self.c.fg_yl}  --or--{self.c.rs}\n" \
                       f"{self.c.fg_bl}{CLI_NAME} {login.name} {sandbox.name}{self.c.rs}"
Example No. 8
    def __init__(self, ssm_init: SsmDao, colors_enabled: bool,
                 context: ConfigContext):
        super().__init__(validate, colors_enabled, context)
        self._ssm = ssm_init
        self._config_path = context.ci_config_path if context.ci_config_path else Utils.find_figgy_json(
        )
        self._utils = Utils(colors_enabled)
        self._replication_only = context.replication_only
        self._errors_detected = False
        self.example = f"{self.c.fg_bl}{CLI_NAME} config {self.command_printable} " \
                       f"--env dev --config /path/to/config{self.c.rs}"
        self._FILE_PREFIX = "file://"
        self._out = Output(colors_enabled)
Example No. 9
    def __init__(self,
                 ssm: SsmDao,
                 ddb: ConfigDao,
                 repl_dao: ReplicationDao,
                 context: ConfigContext,
                 config_completer_init: WordCompleter,
                 colors_enabled: bool,
                 delete: Delete,
                 args=None):
        super().__init__(prune, colors_enabled, context)
        self._ssm = ssm  # type: SsmDao
        self._config_dao = ddb  # type: ConfigDao
        self._repl = repl_dao
        self._config_completer = config_completer_init  # type: WordCompleter
        self._utils = Utils(colors_enabled)
        self.example = f"{self.c.fg_bl}{CLI_NAME} config {self.command_printable} --env dev " \
            f"--config /path/to/figgy.json{self.c.rs}"
        self._config_path = context.ci_config_path if context.ci_config_path else Utils.find_figgy_json(
        )
        self._out = Output(colors_enabled)

        # If user passes in --info flag, we don't need all of this to be initialized.
        if not hasattr(args, info.name) or args.info is False:
            # Validate & parse figgy.json
            self._config = self._utils.get_ci_config(
                self._config_path)  # type: Dict
            self._shared_names = set(
                self._utils.get_config_key_safe(SHARED_KEY,
                                                self._config,
                                                default=[]))  # type: Set
            self._repl_conf = self._utils.get_config_key_safe(
                REPLICATION_KEY, self._config, default={})  # type: Dict
            self._merge_conf = self._utils.get_config_key_safe(
                MERGE_KEY, self._config, default={})  # type: Dict
            self._config_keys = set(
                self._utils.get_config_key_safe(CONFIG_KEY,
                                                self._config,
                                                default=[]))  # type: Set
            self._merge_keys = set(self._merge_conf.keys())  # type: Set
            self._namespace = self._utils.get_namespace(
                self._config)  # type: str
            self._delete_command = delete
            self._repl_from_conf = self._utils.get_config_key_safe(
                REPL_FROM_KEY, self._config, default={})
            self._repl_conf = KeyUtils.merge_repl_and_repl_from_blocks(
                self._repl_conf, self._repl_from_conf, self._namespace)

            # Build list of all keys found across all config types
            self._all_keys = KeyUtils().find_all_expected_names(
                self._config_keys, self._shared_names, self._merge_conf,
                self._repl_conf, self._repl_from_conf, self._namespace)
Example No. 10
    def __init__(self, ssm_init: SsmDao, colors_enabled: bool,
                 config_context: ConfigContext,
                 config_view: RBACLimitedConfigView, get: Get):
        super().__init__(put, colors_enabled, config_context)
        self._ssm = ssm_init
        self._utils = Utils(colors_enabled)
        self._config_view = config_view
        self._get = get
        self._source_key = Utils.attr_if_exists(copy_from, config_context.args)
        self._out = Output(colors_enabled)

        self._select_name = [('class:', 'Please input a PS Name: ')]

        self._FILE_PREFIX = "file://"
Example No. 11
    def __init__(self, ssm_init: SsmDao, kms_init: KmsService,
                 config_init: ConfigDao, repl_dao: ReplicationDao,
                 audit_dao: AuditDao, cfg_view: RBACLimitedConfigView,
                 colors_enabled: bool, context: ConfigContext,
                 config_completer: WordCompleter, delete: Delete):
        super().__init__(restore, colors_enabled, context)
        self._config_context = context
        self._ssm = ssm_init
        self._kms = kms_init
        self._config = config_init
        self._repl = repl_dao
        self._audit = audit_dao
        self._cfg_view = cfg_view
        self._utils = Utils(colors_enabled)
        self._point_in_time = context.point_in_time
        self._config_completer = config_completer
        self._delete = delete
        self._out = Output(colors_enabled=colors_enabled)
Example No. 12
class EnvironmentValidator:
    """
    Houses generic environment validation logic that may branch based on current
    configurations, such as Bastion / OKTA / GOOGLE / ETC.
    """
    def __init__(self, defaults: CLIDefaults):
        self._defaults = defaults
        self._out = Output(self._defaults.colors_enabled)
        self._utils = Utils(self._defaults.colors_enabled)

    def validate_all(self):
        if self._defaults.provider == Provider.AWS_BASTION:
            self.validate_environment_variables()

        return self

    def validate_environment_variables(self):
        # If figgy is operating in a TEST environment, ignore this.
        if os.environ.get(FIGGY_TESTS_ENABLED):
            return self

        invalid_vars = []

        for env_var in RESTRICTED_ENV_VARS:
            if os.environ.get(env_var):
                invalid_vars.append(env_var)

        if invalid_vars:
            self._out.error_h2(
                f'AWS Environment overrides detected.\n\n {invalid_vars} is currently set in your '
                f'environment. AWS_* prefixed environment variables can interfere with figgy '
                f'operations and may cause unpredictable behavior. Please unset all AWS_ prefixed ENV '
                f'variables before continuing.')

            self._out.print(
                '\nTo unset the problematic variables, please run the following command(s) in your shell: '
                '\n')
            for var in invalid_vars:
                self._out.print(f'unset {var}')

            self._utils.error_exit("Invalid environment detected, exiting.")

        return self
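
A short usage sketch: validate_all() only runs the environment-variable check when the configured provider is AWS_BASTION, and both methods return self so calls can be chained. Here, defaults is assumed to be a CLIDefaults instance obtained elsewhere.

# Hedged sketch -- `defaults` is a placeholder for a CLIDefaults loaded elsewhere.
EnvironmentValidator(defaults).validate_all()
# If any AWS_* variable from RESTRICTED_ENV_VARS is set, the offending names are printed
# and error_exit() terminates the process.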
Example No. 13
class Put(OTSCommand):
    """
    Stores a one-time-secret that can be retrieved once before it expires.
    """
    def __init__(self, ots_svc: OTSService, ots_context: OTSContext,
                 colors_enabled: bool):
        super().__init__(ots_put, colors_enabled, ots_context)
        self._ots = ots_svc
        self._utils = Utils(colors_enabled)
        self._out = Output(colors_enabled)

    def _put(self):
        value = Input.input(f"Please input a value to share: ")

        # Safe convert to int or float, then validate
        expires_in_hours = Input.input(
            f"Select # of hours before value auto-expires: ", default="1")
        expires_in_hours = Utils.safe_cast(expires_in_hours, int,
                                           expires_in_hours)
        expires_in_hours = Utils.safe_cast(expires_in_hours, float,
                                           expires_in_hours)
        self._utils.validate(
            isinstance(expires_in_hours, int)
            or isinstance(expires_in_hours, float),
            "You must provide a number of hours for when this secret should expire. No strings accepted."
        )
        self._utils.validate(
            expires_in_hours <= 48,
            "You may not specify an expiration time more than 48 hours in the future."
        )

        secret_id = self._ots.put_ots(value, expires_in_hours)
        self._out.print(
            f"\n\nTo share this secret, recipients will need the following")
        self._out.print(f"\n[[Secret Id]] -> {secret_id}")
        self._out.success(
            f"\n\nValue successfully stored, it will expire in {expires_in_hours} hours, or when retrieved."
        )

    def execute(self):
        self._put()
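
A hedged invocation sketch: ots_service and ots_context stand in for the OTSService and OTSContext dependencies the CLI normally injects (assumed names); only the constructor signature and execute() come from the source above.

# Hypothetical wiring, not the project's real factory code.
put_cmd = Put(ots_svc=ots_service, ots_context=ots_context, colors_enabled=True)
put_cmd.execute()  # prompts for a value and an expiry (<= 48 hours), then prints the Secret Id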
Example No. 14
class Audit(ConfigCommand):
    """
    Returns audit history for a queried PS Name
    """
    def __init__(self, ssm_init: SsmDao, audit_init: AuditDao,
                 config_completer_init: WordCompleter, colors_enabled: bool,
                 config_context: ConfigContext):
        super().__init__(audit, colors_enabled, config_context)
        self._ssm = ssm_init
        self._audit_dao = audit_init
        self._config_completer = config_completer_init
        self._utils = Utils(colors_enabled)
        self._out = Output(colors_enabled)

    def _audit(self):
        audit_more = True

        while audit_more:
            ps_name = Input.input(f"Please input a PS Name : ",
                                  completer=self._config_completer)
            audit_logs = self._audit_dao.get_audit_logs(ps_name)
            result_count = len(audit_logs)
            if result_count > 0:
                self._out.print(f"\nFound [[{result_count}]] results.")
            else:
                self._out.warn(f"\nNo results found for: [[{ps_name}]]")
            for log in audit_logs:
                self._out.print(log.pretty_print())

            to_continue = input(f"Audit another? (Y/n): ")
            to_continue = to_continue if to_continue != '' else 'y'
            audit_more = to_continue.lower() == "y"
            print()

    @VersionTracker.notify_user
    @AnonymousUsageTracker.track_command_usage
    def execute(self):
        self._audit()
Example No. 15
class Validate(ConfigCommand):
    def __init__(self, ssm_init: SsmDao, colors_enabled: bool,
                 context: ConfigContext):
        super().__init__(validate, colors_enabled, context)
        self._ssm = ssm_init
        self._config_path = context.ci_config_path if context.ci_config_path else Utils.find_figgy_json(
        )
        self._utils = Utils(colors_enabled)
        self._replication_only = context.replication_only
        self._errors_detected = False
        self.example = f"{self.c.fg_bl}{CLI_NAME} config {self.command_printable} " \
                       f"--env dev --config /path/to/config{self.c.rs}"
        self._FILE_PREFIX = "file://"
        self._out = Output(colors_enabled)

    def _validate(self):
        missing_key = False
        config = self._utils.get_ci_config(self._config_path)
        shared_names = set(
            self._utils.get_config_key_safe(SHARED_KEY, config, default=[]))
        repl_conf = self._utils.get_config_key_safe(REPLICATION_KEY,
                                                    config,
                                                    default={})
        repl_from_conf = self._utils.get_config_key_safe(REPL_FROM_KEY,
                                                         config,
                                                         default={})
        merge_conf = self._utils.get_config_key_safe(MERGE_KEY,
                                                     config,
                                                     default={})
        config_keys = set(
            self._utils.get_config_key_safe(CONFIG_KEY, config, default=[]))
        namespace = self._utils.get_namespace(config)
        all_names = KeyUtils.find_all_expected_names(config_keys, shared_names,
                                                     merge_conf, repl_conf,
                                                     repl_from_conf, namespace)

        all_params = self._ssm.get_all_parameters([namespace])

        all_param_names = []
        for param in all_params:
            all_param_names.append(param['Name'])

        print()
        for name in all_names:
            if name not in all_param_names:
                self._out.warn(
                    f"Fig missing from [[{self.run_env}]] environment Parameter Store: [[{name}]]"
                )
                missing_key = True
            else:
                self._out.print(f"Fig found in ParameterStore: [[{name}]].")

        if missing_key:
            print("\n\n")
            self._utils.error_exit(f"{MISSING_PS_NAME_MESSAGE}")
        else:
            self._out.success(
                f"\nSuccess! All figs have been located in the [[{self.run_env}]] ParameterStore!"
            )

    @VersionTracker.notify_user
    @AnonymousUsageTracker.track_command_usage
    def execute(self):
        self._validate()
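
Conceptually, _validate reduces to a set difference between the names derived from figgy.json and the names actually present in Parameter Store. The sketch below uses made-up names purely to illustrate that comparison; it is not figgy's internal API.

# Illustrative values only -- real names come from figgy.json and SsmDao.get_all_parameters().
expected_names = {"/app/demo/db/user", "/app/demo/db/password"}
actual_names = {"/app/demo/db/user"}
missing = expected_names - actual_names
# missing == {"/app/demo/db/password"} -> each such entry is reported as
# "Fig missing from [[<env>]] environment Parameter Store: [[<name>]]"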
Example No. 16
class Utils:
    def __init__(self, colors_enabled=False):
        self.c = TerminalFactory(colors_enabled).instance().get_colors()
        self._o = Output(colors_enabled)

    @staticmethod
    def retry(function):
        """
        Decorator that supports automatic retries if connectivity issues are detected with boto or urllib operations
        """
        def inner(self, *args, **kwargs):
            retries = 0
            while True:
                try:
                    return function(self, *args, **kwargs)
                except (botocore.exceptions.EndpointConnectionError,
                        urllib3.exceptions.NewConnectionError) as e:
                    print(e)
                    if retries > MAX_RETRIES:
                        raise e

                    Utils.stc_notify(
                        "Network connectivity issues detected. Retrying with back off..."
                    )
                    retries += 1
                    time.sleep(retries * BACKOFF)

        return inner

    @staticmethod
    def trace(func):
        """
        Decorator that adds logging around function execution and function parameters.
        """
        def wrapper(*args, **kwargs):
            log.debug(f"Entering function: {func.__name__} with args: {args}")
            start = time.time()
            result = func(*args, **kwargs)
            log.debug(
                f"Exiting function: {func.__name__} and returning: {result}")
            log.info(
                f"{func.__name__} complete after {round(time.time() - start, 2)} seconds."
            )
            return result

        return wrapper

    @staticmethod
    def millis_since_epoch():
        return int(time.time() * 1000)

    @staticmethod
    def get_os():
        return platform.system()

    @staticmethod
    def not_windows():
        return platform.system() != WINDOWS

    @staticmethod
    def is_linux():
        return platform.system() == LINUX

    @staticmethod
    def is_mac():
        return platform.system() == MAC

    @staticmethod
    def is_windows():
        return platform.system() == WINDOWS

    @staticmethod
    def find_figgy_json():
        for path in DEFAULT_FIGGY_JSON_PATHS:
            if Path(path).is_file():
                return path

        return 'figgy.json'

    @staticmethod
    def file_exists(path: str):
        return Path(path).is_file()

    @staticmethod
    def is_symlink(path: str):
        return os.path.islink(path)

    @staticmethod
    def is_set_true(command: CliCommand, args) -> bool:
        attr_name = command.standardized_name
        return args.__dict__.get(attr_name, False)

    @staticmethod
    def command_set(check_command: CliCommand, args):
        command_name = args.command if hasattr(args, 'command') else None
        return command_name == check_command.name

    @staticmethod
    def attr_if_exists(command: CliCommand,
                       args,
                       default=None) -> Union[object, None]:
        attr_name = command.standardized_name
        return args.__dict__.get(attr_name, default)

    @staticmethod
    def attr_exists(command: CliCommand, args) -> bool:
        attr_name = command.standardized_name
        return args.__dict__.get(attr_name, None) is not None

    @staticmethod
    def sanitize_session_name(name: str):
        return re.sub(r'\W+', '', name)[:15]

    @staticmethod
    def wipe_defaults():
        try:
            os.remove(DEFAULTS_FILE_CACHE_PATH)
        except OSError:
            pass

    @staticmethod
    def wipe_config_cache():
        try:
            os.remove(CONFIG_CACHE_FILE_PATH)
        except OSError:
            pass

    @staticmethod
    def wipe_vaults():
        for file in FIGGY_VAULT_FILES:
            try:
                os.remove(file)
            except OSError:
                pass

    def notify(self, message: str):
        print(f'{self.c.fg_bl}{message}{self.c.rs}')

    @staticmethod
    def stc_notify(message: str):
        print(message)

    @staticmethod
    def stc_warn(message: str):
        print(f"WARNING: {message}")

    def merge_config_contents(self, a: Dict, b: Dict, a_path: str,
                              b_path: str):
        for key in b:
            if isinstance(b[key], dict):
                if key in b and key in a:
                    dupes = b[key].keys() & a[key].keys()
                    self.validate(
                        dupes == set(),
                        f"Duplicate keys found between your configs. You may not have "
                        f"two instances of {key} with overlapping keys. Culprits: {dupes}"
                    )

                a_key = a[key] if key in a else {}
                b_key = b[key] if key in b else {}
                a[key] = {**a_key, **b_key}

            elif isinstance(b[key], list):
                a_key = a[key] if key in a else []
                b_key = b[key] if key in b else []
                a[key] = a_key + b_key
            else:
                self.error_exit(
                    f"Unable to merge config values of type: {type(b[key])} for "
                    f"specified key {key} in both {a_path} and {b_path}")

        return a

    def get_repl_config(self, repl_config_path: str):
        if not os.path.exists(repl_config_path):
            raise ValueError(
                f"Invalid replication config specified: {repl_config_path}")

        with open(repl_config_path, "r") as file:
            contents = file.read()
            self.validate(
                contents != '', f"File provided at: {repl_config_path} "
                f"cannot be empty.")
            self.validate(
                self.is_json(contents),
                "File provided contains invalid json. Please remediate.")
            conf = json.loads(contents)
            self.validate(
                REPLICATION_KEY in conf,
                f"{REPLICATION_KEY} is missing from replication config: "
                f"{repl_config_path}. This file is invalid.")
            return conf.get(REPLICATION_KEY, {})

    def get_ci_config(self, ci_config_path: str) -> Dict:
        self.validate(
            ci_config_path.endswith('.json'),
            "The figgy config file must end with the extension '.json'. A name of `figgy.json` is "
            "recommended for most use cases..")

        self.validate(
            os.path.exists(ci_config_path),
            f"Path {ci_config_path} is invalid. That file does not exist.")

        # Read & Validate figgy.json
        try:
            with open(ci_config_path, "r") as file:
                base_matcher = re.search('^(.*[/]*).*.json$', ci_config_path)
                base_path = base_matcher.group(1)
                contents = file.read()
                self.validate(
                    contents != '',
                    f"File provided at: {self.c.fg_rd}{ci_config_path}{self.c.rs} cannot be empty."
                )
                ci_config = json.loads(contents)

                if IMPORTS_KEY in ci_config:
                    for import_val in ci_config[IMPORTS_KEY]:
                        import_path = f"{base_path}/{import_val}"
                        print(f"Loading imported config: {import_path}")
                        with open(import_path) as import_file:
                            contents = import_file.read()
                            imported_config = json.loads(contents)
                            ci_config = self.merge_config_contents(
                                ci_config, imported_config, ci_config_path,
                                import_path)

                namespace = self.get_namespace(ci_config)
                app_figs = self.get_config_key_safe(CONFIG_KEY,
                                                    ci_config,
                                                    default=[])
                dupes = self.find_dupes(app_figs)
                self.validate(
                    not dupes,
                    f"Your configuration has duplicate keys: {self.c.fg_rd}{dupes}{self.c.rs}"
                )

                ns_app_params = self.standardize_parameters(
                    namespace, app_figs)
                ci_config[CONFIG_KEY] = ns_app_params

                ns_shared_params = self.standardize_parameters(
                    namespace,
                    self.get_config_key_safe(SHARED_KEY, ci_config,
                                             default=[]))
                ci_config[SHARED_KEY] = ns_shared_params

                dupes = self.find_dupes(
                    list(
                        self.get_config_key_safe(REPLICATION_KEY,
                                                 ci_config,
                                                 default={}).values()))
                self.validate(
                    not dupes,
                    f"Your configuration has duplicate values in your replicated values "
                    f"config: {self.c.fg_rd}{dupes}{self.c.rs}")

                self.validate(
                    len(
                        self.get_config_key_safe(
                            CONFIG_KEY, ci_config, default=[])) > 0
                    or OPTIONAL_NAMESPACE in ci_config,
                    f"If you have no defined Names under: {CONFIG_KEY} you must "
                    f"specify an {OPTIONAL_NAMESPACE} parameter instead with a "
                    f"value of '/app/your-service-name/'")

                return ci_config

        except json.decoder.JSONDecodeError as e:
            print(
                f"{self.c.fg_rd}Error decoding json in figgy.json. Invalid JSON detected. "
                f"Caught error: {e}{self.c.rs}")
            exit(1)
        except FileNotFoundError as e:
            print(
                f"{self.c.fg_rd}File at path {ci_config_path} does not exist or could not be read. "
                f"Are you sure you provided a valid file path?{self.c.rs}")
            exit(1)

    def standardize_parameters(self, namespace: str,
                               params: List[str]) -> List[str]:
        standardized = []
        for param in params:
            if not param.startswith(namespace):
                if param.startswith("/"):
                    standardized.append(
                        f'{namespace}{param[1:]}'
                    )  # just in case they have an extra / by accident.
                else:
                    standardized.append(f'{namespace}{param}')
            else:
                standardized.append(param)

        return standardized

    def get_namespace(self, config: Dict):
        if OPTIONAL_NAMESPACE in config:
            namespace = config[OPTIONAL_NAMESPACE]
        else:
            self.validate(
                CONFIG_KEY in config,
                f"You must specify an {CONFIG_KEY} or {OPTIONAL_NAMESPACE} block, "
                f"or both, in your figgy.json file.")
            namespace = self.parse_namespace(
                self.get_first(set(config[CONFIG_KEY])))

        self.validate(
            namespace is not None,
            f"Invalid namespace provided, or unable to parse valid "
            f"namespace from your {CONFIG_KEY} block.")
        if not namespace.endswith('/'):
            namespace = namespace + '/'

        return namespace

    def get_service_name(self, config: Dict):
        namespace = self.get_namespace(config)
        return namespace.split('/')[2]

    @staticmethod
    def find_command(command_name: str) -> Optional[CliCommand]:
        cmd = [cmd for cmd in all_commands if cmd.command == command_name]
        if cmd:
            return cmd.pop()
        else:
            return None

    @staticmethod
    def find_resource(resource_name: str) -> Optional[CliCommand]:
        cmd = [cmd for cmd in resources if cmd.command == resource_name]
        if cmd:
            return cmd.pop()
        else:
            return None

    @staticmethod
    def get_parameter_only(parameter_name: str):
        """
        Takes /app/foo/a/full/path and returns a/full/path and always removes the namespace if it exists.
        :param parameter_name: name of a parameter, with or without the namespace.
        :return: parameter/name/path without any attached namespace.
        """
        base_name = parameter_name
        try:
            get_param = re.compile(r"^/app/[A-Za-z0-9_-]+/(.*)")
            result = get_param.match(parameter_name)
            base_name = result.group(1)
        except (AttributeError, TypeError) as e:
            Utils.stc_error_exit(
                f"Unable to detect base name for parameter: {parameter_name}. {e}"
            )

        return base_name

    @staticmethod
    def parse_namespace(app_key: str) -> str:
        ns = None
        try:
            get_ns = re.compile(r"^(/app/[A-Za-z0-9_-]+/).*")
            val = get_ns.match(app_key)
            ns = val.group(1)
        except (AttributeError, TypeError) as e:
            print(
                f"Unable to parse namespace from {app_key}. If your app_figs block values do not begin with "
                f"the prefix /app/your-service-name , you must include the 'namespace' property in your figgy.json "
                f"with value /app/your-service-name/")

        return ns

    def find_dupes(self, lst: List):
        return [x for n, x in enumerate(lst) if x in lst[:n]]

    def error_exit(self, error_msg: str):
        print(f"\n{self.c.fg_rd}ERROR: >> {error_msg}{self.c.rs}")
        exit(1)

    def warn_exit(self, msg: str):
        self._o.warn_h2(msg)
        exit(0)

    @staticmethod
    def stc_error_exit(error_msg: str):
        print(f"ERROR: >> {error_msg}")
        exit(1)

    @staticmethod
    def write_error(file_name: str, error_message: str):
        with open(f'{ERROR_LOG_DIR}/{file_name}', "w+") as log:
            log.write(error_message)

    @staticmethod
    def stc_validate(boolean: bool, error_msg: str):
        if not boolean:
            Utils().error_exit(error_msg)

    def validate(self, boolean: bool, error_msg: str):
        if not boolean:
            self.error_exit(error_msg)

    def is_valid_selection(self, selection: str, notify: bool):
        result = selection is not None and (selection.lower() == "y"
                                            or selection.lower() == "n")
        if notify and not result:
            msg = f"You must input a selection of 'Y' or 'N'"
            print(f"{self.c.fg_rd}{msg}{self.c.rs}")
        return result

    def is_valid_input(self, input: str, field_name: str, notify: bool):
        result = input is not None and input != ""
        if notify and not result:
            msg = f"ERROR: Your input of >> {input} << is not valid for {field_name}. " \
                  f"You cannot input an empty string or None"
            print(f"{self.c.fg_rd}{msg}{self.c.rs}")
        return result

    @staticmethod
    def stc_is_valid_input(input: str, field_name: str, notify: bool):
        result = input is not None and input != ""
        if notify and not result:
            msg = f"ERROR: Your input of >> {input} << is not valid for {field_name}. " \
                  f"You cannot input an empty string or None"
            print(f"{msg}")
        return result

    @staticmethod
    def format_config(config: Dict) -> OrderedDict:
        """
        Takes a formatted figgy.json file dictionary and converts it to an ordered dictionary. This makes it possible
        to write the file back out as a more readable and logically formatted file.
        """

        ordered_config = OrderedDict()

        if config.get(SERVICE_KEY):
            ordered_config[SERVICE_KEY] = config.get(SERVICE_KEY)

        if config.get(PLUGIN_KEY):
            ordered_config[PLUGIN_KEY] = config.get(PLUGIN_KEY)

        if config.get(OPTIONAL_NAMESPACE):
            ordered_config[OPTIONAL_NAMESPACE] = config.get(OPTIONAL_NAMESPACE)

        if config.get(IMPORTS_KEY):
            ordered_config[IMPORTS_KEY] = config.get(IMPORTS_KEY)

        ordered_config[CONFIG_KEY] = config.get(CONFIG_KEY, [])

        if config.get(REPL_FROM_KEY):
            ordered_config[REPL_FROM_KEY] = config.get(REPL_FROM_KEY)

        if config.get(REPLICATION_KEY):
            ordered_config[REPLICATION_KEY] = config.get(REPLICATION_KEY)

        if config.get(MERGE_KEY):
            ordered_config[MERGE_KEY] = config.get(MERGE_KEY)

        if config.get(SHARED_KEY):
            ordered_config[SHARED_KEY] = config.get(SHARED_KEY)

        return ordered_config

    @staticmethod
    def get_first(some_set: Union[set, frozenset]):
        if some_set:
            return set(some_set).pop()

        return None

    @staticmethod
    def str_too_long(value: str) -> bool:
        """
        Is this string too long to store in PS?
        Args:
            value: string to count

        Returns: bool
        """
        return len(value) > 4096

    @staticmethod
    def is_json(obj: str) -> bool:
        """
        Takes a string and returns whether or not it is parseable as json.
        Args:
            obj: String to check

        Returns: True/False
        """
        try:
            json.loads(obj)
        except JSONDecodeError:
            return False

        # json.loads considers "true" or "false" valid json, but they're not valid in our case.
        if obj == "true" or obj == "false":
            return False

        # If it's a basic array, nope!
        if obj.startswith('[') and obj.endswith(']'):
            return False

        # json.loads considers strings that are numbers, valid json.
        try:
            int_obj = int(obj)
            if int_obj:
                return False
        # Do nothing if we catch an exception; that means this is json!
        except (NameError, ValueError):
            # json.loads considers strings that are numbers, valid json.
            try:
                fl_obj = float(obj)
                if fl_obj:
                    return False
            # Do nothing if we catch an exception; that means this is json!
            except (NameError, ValueError):
                return True

        return True

    @staticmethod
    def chunk_list(lst: List, chunk_size: int) -> List[List]:
        """Yield successive n-sized chunks from l."""
        for i in range(0, len(lst), chunk_size):
            yield lst[i:i + chunk_size]

    @staticmethod
    def parse_bool(value: Union[str, bool]) -> bool:
        if isinstance(value, bool):
            return bool(value)

        value = value.replace("'", '').replace('"', '').strip()
        if value.lower() == "true":
            return True
        elif value.lower() == "false":
            return False
        else:
            raise ValueError(
                f"Provided bool value of {value} is not a valid bool type.")

    @staticmethod
    def default_colors(enabled: bool = None) -> Color:
        if enabled is None:
            enabled = Utils.not_windows()

        return TerminalFactory(enabled).instance().get_colors()

    @staticmethod
    def to_env_var(variable_name: str):
        """
        Converts aCasedVariable to A_CASED_VARIABLE case.
        """
        str1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', variable_name)
        return re.sub('([a-z0-9])([A-Z])', r'\1_\2', str1).upper()

    def get_config_key_safe(self, key: str, config: Dict, default=None):
        if key in config:
            return config[key]
        else:
            if default is None:
                return []
            else:
                return default

    @staticmethod
    def load_file(file_path: str) -> str:
        try:
            with open(file_path, 'r') as file:
                return file.read()
        except FileNotFoundError:
            print(
                f"Provided file path: {file_path} is invalid. No file found.")
            exit(1)

    @staticmethod
    def class_props(cls):
        return [i for i in cls.__dict__.keys() if i[:1] != '_']

    @staticmethod
    def property_matches(obj: Any, comparator: str) -> bool:
        props = [
            p for p in dir(obj)
            if not p.startswith('__') and not callable(getattr(obj, p))
        ]
        props = [p for p in props
                 if p != '_abc_impl']  # filter out abstract class stuff
        matching_attr = [
            p for p in props if comparator in str(getattr(obj, p))
        ]
        return bool(matching_attr)

    @staticmethod
    def safe_cast(val, to_type, default=None):
        try:
            return to_type(val)
        except (ValueError, TypeError):
            return default
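
A few illustrative calls against the helpers above. ParamFetcher is a hypothetical consumer used only to show the decorators stacking; the expected values in the comments follow from the implementations shown.

# Hypothetical consumer of the retry/trace decorators.
class ParamFetcher:
    @Utils.retry   # re-invokes the call with backoff on boto/urllib connectivity errors
    @Utils.trace   # logs entry, exit value, and elapsed time
    def fetch(self, name: str):
        ...

Utils.safe_cast("42", int, default=None)    # -> 42
Utils.safe_cast("abc", int, default=None)   # -> None
Utils.to_env_var("myServiceName")           # -> "MY_SERVICE_NAME"
list(Utils.chunk_list([1, 2, 3, 4, 5], 2))  # -> [[1, 2], [3, 4], [5]]
Utils.parse_bool("false")                   # -> False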
Example No. 17
class Google:
    def __init__(self,
                 config,
                 save_failure,
                 defaults: CLIDefaults,
                 secrets_mgr: SecretsManager,
                 save_flow=False):
        """The Google object holds authentication state
        for a given session. You need to supply:
        username: FQDN Google username, eg [email protected]
        password: obvious
        idp_id: Google's assigned IdP identifier for your G-suite account
        sp_id: Google's assigned SP identifier for your AWS SAML app
        Optionally, you can supply:
        duration_seconds: number of seconds for the session to be active (max 43200)
        """

        self.version = VERSION
        self.config = config
        self.base_url = 'https://accounts.google.com'
        self.save_failure = save_failure
        self.session_state = None
        self._defaults = defaults
        self.save_flow = save_flow
        self._secrets_mgr = secrets_mgr
        self._out = Output(defaults.colors_enabled)

        if save_flow:
            self.save_flow_dict = {}
            self.save_flow_dir = "aws-google-auth-" + datetime.now().strftime(
                '%Y-%m-%dT%H%M%S')
            os.makedirs(self.save_flow_dir, exist_ok=True)

    @property
    def login_url(self):
        return self.base_url + "/o/saml2/initsso?idpid={}&spid={}&forceauthn=false".format(
            self.config.idp_id, self.config.sp_id)

    def check_for_failure(self, sess):

        if isinstance(sess.reason, bytes):
            # We attempt to decode utf-8 first because some servers
            # choose to localize their reason strings. If the string
            # isn't utf-8, we fall back to iso-8859-1 for all other
            # encodings. (See PR #3538)
            try:
                reason = sess.reason.decode('utf-8')
            except UnicodeDecodeError:
                reason = sess.reason.decode('iso-8859-1')
        else:
            reason = sess.reason

        if sess.status_code == 403:
            raise ExpectedGoogleException(u'{} accessing {}'.format(
                reason, sess.url))

        try:
            sess.raise_for_status()
        except HTTPError as ex:

            if self.save_failure:
                logging.exception("Saving failure trace in 'failure.html'", ex)
                with open("failure.html", 'w') as out:
                    out.write(sess.text)

            raise ex

        return sess

    def _save_file_name(self, url):
        filename = url.split('://')[1].split('?')[0].replace(
            "accounts.google", "ac.go").replace("/", "~")
        file_idx = self.save_flow_dict.get(filename, 1)
        self.save_flow_dict[filename] = file_idx + 1
        return filename + "_" + str(file_idx)

    def _save_request(self, url, method='GET', data=None, json_data=None):
        if self.save_flow:
            filename = self._save_file_name(url) + "_" + method + ".req"
            with open(os.path.join(self.save_flow_dir, filename),
                      'w',
                      encoding='utf-8') as out:
                try:
                    out.write("params=" + url.split('?')[1])
                except IndexError:
                    out.write("params=None")
                out.write(("\ndata: " + json.dumps(data, indent=2)).replace(
                    self.config.password, '<PASSWORD>'))
                out.write(
                    ("\njson: " + json.dumps(json_data, indent=2)).replace(
                        self.config.password, '<PASSWORD>'))

    def _save_response(self, url, response):
        if self.save_flow:
            filename = self._save_file_name(url) + ".html"
            with open(os.path.join(self.save_flow_dir, filename),
                      'w',
                      encoding='utf-8') as out:
                out.write(response.text)

    def post(self, url, data=None, json_data=None):
        try:
            self._save_request(url,
                               method='POST',
                               data=data,
                               json_data=json_data)
            response = self.check_for_failure(
                self.session.post(url, data=data, json=json_data))
            self._save_response(url, response)

        except requests.exceptions.ConnectionError as e:
            logging.exception(
                'There was a connection error, check your network settings.',
                e)
            sys.exit(1)
        except requests.exceptions.Timeout as e:
            logging.exception('The connection timed out, please try again.', e)
            sys.exit(1)
        except requests.exceptions.TooManyRedirects as e:
            logging.exception(
                'The number of redirects exceeded the maximum '
                'allowed.', e)
            sys.exit(1)

        return response

    def get(self, url):
        try:
            self._save_request(url)
            response = self.check_for_failure(self.session.get(url))
            self._save_response(url, response)

        except requests.exceptions.ConnectionError as e:
            logging.exception(
                'There was a connection error, check your network settings.',
                e)
            sys.exit(1)
        except requests.exceptions.Timeout as e:
            logging.exception('The connection timed out, please try again.', e)
            sys.exit(1)
        except requests.exceptions.TooManyRedirects as e:
            logging.exception(
                'The number of redirects exceeded the maximum '
                'allowed.', e)
            sys.exit(1)

        return response

    @staticmethod
    def parse_error_message(sess):
        response_page = BeautifulSoup(sess.text, 'html.parser')
        error = response_page.find('span', {'id': 'errorMsg'})

        if error is None:
            return None
        else:
            return error.text

    @staticmethod
    def find_key_handles(input, challengeTxt):
        keyHandles = []
        typeOfInput = type(input)
        if typeOfInput == dict:  # parse down a dict
            for item in input:
                keyHandles.extend(
                    Google.find_key_handles(input[item], challengeTxt))

        elif typeOfInput == list:  # looks like we've hit an array - iterate it
            array = list(filter(
                None, input))  # remove any None type objects from the array
            for item in array:
                typeValue = type(item)
                if typeValue == list:  # another array - recursive call
                    keyHandles.extend(
                        Google.find_key_handles(item, challengeTxt))
                elif typeValue == int or typeValue == bool:  # ints bools etc we don't care
                    continue
                else:  # we got a string or unicode here (python 3.x lost the unicode global)
                    try:  # keyHandle string will be base64 encoded -
                        # if it's not, an exception is thrown and we continue, as it's not the string we're after
                        base64UrlEncoded = base64.urlsafe_b64encode(
                            base64.b64decode(item))
                        if base64UrlEncoded != challengeTxt:  # make sure it's not the challengeTxt - if it's not, return it
                            keyHandles.append(base64UrlEncoded)
                    except:
                        pass
        return keyHandles

    @staticmethod
    def find_app_id(inputString):
        try:
            searchResult = re.search('"appid":"[a-z://.-_]+"',
                                     inputString).group()
            searchObject = json.loads('{' + searchResult + '}')
            return str(searchObject['appid'])
        except:
            logging.exception(
                'Was unable to find appid value in the Google SAML page')
            sys.exit(1)

    def do_login(self):
        self.session = requests.Session()
        # self.session.headers['User-Agent'] = "AWS Sign-in/{} (aws-google-auth)".format(self.version)
        self.session.headers[
            'User-Agent'] = "aws-cli/1.16.215 Python/3.7.3 Linux/4.14.133-113.105.amzn2.x86_64 botocore/1.12.205"
        sess = self.get(self.login_url)

        # Collect information from the page source
        first_page = BeautifulSoup(sess.text, 'html.parser')
        # gxf = first_page.find('input', {'name': 'gxf'}).get('value')
        self.cont = first_page.find('input', {'name': 'continue'}).get('value')
        # page = first_page.find('input', {'name': 'Page'}).get('value')
        # sign_in = first_page.find('input', {'name': 'signIn'}).get('value')
        form = first_page.find('form', {'id': 'gaia_loginform'})
        account_login_url = form.get('action')

        payload = {}

        for tag in form.find_all('input'):
            if tag.get('name') is None:
                continue

            payload[tag.get('name')] = tag.get('value')

        payload['Email'] = self.config.username

        if self.config.bg_response:
            payload['bgresponse'] = self.config.bg_response

        if payload.get('PersistentCookie', None) is not None:
            payload['PersistentCookie'] = 'yes'

        if payload.get('TrustDevice', None) is not None:
            payload['TrustDevice'] = 'on'

        # POST to account login info page, to collect profile and session info
        sess = self.post(account_login_url, data=payload)

        self.session.headers['Referer'] = sess.url

        # Collect ProfileInformation, SessionState, signIn, and Password Challenge URL
        challenge_page = BeautifulSoup(sess.text, 'html.parser')

        # Handle the "old-style" page
        if challenge_page.find('form', {'id': 'gaia_loginform'}):
            form = challenge_page.find('form', {'id': 'gaia_loginform'})
            passwd_challenge_url = form.get('action')
        else:
            # sometimes they serve up a different page
            logging.debug("Handling new-style login page")
            form = challenge_page.find('form', {'id': 'challenge'})
            passwd_challenge_url = 'https://accounts.google.com' + form.get(
                'action')

        for tag in form.find_all('input'):
            if tag.get('name') is None:
                continue

            payload[tag.get('name')] = tag.get('value')

        # Update the payload
        payload['Passwd'] = self.config.password

        # POST to Authenticate Password
        sess = self.post(passwd_challenge_url, data=payload)

        response_page = BeautifulSoup(sess.text, 'html.parser')
        error = response_page.find(class_='error-msg')
        cap = response_page.find('input', {'name': 'identifier-captcha-input'})

        # Were there any errors logging in? Could be invalid username or password
        # There could also sometimes be a Captcha, which means Google thinks you,
        # or someone using the same outbound IP address as you, is a bot.
        if error is not None and cap is None:
            raise ExpectedGoogleException('Invalid username or password')

        if "signin/rejected" in sess.url:
            raise ExpectedGoogleException(
                u'''Default value of parameter `bgresponse` has not been accepted.
                Please visit login URL {}, open the web inspector and execute document.bg.invoke() in the console.
                Then, set --bg-response to the function output.'''.format(
                    self.login_url))

        self.check_extra_step(response_page)

        # Process Google CAPTCHA verification request if present
        if cap is not None:
            self.session.headers['Referer'] = sess.url

            sess = self.handle_captcha(sess, payload)

            response_page = BeautifulSoup(sess.text, 'html.parser')
            error = response_page.find(class_='error-msg')
            cap = response_page.find('input', {'name': 'logincaptcha'})

            # Were there any errors logging in? Could be invalid username or password
            # There could also sometimes be a Captcha, which means Google thinks you,
            # or someone using the same outbound IP address as you, is a bot.
            if error is not None:
                raise ExpectedGoogleException('Invalid username or password')

            self.check_extra_step(response_page)

            if cap is not None:
                raise ExpectedGoogleException('Invalid captcha')

        self.session.headers['Referer'] = sess.url

        if "selectchallenge/" in sess.url:
            sess = self.handle_selectchallenge(sess)

        # Was there an MFA challenge?
        if "challenge/totp/" in sess.url:
            error_msg = ""
            while error_msg is not None:
                sess = self.handle_totp(sess)
                error_msg = self.parse_error_message(sess)
                if error_msg is not None:
                    logging.error(error_msg)
        elif "challenge/ipp/" in sess.url:
            sess = self.handle_sms(sess)
        elif "challenge/az/" in sess.url:
            sess = self.handle_prompt(sess)
        elif "challenge/sk/" in sess.url:
            sess = self.handle_sk(sess)
        elif "challenge/iap/" in sess.url:
            sess = self.handle_iap(sess)
        elif "challenge/dp/" in sess.url:
            sess = self.handle_dp(sess)
        elif "challenge/ootp/5" in sess.url:
            raise NotImplementedError(
                'Offline Google App OOTP not implemented')

        # ... there are different URLs for backup codes (printed)
        # and security keys (eg yubikey) as well
        # save for later
        self.session_state = sess

    @staticmethod
    def check_extra_step(response):
        extra_step = response.find(
            text='This extra step shows that it’s really you trying to sign in'
        )
        if extra_step:
            if response.find(id='contactAdminMessage'):
                raise ValueError(response.find(id='contactAdminMessage').text)

    def parse_saml(self):
        if self.session_state is None:
            self.do_login()

        parsed = BeautifulSoup(self.session_state.text, 'html.parser')
        try:
            saml_element = parsed.find('input', {
                'name': 'SAMLResponse'
            }).get('value')
        except:

            if self.save_failure:
                logging.error("SAML lookup failed, storing failure page to "
                              "'saml.html' to assist with debugging.")
                with open("saml.html", 'wb') as out:
                    out.write(self.session_state.text.encode('utf-8'))

            raise ExpectedGoogleException(
                'Something went wrong - Could not find SAML response, check your credentials or use --save-failure-html to debug.'
            )

        return base64.b64decode(saml_element)

    def handle_captcha(self, sess, payload):
        response_page = BeautifulSoup(sess.text, 'html.parser')

        # Collect ProfileInformation, SessionState, signIn, and Password Challenge URL
        profile_information = response_page.find('input', {
            'name': 'ProfileInformation'
        }).get('value')
        session_state = response_page.find('input', {
            'name': 'SessionState'
        }).get('value')
        sign_in = response_page.find('input', {'name': 'signIn'}).get('value')
        passwd_challenge_url = response_page.find('form', {
            'id': 'gaia_loginform'
        }).get('action')

        # Update the payload
        payload['SessionState'] = session_state
        payload['ProfileInformation'] = profile_information
        payload['signIn'] = sign_in
        payload['Passwd'] = self.config.password

        # Get all captcha challenge tokens and urls
        captcha_container = response_page.find('div',
                                               {'id': 'identifier-captcha'})
        captcha_logintoken = captcha_container.find('input', {
            'id': 'identifier-token'
        }).get('value')
        captcha_img = captcha_container.find('div', {'class': 'captcha-img'})
        captcha_url = "https://accounts.google.com" + captcha_img.find(
            'img').get('src')
        captcha_logintoken_audio = ''

        open_image = True

        # Check if there is a display utility installed, as Image.open(f).show() does not raise any exception if not.
        # If neither xv nor display is available, just display the URL for the user to visit.
        if os.name == 'posix' and sys.platform != 'darwin':
            if find_executable('xv') is None and find_executable(
                    'display') is None:
                open_image = False

        self._out.print(
            f"\nPlease visit the following URL to view your CAPTCHA: {captcha_url}"
        )
        self._out.notify(
            "\n\nTo avoid having to enter captchas in the future, check your gmail and authorize the new "
            "device.")

        if open_image:
            try:
                with requests.get(captcha_url) as url:
                    with io.BytesIO(url.content) as f:
                        Image.open(f).show()
            except Exception:
                pass

        try:
            captcha_input = raw_input("Captcha (case insensitive): ") or None
        except NameError:
            captcha_input = input("Captcha (case insensitive): ") or None

        # Update the payload
        payload['identifier-captcha-input'] = captcha_input
        payload['identifiertoken'] = captcha_logintoken
        payload['identifiertoken_audio'] = captcha_logintoken_audio
        payload['checkedDomains'] = 'youtube'
        payload['checkConnection'] = 'youtube:574:1'
        payload['Email'] = self.config.username

        response = self.post(passwd_challenge_url, data=payload)

        newPayload = {}

        auth_response_page = BeautifulSoup(response.text, 'html.parser')
        form = auth_response_page.find('form')
        for tag in form.find_all('input'):
            if tag.get('name') is None:
                continue

            newPayload[tag.get('name')] = tag.get('value')

        newPayload['Email'] = self.config.username
        newPayload['Passwd'] = self.config.password

        if newPayload.get('TrustDevice', None) is not None:
            newPayload['TrustDevice'] = 'on'

        return self.post(response.url, data=newPayload)

    def handle_sk(self, sess):
        response_page = BeautifulSoup(sess.text, 'html.parser')
        challenge_url = sess.url.split("?")[0]
        challenges_txt = response_page.find('input', {
            'name': "id-challenge"
        }).get('value')

        facet_url = urllib_parse.urlparse(challenge_url)
        facet = facet_url.scheme + "://" + facet_url.netloc

        keyHandleJSField = response_page.find('div', {
            'jsname': 'C0oDBd'
        }).get('data-challenge-ui')
        startJSONPosition = keyHandleJSField.find('{')
        endJSONPosition = keyHandleJSField.rfind('}')
        keyHandleJsonPayload = json.loads(
            keyHandleJSField[startJSONPosition:endJSONPosition + 1])

        keyHandles = self.find_key_handles(
            keyHandleJsonPayload,
            base64.urlsafe_b64encode(base64.b64decode(challenges_txt)))
        appId = self.find_app_id(str(keyHandleJsonPayload))

        # The challenge text sent for signing needs to be base64 url-encoded.
        # We also have to remove any base64 padding, because including it will prevent Google from
        # accepting the auth response.
        challenges_txt_encode_pad_removed = base64.urlsafe_b64encode(
            base64.b64decode(challenges_txt)).strip('='.encode())
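        # As a quick illustration (not part of the original flow): base64.urlsafe_b64encode(b'ab')
        # returns b'YWI=' and stripping the trailing '=' yields b'YWI', which is the padding-free form
        # echoed back in the assertion below.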

        u2f_challenges = [{
            'version':
            'U2F_V2',
            'challenge':
            challenges_txt_encode_pad_removed.decode(),
            'appId':
            appId,
            'keyHandle':
            keyHandle.decode()
        } for keyHandle in keyHandles]

        # Prompt the user up to attempts_remaining times to insert their U2F device.
        attempts_remaining = 5
        auth_response = None
        while True:
            try:
                auth_response_dict = u2f.u2f_auth(u2f_challenges, facet)
                auth_response = json.dumps(auth_response_dict)
                break
            except RuntimeWarning:
                logging.error("No U2F device found. %d attempts remaining",
                              attempts_remaining)
                if attempts_remaining <= 0:
                    break
                else:
                    input(
                        "Insert your U2F device and press enter to try again..."
                    )
                    attempts_remaining -= 1

        # If we exceed the number of attempts, raise an error and let the program exit.
        if auth_response is None:
            raise ExpectedGoogleException(
                "No U2F device found. Please check your setup.")

        payload = {
            'challengeId':
            response_page.find('input', {
                'name': 'challengeId'
            }).get('value'),
            'challengeType':
            response_page.find('input', {
                'name': 'challengeType'
            }).get('value'),
            'continue':
            response_page.find('input', {
                'name': 'continue'
            }).get('value'),
            'scc':
            response_page.find('input', {
                'name': 'scc'
            }).get('value'),
            'sarp':
            response_page.find('input', {
                'name': 'sarp'
            }).get('value'),
            'checkedDomains':
            response_page.find('input', {
                'name': 'checkedDomains'
            }).get('value'),
            'pstMsg':
            '1',
            'TL':
            response_page.find('input', {
                'name': 'TL'
            }).get('value'),
            'gxf':
            response_page.find('input', {
                'name': 'gxf'
            }).get('value'),
            'id-challenge':
            challenges_txt,
            'id-assertion':
            auth_response,
            'TrustDevice':
            'on',
        }
        return self.post(challenge_url, data=payload)

    def handle_sms(self, sess):
        response_page = BeautifulSoup(sess.text, 'html.parser')
        challenge_url = sess.url.split("?")[0]

        sms_token = input("Enter SMS token: G-") or None

        challenge_form = response_page.find('form')
        payload = {}
        for tag in challenge_form.find_all('input'):
            if tag.get('name') is None:
                continue

            payload[tag.get('name')] = tag.get('value')

        if response_page.find('input', {'name': 'TrustDevice'}) is not None:
            payload['TrustDevice'] = 'on'

        payload['Pin'] = sms_token

        try:
            del payload['SendMethod']
        except KeyError:
            pass

        # Submit IPP (SMS code)
        return self.post(challenge_url, data=payload)

    def handle_prompt(self, sess):
        response_page = BeautifulSoup(sess.text, 'html.parser')
        challenge_url = sess.url.split("?")[0]

        data_key = response_page.find('div', {
            'data-api-key': True
        }).get('data-api-key')
        data_tx_id = response_page.find('div', {
            'data-tx-id': True
        }).get('data-tx-id')

        # Need to post this to the verification/pause endpoint
        await_url = "https://content.googleapis.com/cryptauth/v1/authzen/awaittx?alt=json&key={}".format(
            data_key)
        await_body = {'txId': data_tx_id}

        self.check_prompt_code(response_page)

        self._out.print(
            "Open the Google App, and tap 'Yes' on the prompt to sign in ...")

        self.session.headers['Referer'] = sess.url

        retry = True
        response = None
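        # The awaittx endpoint appears to long-poll while the user has not yet answered the prompt; a
        # 500 from it is treated below as "not approved yet" and retried, while any other HTTP error
        # is re-raised.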
        while retry:
            try:
                response = self.post(await_url, json_data=await_body)
                retry = False
            except requests.exceptions.HTTPError as ex:

                if not ex.response.status_code == 500:
                    raise ex

        parsed_response = json.loads(response.text)

        payload = {
            'challengeId':
            response_page.find('input', {
                'name': 'challengeId'
            }).get('value'),
            'challengeType':
            response_page.find('input', {
                'name': 'challengeType'
            }).get('value'),
            'continue':
            response_page.find('input', {
                'name': 'continue'
            }).get('value'),
            'scc':
            response_page.find('input', {
                'name': 'scc'
            }).get('value'),
            'sarp':
            response_page.find('input', {
                'name': 'sarp'
            }).get('value'),
            'checkedDomains':
            response_page.find('input', {
                'name': 'checkedDomains'
            }).get('value'),
            'checkConnection':
            'youtube:1295:1',
            'pstMsg':
            response_page.find('input', {
                'name': 'pstMsg'
            }).get('value'),
            'TL':
            response_page.find('input', {
                'name': 'TL'
            }).get('value'),
            'gxf':
            response_page.find('input', {
                'name': 'gxf'
            }).get('value'),
            'token':
            parsed_response['txToken'],
            'action':
            response_page.find('input', {
                'name': 'action'
            }).get('value'),
            'TrustDevice':
            'on',
        }

        return self.post(challenge_url, data=payload)

    def check_prompt_code(self, response):
        """
        Sometimes there is an additional numerical code on the response page that needs to be selected
        on the prompt from a list of multiple choices. Print it if it's there.
        """
        num_code = response.find("div", {"jsname": "EKvSSd"})
        if num_code:
            self._out.print("numerical code for prompt: {}".format(
                num_code.string))

    def handle_totp(self, sess):
        response_page = BeautifulSoup(sess.text, 'html.parser')
        tl = response_page.find('input', {'name': 'TL'}).get('value')
        gxf = response_page.find('input', {'name': 'gxf'}).get('value')
        challenge_url = sess.url.split("?")[0]
        challenge_id = challenge_url.split("totp/")[1]

        if self._defaults.mfa_enabled:
            color = Utils.default_colors(
            ) if self._defaults.colors_enabled else None
            mfa_token = self._secrets_mgr.get_next_mfa(self._defaults.user) if self._defaults.auto_mfa else \
                Input.get_mfa(display_hint=True, color=color)
        else:
            mfa_token = None

        if not mfa_token:
            raise ValueError(
                "MFA token required for {} but none supplied.".format(
                    self.config.username))

        payload = {
            'challengeId': challenge_id,
            'challengeType': 6,
            'continue': self.cont,
            'scc': 1,
            'sarp': 1,
            'checkedDomains': 'youtube',
            'pstMsg': 0,
            'TL': tl,
            'gxf': gxf,
            'Pin': mfa_token,
            'TrustDevice': 'on',
        }

        # Submit TOTP
        return self.post(challenge_url, data=payload)

    def handle_dp(self, sess):
        response_page = BeautifulSoup(sess.text, 'html.parser')

        input(
            "Check your phone - after you have confirmed response press ENTER to continue."
        ) or None

        form = response_page.find('form', {'id': 'challenge'})
        challenge_url = 'https://accounts.google.com' + form.get('action')

        payload = {}
        for tag in form.find_all('input'):
            if tag.get('name') is None:
                continue

            payload[tag.get('name')] = tag.get('value')

        # Submit Configuration
        return self.post(challenge_url, data=payload)

    def handle_iap(self, sess):
        response_page = BeautifulSoup(sess.text, 'html.parser')
        challenge_url = sess.url.split("?")[0]
        phone_number = input('Enter your phone number:') or None

        while True:
            try:
                choice = int(
                    input(
                        'Type 1 to receive a code by SMS or 2 for a voice call:'
                    ))
                if choice not in [1, 2]:
                    raise ValueError
            except ValueError:
                logging.error("Not a valid (integer) option, try again")
                continue
            else:
                if choice == 1:
                    send_method = 'SMS'
                elif choice == 2:
                    send_method = 'VOICE'
                else:
                    continue
                break

        payload = {
            'challengeId':
            response_page.find('input', {
                'name': 'challengeId'
            }).get('value'),
            'challengeType':
            response_page.find('input', {
                'name': 'challengeType'
            }).get('value'),
            'continue':
            self.cont,
            'scc':
            response_page.find('input', {
                'name': 'scc'
            }).get('value'),
            'sarp':
            response_page.find('input', {
                'name': 'sarp'
            }).get('value'),
            'checkedDomains':
            response_page.find('input', {
                'name': 'checkedDomains'
            }).get('value'),
            'pstMsg':
            response_page.find('input', {
                'name': 'pstMsg'
            }).get('value'),
            'TL':
            response_page.find('input', {
                'name': 'TL'
            }).get('value'),
            'gxf':
            response_page.find('input', {
                'name': 'gxf'
            }).get('value'),
            'phoneNumber':
            phone_number,
            'sendMethod':
            send_method,
        }

        # Submit phone number and desired method (SMS or voice call)
        sess = self.post(challenge_url, data=payload)

        response_page = BeautifulSoup(sess.text, 'html.parser')
        challenge_url = sess.url.split("?")[0]

        token = input("Enter " + send_method + " token: G-") or None

        payload = {
            'challengeId':
            response_page.find('input', {
                'name': 'challengeId'
            }).get('value'),
            'challengeType':
            response_page.find('input', {
                'name': 'challengeType'
            }).get('value'),
            'continue':
            response_page.find('input', {
                'name': 'continue'
            }).get('value'),
            'scc':
            response_page.find('input', {
                'name': 'scc'
            }).get('value'),
            'sarp':
            response_page.find('input', {
                'name': 'sarp'
            }).get('value'),
            'checkedDomains':
            response_page.find('input', {
                'name': 'checkedDomains'
            }).get('value'),
            'pstMsg':
            response_page.find('input', {
                'name': 'pstMsg'
            }).get('value'),
            'TL':
            response_page.find('input', {
                'name': 'TL'
            }).get('value'),
            'gxf':
            response_page.find('input', {
                'name': 'gxf'
            }).get('value'),
            'pin':
            token,
        }

        # Submit SMS/VOICE token
        return self.post(challenge_url, data=payload)

    def handle_selectchallenge(self, sess):
        response_page = BeautifulSoup(sess.text, 'html.parser')

        challenges = []
        for i in response_page.select('form[data-challengeentry]'):
            action = i.attrs.get("action")

            if "challenge/totp/" in action:
                challenges.append([
                    'TOTP (Google Authenticator)',
                    i.attrs.get("data-challengeentry")
                ])
            elif "challenge/ipp/" in action:
                challenges.append(['SMS', i.attrs.get("data-challengeentry")])
            elif "challenge/iap/" in action:
                challenges.append(
                    ['SMS other phone',
                     i.attrs.get("data-challengeentry")])
            elif "challenge/sk/" in action:
                challenges.append(
                    ['YubiKey', i.attrs.get("data-challengeentry")])
            elif "challenge/az/" in action:
                challenges.append(
                    ['Google Prompt',
                     i.attrs.get("data-challengeentry")])

        self._out.print('Choose MFA method from available:')
        for i, mfa in enumerate(challenges, start=1):
            self._out.print("{}: {}".format(i, mfa[0]))

        selected_challenge = input("Enter MFA choice number (1): ") or None

        if selected_challenge is not None and int(selected_challenge) <= len(
                challenges):
            selected_challenge = int(selected_challenge) - 1
        else:
            selected_challenge = 0

        challenge_id = challenges[selected_challenge][1]
        self._out.print("MFA Type Chosen: {}".format(
            challenges[selected_challenge][0]))

        # We need the specific form of the challenge chosen
        challenge_form = response_page.find(
            'form', {'data-challengeentry': challenge_id})

        payload = {}
        for tag in challenge_form.find_all('input'):
            if tag.get('name') is None:
                continue

            payload[tag.get('name')] = tag.get('value')

        if response_page.find('input', {'name': 'TrustDevice'}) is not None:
            payload['TrustDevice'] = 'on'

        # POST to google with the chosen challenge
        return self.post(self.base_url + challenge_form.get('action'),
                         data=payload)
Exemplo n.º 18
0
class Sync(ConfigCommand):
    """
    Synchronizes the local application configuration state defined in the figgy.json file with the existing
    remote state in the targeted environment. Also configures replication for designated shared parameters
    in the figgy.json file.
    """
    def __init__(self, ssm_init: SsmDao, config_init: ConfigDao,
                 repl_dao: ReplicationDao, colors_enabled: bool,
                 context: ConfigContext, get: Get, put: Put):
        super().__init__(sync, colors_enabled, context)
        self._config = config_init
        self._ssm = ssm_init
        self._repl = repl_dao
        self._config_path = context.ci_config_path if context.ci_config_path else Utils.find_figgy_json(
        )
        self._utils = Utils(colors_enabled)
        self._replication_only = context.replication_only
        self._errors_detected = False
        self.example = f"{self.c.fg_bl}{CLI_NAME} config {self.command_printable} " \
                       f"--env dev --config /path/to/config{self.c.rs}"
        self._get: Get = get
        self._put: Put = put
        self._FILE_PREFIX = "file://"
        self._out = Output(colors_enabled)

    def _input_config_values(self, config_keys: Set[str]) -> None:
        """
        Prompts the user for each of the passed-in config values that is missing from PS.
        :param config_keys: Set[string] - config values to prompt the user to add.
        """
        def validate_msg(ps_name: str):
            self._out.success(f"Name Validated: [[{ps_name}]]")
            return validate_msg

        count = 0
        for key in config_keys:
            try:
                if not self._get.get(key):
                    self._out.warn(
                        f"Fig: [[{key}]] missing from PS in environment: [[{self.run_env}]]."
                    )
                    self._put.put_param(key=key, display_hints=False)
                    count = count + 1
                else:
                    validate_msg(key)
            except ClientError:
                validate_msg(key)

        if count:
            self._out.success(
                f"[[{count}]] {'value' if count == 1 else 'values'} added successfully"
            )

    def _sync_keys(self, config_namespace: str, all_keys: Set):
        """
        Looks for stray parameters (keys) under the provided namespace and prints out information about
        parameters that exist in PS but are not defined in the figgy.json file.
        Args:
            config_namespace: Namespace to query PS under.
            all_keys: All keys that exist in figgy.json to compare against.
        """
        self._out.notify(f"Checking for stray config names.")

        # Find & Prune stray keys
        ps_keys = set(
            list(
                map(lambda x: x['Name'],
                    self._ssm.get_all_parameters([config_namespace]))))
        ps_only_keys = ps_keys.difference(all_keys)

        UNUSED_CONFIG_DETECTED = f"%%red%%The following Names were found in PS but are not referenced in your configurations. \n" \
                                 f"Use the %%rs%%%%blue%%`prune`%%rs%%%%red%% command to clean them up once all " \
                                 f"deployed application versions no longer use these configurations: %%rs%%"

        if len(ps_only_keys) > 0:
            self._out.warn(
                "The following Names were found in PS but are not referenced in your configurations. \n"
                "Use the [[prune]] command to clean them up once all.")

        for key in ps_only_keys:
            self._out.print(f"Unused Parameter: [[{key}]]")

        if not ps_only_keys:
            self._out.success(f"No stray configurations found.")

    def _sync_repl_configs(self,
                           config_repl: Dict,
                           namespace: str = None) -> None:
        """
        Syncs replication configs from a defined "replicate_figs" block parsed from either the figgy.json file
        or the data replication config json file.
        Args:
            config_repl: Dict of KV Pairs for a repl config. Source -> Dest
            namespace: Optional namespace. Parsed from destination if not supplied.
        """
        local_configs: List[ReplicationConfig] = ReplicationConfig.from_dict(
            conf=config_repl,
            type=ReplicationType(REPL_TYPE_APP),
            run_env=self.run_env,
            namespace=namespace)
        for l_cfg in local_configs:
            # Namespace will be missing for --replication-only syncs. Otherwise, with standard syncs, namespace is passed
            # as a parameter here.
            if not namespace:
                namespace = l_cfg.namespace

            if not l_cfg.destination.startswith(namespace):
                self._out.error(
                    f"Replication config [[{l_cfg.source} -> {l_cfg.destination}]] has a destination that "
                    f"is not in your service namespace: [[{namespace}]]. This is invalid."
                )
                self._errors_detected = True
                continue

            remote_cfg = self._repl.get_config_repl(l_cfg.destination)

            # Should never happen, except when someone manually deletes source / destination without going through CLI
            missing_from_ps = self.__get_param_encrypted(l_cfg.source) is None

            if not remote_cfg or remote_cfg != l_cfg or missing_from_ps:
                try:
                    if self._can_replicate_from(
                            l_cfg.source
                    ) and not remote_cfg or missing_from_ps:
                        self._repl.put_config_repl(l_cfg)
                        self._out.print(
                            f"[[Replication added:]] {l_cfg.source} -> {l_cfg.destination}"
                        )
                    elif self._can_replicate_from(l_cfg.source) and remote_cfg:
                        self._repl.put_config_repl(l_cfg)
                        self._out.notify(f"Replication updated.")
                        self._out.warn(
                            f"Removed: {remote_cfg.source} -> {remote_cfg.destination}"
                        )
                        self._out.success(
                            f"Added: {l_cfg.source} -> {l_cfg.destination}")
                    else:
                        self._errors_detected = True
                        # print(f"{self.c.fg_rd}You do not have permission to configure replication from source:"
                        #       f"{self.c.rs} {key}")
                except ClientError:
                    self._utils.validate(
                        False,
                        f"Error detected when attempting to store replication config "
                        f"for {l_cfg.destination}")
                    self._errors_detected = True
            else:
                self._out.success(
                    f"Replication Validated: [[{l_cfg.source} -> {l_cfg.destination}]]"
                )

    def _notify_of_data_repl_orphans(self, config_repl: Dict) -> None:
        """
        Notify user of detected stray replication configurations when using the --replication-only flag.
        :param config_repl: replication configuration block.
        """
        strays: Set[ReplicationConfig] = set()
        notify = False
        for repl in config_repl:
            namespace = self._utils.parse_namespace(config_repl[repl])
            remote_cfgs = self._repl.get_all_configs(namespace)

            if remote_cfgs:
                for cfg in remote_cfgs:
                    if cfg.source not in list(config_repl.keys()) \
                            and cfg.type == REPL_TYPE_APP \
                            and not cfg.source.startswith(shared_ns) \
                            and not cfg.source.startswith(self.context.defaults.service_ns):
                        strays.add(cfg)
                        notify = True

        for stray in strays:
            print(
                f"{self.c.fg_yl}stray replication mapping detected: {self.c.rs}"
                f" {self.c.fg_bl}{stray.source} -> {stray.destination}{self.c.rs}."
            )
        if notify:
            print(
                f"To prune stray replication configs, "
                f"delete the destination, THEN the source with the `figgy config delete` command"
            )

    def _sync_replication(self, config_repl: Dict, expected_destinations: Set,
                          namespace: str):
        """
        Calls sync_repl_configs, which adds/removes repl configs, then searches for stray configurations and
        notifies the user of any that are detected.
        Args:
            config_repl: Dict of KV Pairs for a repl config. Source -> Dest
            expected_destinations: expected replication destinations, as defined in merge key sources,
             or shared_figs
            namespace: Namespace to sync replication configs to. E.g. /app/demo-time/
        """

        self._out.notify(f"Validating replication for all parameters.")

        self._sync_repl_configs(config_repl, namespace=namespace)
        self._out.notify(f"\nChecking for stray replication configurations.")
        remote_cfgs = self._repl.get_all_configs(namespace)
        notify = True
        if remote_cfgs:
            for cfg in remote_cfgs:
                if cfg.source not in list(config_repl.keys()) \
                        and cfg.destination not in list(config_repl.values()) \
                        and cfg.destination not in expected_destinations \
                        and (isinstance(cfg.source, list)
                             or cfg.source.startswith(shared_ns) or cfg.source.startswith(
                            self.context.defaults.service_ns)):
                    print(
                        f"{self.c.fg_rd}Stray replication mapping detected: {self.c.rs}"
                        f" {self.c.fg_bl}{cfg.source} -> {cfg.destination}{self.c.rs}."
                    )
                    notify = False
        if notify:
            self._out.success(
                f"No stray replication configs found for: {namespace}")
        else:
            self._out.warn(f"{CLEANUP_REPLICA_ORPHANS}")

    def _validate_merge_keys(self, destination: str, sources: Union[List, str],
                             namespace: str) -> bool:
        """
        Validates merge key sources & destinations
        Args:
            destination: str -> Destination of merge key replication
            sources: List or Str -> Source(s) of this merge key
            namespace: application namespace
        """
        if not destination.startswith(namespace):
            print(
                f"{self.c.fg_rd}Merge config: {self.c.rs}{self.c.fg_bl}{destination}{self.c.rs}{self.c.fg_rd} has a "
                f"destination that is not in your service namespace: "
                f"{self.c.rs}{self.c.fg_bl}{namespace}{self.c.rs}{self.c.fg_rd}. This is invalid.{self.c.rs}"
            )
            self._errors_detected = True
            return False

        if isinstance(sources, list):
            for item in sources:
                if item.startswith(MERGE_KEY_PREFIX):
                    valid = item.replace(MERGE_KEY_PREFIX,
                                         "").startswith(namespace)
                    self._utils.validate(
                        valid,
                        f"Source: {item} in merge config must begin with your namespace: {namespace}."
                    )
                    if not valid:
                        self._errors_detected = True
                        return False
        else:
            valid = sources.startswith(namespace)
            self._utils.validate(
                valid,
                f"Source {sources} in merge config must begin with your namespace: {namespace}"
            )
            if not valid:
                self._errors_detected = True
                return False

        return True

    def _sync_merge_keys(self, config_merge: Dict, namespace: str) -> None:
        """
            Pushes merge key configs into replication config table.
        Args:
            config_merge: Dict of merge_parameters parsed from figcli.json file
            namespace: namespace for app
        """
        self._out.notify("Validating replication for all merge keys.")
        for key in config_merge:
            self._validate_merge_keys(key, config_merge[key], namespace)

            config = self._repl.get_config_repl(key)
            if not config or (config.source != config_merge[key]):
                try:
                    repl_config = ReplicationConfig(
                        destination=key,
                        run_env=self.run_env,
                        namespace=namespace,
                        source=config_merge[key],
                        type=ReplicationType(REPL_TYPE_MERGE))
                    self._repl.put_config_repl(repl_config)
                except ClientError:
                    self._utils.validate(
                        False,
                        f"Error detected when attempting to store replication config for {key}"
                    )
                    self._errors_detected = True
            else:
                self._out.success(
                    f"Merge key replication config validated: [[{key}]]")

    def _validate_expected_names(self, all_names: Set, repl_conf: Dict,
                                 merge_conf: Dict):
        self._out.notify(f"Validating shared keys exist.")
        print_resolution_message = False
        merged_confs = {**repl_conf, **merge_conf}
        for name in all_names:
            if self.__get_param_encrypted(name) is None:
                awaiting_repl = False
                for cnf in merged_confs:
                    if name == cnf or name in list(repl_conf.values()):
                        self._out.print(
                            f"\nConfig value [[{name}]] is a destination for replication, but doesn't exist"
                            f" yet. If you commit now your build could fail. This should auto-resolve within "
                            f"a few seconds once all of its dependencies exist. Try re-running sync.")
                        awaiting_repl = True
                        break

                if not awaiting_repl:
                    self._out.print(
                        f"Config value of [[{name}]] does not exist and is expected based on "
                        f"your defined configuration.")
                    print_resolution_message = True
                    self._errors_detected = True

        if print_resolution_message:
            self._out.error(f"{SHARED_NAME_RESOLUTION_MESSAGE}")
        else:
            self._out.success("Shared keys have been validated.")

    def _can_replicate_from(self, source: str):
        try:
            if self.__get_param_encrypted(source) is not None:
                return True
            else:
                self._out.warn(
                    f"Replication source: [[{source}]] is missing from ParameterStore. "
                    f"It must be added before config replication can be configured.\n"
                )
                self._input_config_values({source})
                return True
        except ClientError as e:
            denied = "AccessDeniedException" == e.response['Error']['Code']
            if denied and "AWSKMS; Status Code: 400;" in e.response['Error'][
                    'Message']:
                self._out.error(
                    f"You do not have access to decrypt the value of Name: [[{source}]]"
                )
            elif denied:
                self._out.error(
                    f"You do not have access to Parameter: [[{source}]]")
            else:
                raise
        return False

    def __get_param_encrypted(self, source: str) -> Optional[str]:
        try:
            return self._ssm.get_parameter_encrypted(source)
        except ClientError as e:
            denied = "AccessDeniedException" == e.response['Error']['Code']
            if denied and "AWSKMS; Status Code: 400;" in e.response['Error'][
                    'Message']:
                self._out.error(
                    f"You do not have access to decrypt the value of Name: [[{source}]]"
                )
                return None
            elif denied:
                self._utils.error_exit(
                    f"You do not have access to Parameter: {source}")
            else:
                raise

    def _validate_replication_config(self,
                                     config_repl: Dict,
                                     app_conf: bool = True):
        """
        Validates replication config blocks are valid / legal. Prevents people from setting up replication from
        disallowed namespaces, etc. Exits with error if invalid config is discovered.

        Args:
            config_repl: Dict of KV Pairs for a repl config. Source -> Dest
            app_conf: bool: T/F - True if this is an application config block in an application config (figgy.json).
                    False if other, which for now is only repl-configs for data teams.
        """
        for key in config_repl:
            if app_conf:
                self._utils.validate(
                    re.match(
                        f'^/shared/.*$|^{self.context.defaults.service_ns}/.*$',
                        key) is not None,
                    f"The SOURCE of your replication configs must begin with `/shared/` or "
                    f"`{self.context.defaults.service_ns}/`. "
                    f"{key} is non compliant.")

            self._utils.validate(
                re.match(f'^{self.context.defaults.service_ns}/.*$',
                         config_repl[key]) is not None,
                f"The DESTINATION of your replication configs must always begin with "
                f"`{self.context.defaults.service_ns}/`")

    def _find_missing_shared_figs(self, namespace: str, config_repl: Dict,
                                  shared_names: set, merge_conf: Dict):
        """
            Notifies the user if there is a parameter that has been shared into their namespace by an outside party
            but they have not added it to the `shared_figs` block of their figgy.json
        """
        all_repl_cfgs = self._repl.get_all_configs(namespace)
        for cfg in all_repl_cfgs:
            in_merge_conf = self._in_merge_value(cfg.destination, merge_conf)

            if cfg.destination not in shared_names and cfg.type == REPL_TYPE_APP \
                    and cfg.destination not in config_repl.values() and not in_merge_conf:
                print(
                    f"It appears that {self.c.fg_bl}{cfg.user}{self.c.rs} shared "
                    f"{self.c.fg_bl}{cfg.source}{self.c.rs} to {self.c.fg_bl}{cfg.destination}{self.c.rs} "
                    f"and you have not added {self.c.fg_bl}{cfg.destination}{self.c.rs} to the "
                    f"{self.c.fg_bl}{SHARED_KEY}{self.c.rs} section of your figgy.json. This is also not "
                    f"referenced in any defined merge parameter. Please add "
                    f"{self.c.fg_bl}{cfg.destination}{self.c.rs} to your figgy.json, or delete this parameter "
                    f"and the replication config with the prune command.")

    def _in_merge_value(self, dest: str, merge_conf: Dict):
        for key in merge_conf:
            value = merge_conf[key]
            # 'value' can be a list or a str, but the way 'in' operates, this works either way. #dynamic programming
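            # e.g. with dest='/app/demo-time/db-host' and a hypothetical suffix ':uri', the probe string
            # is '${/app/demo-time/db-host:uri}'; the 'in' test is a substring check when 'value' is a
            # str and a membership check when it is a list.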
            for suffix in merge_suffixes:
                if f"${'{'}{dest}{suffix}{'}'}" in value:
                    return True

        return False

    def _fill_repl_conf_variables(self, repl_conf: Dict) -> Dict:
        repl_copy = {}
        all_vars = []
        for key, val in repl_conf.items():
            all_vars = all_vars + re.findall(r'\${(\w+)}', key)
            all_vars = all_vars + re.findall(r'\${(\w+)}', val)
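        # For instance (names invented for illustration), a repl config of
        # {'/data/${team}/password': '/app/${service}/replicated/password'} yields the template
        # variables {'team', 'service'}, each of which is prompted for below.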

        all_vars = set(all_vars)
        if all_vars:
            print(
                f"{self.c.fg_bl}{len(all_vars)} variables detected in: {self.c.rs}{self.c.fg_yl}"
                f"{self._config_path}{self.c.rs}\n")

        template_vals = {}
        for var in all_vars:
            print(f"Template variable: {self.c.fg_bl}{var}{self.c.rs} found.")
            input_val = Input.input(
                f"Please input a value for {self.c.fg_bl}{var}{self.c.rs}: ",
                min_length=1)
            template_vals[var] = input_val

        for key, val in repl_conf.items():
            updated_key = key
            updated_val = val

            for template_key, template_val in template_vals.items():
                updated_key = updated_key.replace(f"${{{template_key}}}",
                                                  template_val)
                updated_val = updated_val.replace(f"${{{template_key}}}",
                                                  template_val)

            repl_copy[updated_key] = updated_val

        return repl_copy

    def run_ci_sync(self) -> None:
        """
            Orchestrates a standard `sync` command WITHOUT the `--replication-only` flag set.
        """
        # Validate & parse figgy.json
        config = self._utils.get_ci_config(self._config_path)
        shared_names = set(
            self._utils.get_config_key_safe(SHARED_KEY, config, default=[]))
        repl_conf = self._utils.get_config_key_safe(REPLICATION_KEY,
                                                    config,
                                                    default={})
        repl_from_conf = self._utils.get_config_key_safe(REPL_FROM_KEY,
                                                         config,
                                                         default={})
        merge_conf = self._utils.get_config_key_safe(MERGE_KEY,
                                                     config,
                                                     default={})
        config_keys = set(
            self._utils.get_config_key_safe(CONFIG_KEY, config, default=[]))
        namespace = self._utils.get_namespace(config)
        merge_keys = set(merge_conf.keys())
        all_keys = KeyUtils.find_all_expected_names(config_keys, shared_names,
                                                    merge_conf, repl_conf,
                                                    repl_from_conf, namespace)

        repl_conf = KeyUtils.merge_repl_and_repl_from_blocks(
            repl_conf, repl_from_conf, namespace)
        # Add missing config values
        self._out.notify(
            f"Validating all configuration keys exist in ParameterStore.")
        self._input_config_values(config_keys)

        # Sync keys between PS / Local config
        print()
        self._sync_keys(namespace, all_keys)

        print()

        self._find_missing_shared_figs(namespace, repl_conf, shared_names,
                                       merge_conf)

        # Disabling requirement (for now) of replication to be in /replicated path
        # print()
        self._validate_replication_config(repl_conf, app_conf=True)

        print()
        # sync replication config
        all_shared_keys = shared_names | set(merge_conf.keys())
        self._sync_replication(repl_conf, all_shared_keys, namespace)

        print()
        self._sync_merge_keys(merge_conf, namespace)

        print()
        # validate expected keys exist
        self._validate_expected_names(all_keys, repl_conf, merge_conf)

    def run_repl_sync(self) -> None:
        """
        Orchestrates sync when the user passes in the `--replication-only` flag.
        """
        self._utils.validate(
            os.path.exists(self._config_path),
            f"Path {self._config_path} is invalid. "
            f"That file does not exist.")
        repl_conf = self._utils.get_repl_config(self._config_path)

        repl_conf = self._fill_repl_conf_variables(repl_conf)
        self._validate_replication_config(repl_conf, app_conf=False)
        self._sync_repl_configs(repl_conf)
        self._notify_of_data_repl_orphans(repl_conf)

    @VersionTracker.notify_user
    @AnonymousUsageTracker.track_command_usage
    def execute(self):
        print()
        if self._replication_only:
            self.run_repl_sync()
        else:
            self.run_ci_sync()

        if self._errors_detected:
            self._out.error_h2(
                'Sync failed. Please address the outputted errors.')
        else:
            self._out.success_h2('Sync completed with no errors!')
Exemplo n.º 19
0
 def __init__(self, ots_svc: OTSService, ots_context: OTSContext,
              colors_enabled: bool):
     super().__init__(ots_put, colors_enabled, ots_context)
     self._ots = ots_svc
     self._utils = Utils(colors_enabled)
     self._out = Output(colors_enabled)
Exemplo n.º 20
0
class Promote(ConfigCommand):

    def __init__(self, source_ssm: SsmDao, config_completer_init: WordCompleter,
                 colors_enabled: bool, config_context: ConfigContext, session_mgr: SessionManager):
        super().__init__(promote, colors_enabled, config_context)
        self.config_context = config_context
        self._source_ssm = source_ssm
        self._session_mgr = session_mgr
        self._config_completer = config_completer_init
        self._utils = Utils(colors_enabled)
        self._out = Output(colors_enabled)

    def _promote(self):
        repeat = True
        parameters: List[Dict] = []
        while repeat:
            namespace = Input.input("Please input a namespace prefix to promote:"
                               f" (i.e. {self.context.defaults.service_ns}/foo/): ", completer=self._config_completer)
            if not self._utils.is_valid_input(namespace, "namespace", notify=False):
                continue

            try:
                parameters: List[Dict] = self._source_ssm.get_all_parameters([namespace])

                if not parameters and self._source_ssm.get_parameter(namespace):
                    parameters, latest_version = self._source_ssm.get_parameter_details(namespace)
                    parameters = list(parameters)

                if parameters:
                    repeat = False
                else:
                    self._out.warn("\nNo parameters found. Try again.\n")
            except ClientError as e:
                print(f"{self.c.fg_rd}ERROR: >> {e}{self.c.rs}")
                continue

        self._out.notify(f'\nFound [[{len(parameters)}]] parameter{"s" if len(parameters) > 1 else ""} to migrate.\n')

        assumable_roles = self.context.defaults.assumable_roles
        matching_roles = list(set([x for x in assumable_roles if x.role == self.config_context.role]))
        valid_envs = set([x.run_env.env for x in matching_roles])
        valid_envs.remove(self.run_env.env)  # Remove current env, we can't promote from dev -> dev
        next_env = Input.select(f'Please select the destination environment.', valid_options=list(valid_envs))

        matching_role = [role for role in matching_roles if role.run_env == RunEnv(env=next_env)][0]
        env: GlobalEnvironment = GlobalEnvironment(role=matching_role, region=self.config_context.defaults.region)
        dest_ssm = SsmDao(self._session_mgr.get_session(env, prompt=False).client('ssm'))

        for param in parameters:
            if 'KeyId' in param:
                self._out.print(f"Skipping param: [[{param['Name']}]]. It is encrypted and cannot be migrated.")
            else:
                promote_it = Input.y_n_input(f"Would you like to promote: {param['Name']}?",
                                             default_yes=True)

                if promote_it:
                    val = self._source_ssm.get_parameter(param['Name'])
                    description = param.get('Description', "")
                    dest_ssm.set_parameter(param['Name'], val, description, SSM_STRING)
                    self._out.success(f"Successfully promoted [[{param['Name']}]] to [[{next_env}]].\r\n")

    @VersionTracker.notify_user
    @AnonymousUsageTracker.track_command_usage
    def execute(self):
        self._promote()
Exemplo n.º 21
0
 def __init__(self, colors_enabled=False):
     self.c = TerminalFactory(colors_enabled).instance().get_colors()
     self._o = Output(colors_enabled)
Exemplo n.º 22
0
class Put(ConfigCommand):
    def __init__(self, ssm_init: SsmDao, colors_enabled: bool,
                 config_context: ConfigContext,
                 config_view: RBACLimitedConfigView, get: Get):
        super().__init__(put, colors_enabled, config_context)
        self._ssm = ssm_init
        self._utils = Utils(colors_enabled)
        self._config_view = config_view
        self._get = get
        self._source_key = Utils.attr_if_exists(copy_from, config_context.args)
        self._out = Output(colors_enabled)

        self._select_name = [('class:', 'Please input a PS Name: ')]

        self._FILE_PREFIX = "file://"

    def put_param(self, key=None, loop=False, display_hints=True) -> None:
        """
        Allows a user to define a PS name and add a new parameter at that named location. User will be prompted for a
        value, desc, and whether or not the parameter is a secret. If (Y) is selected for the secret, the value
        will be encrypted with the KMS key mapped to the user's role.

        :param key: If specified, the user will be prompted for the specified key. Otherwise the user will be prompted
                    to specify the PS key to set.
        :param loop: Whether or not to continually loop and continue prompting the user for more keys.
        :param display_hints: Whether or not to display "Hints" to the user. You may want to turn this off if you are
                              looping and constantly calling put_param with a specified key.
        """

        value, desc, notify, put_another = True, None, False, True

        if display_hints:
            self._out.print(
                f"[[Hint:]] To upload a file's contents, pass in `file:///path/to/your/file` "
                f"in the value prompt.")

        while put_another:
            try:

                if not key:
                    lexer = PygmentsLexer(
                        FigLexer
                    ) if self.context.defaults.colors_enabled else None
                    style = style_from_pygments_cls(
                        FiggyPygment
                    ) if self.context.defaults.colors_enabled else None
                    key = Input.input(
                        'Please input a PS Name: ',
                        completer=self._config_view.get_config_completer(),
                        lexer=lexer,
                        style=style)
                    if self.parameter_is_existing_dir(key):
                        self._out.warn(
                            f'You attempted to store parameter named: {key},'
                            f' but it already exists in ParameterStore as a directory: {key}/'
                        )
                        key = None
                        continue

                if self._source_key:
                    plain_key = '/'.join(key.strip('/').split('/')[2:])
                    source_key = f'{self._source_key}/{plain_key}'
                    orig_value, orig_description = self._get.get_val_and_desc(
                        source_key)
                else:
                    orig_description = ''
                    orig_value = ''

                value = Input.input(f"Please input a value for {key}: ",
                                    default=orig_value if orig_value else '')

                if value.lower().startswith(self._FILE_PREFIX):
                    value = Utils.load_file(
                        value.replace(self._FILE_PREFIX, ""))

                existing_desc = self._ssm.get_description(key)
                desc = Input.input(
                    f"Please input an optional description: ",
                    optional=True,
                    default=existing_desc if existing_desc else
                    orig_description if orig_description else '')

                is_secret = Input.is_secret()
                parameter_type, kms_id = SSM_SECURE_STRING if is_secret else SSM_STRING, None
                if is_secret:
                    valid_keys = self._config_view.get_authorized_kms_keys()
                    if len(valid_keys) > 1:
                        key_name = Input.select_kms_key(valid_keys)
                    else:
                        key_name = valid_keys[0]

                    kms_id = self._config_view.get_authorized_key_id(
                        key_name, self.run_env)

                notify = True

                self._ssm.set_parameter(key,
                                        value,
                                        desc,
                                        parameter_type,
                                        key_id=kms_id)
                if key not in self._config_view.get_config_completer().words:
                    self._config_view.get_config_completer().words.append(key)

            except ClientError as e:
                if "AccessDeniedException" == e.response['Error']['Code']:
                    self._out.error(
                        f"\n\nYou do not have permissions to add config values at the path: [[{key}]]"
                    )
                    self._out.warn(
                        f"Your role of {self.context.role} may add keys under the following namespaces: "
                        f"{self._config_view.get_authorized_namespaces()}")
                    self._out.print(
                        f"Error message: {e.response['Error']['Message']}")
                else:
                    self._out.error(
                        f"Exception caught attempting to add config: {e}")

            print()
            if loop:
                to_continue = input(f"\nAdd another? (y/N): ")
                put_another = True if to_continue.lower() == 'y' else False
                key = None
            else:
                put_another = False

    @VersionTracker.notify_user
    @AnonymousUsageTracker.track_command_usage
    def execute(self):
        self.put_param(loop=True)

    def parameter_is_existing_dir(self, name: str):
        all_names: List[str] = self._config_view.get_config_names()
        match = list(filter(lambda x: f'{name}/' in x, all_names))
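        # e.g. if '/app/demo-time/db-url' already exists (hypothetical name), then name='/app/demo-time'
        # matches because '/app/demo-time/' is a substring of it, so the caller refuses to store a value
        # at a path that already behaves as a directory.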
        return bool(match)
Exemplo n.º 23
0
class Upgrade(MaintenanceCommand):
    """
    Drives the --version command
    """
    def __init__(self, maintenance_context: MaintenanceContext,
                 config_service: Optional[ConfigService]):
        super().__init__(version, maintenance_context.defaults.colors_enabled,
                         maintenance_context)
        self.tracker = VersionTracker(self.context.defaults, config_service)
        self.upgrade_mgr = UpgradeManager(
            maintenance_context.defaults.colors_enabled)
        self._utils = Utils(
            colors_enabled=maintenance_context.defaults.colors_enabled)
        self._out = Output(
            colors_enabled=maintenance_context.defaults.colors_enabled)

    def upgrade(self):
        latest_version: FiggyVersionDetails = self.tracker.get_version()
        install_success, upgrade_it = False, True

        if self.upgrade_mgr.is_pip_install():
            self._out.error(
                f"Figgy appears to have been installed with pip. Please upgrade [[{CLI_NAME}]] with "
                f"`pip` instead.")
            self._out.print(
                f"\n\n[[Try this command]]: pip install figgy-cli --upgrade")

            self._out.print(
                f"\n\nPip based [[{CLI_NAME}]] installations do not support automatic upgrades and "
                f"instead require pip-managed upgrades; however,  Homebrew, one-line, and manual "
                f"installations support auto-upgrade. Please consider installing figgy through one "
                f"of these other methods to take advantage of this feature. "
                f"It will save you time, help keep you up-to-date, and enable important features like "
                f"release-rollbacks and canary releases! "
                f"[[https://www.figgy.dev/docs/getting-started/install/]]")
            sys.exit(0)

        install_path = self.upgrade_mgr.install_path

        if not install_path:
            self._utils.error_exit(
                f"Unable to detect local figgy installation. Please reinstall figgy and follow one "
                f"of the recommended installation procedures.")

        if latest_version.version == VERSION:
            self._out.success(
                f'You are currently using the latest version of [[{CLI_NAME}]]: [[{VERSION}]]'
            )
            upgrade_it = False
        elif self.tracker.upgrade_available():
            self._out.notify_h2(
                f"New version: [[{latest_version.version}]] is more recent than your version: [[{VERSION}]]"
            )
            upgrade_it = True
        elif not self.tracker.cloud_version_compatible_with_upgrade():
            self._out.notify_h2(
                f"Version [[{self.tracker.get_version().version}]] of the Figgy CLI is available but your "
                f"current version of Figgy Cloud ([[{self.tracker.current_cloud_version()}]]) is not compatible."
                f" Your administrator must first update FiggyCloud to at least version: "
                f"[[{self.tracker.required_cloud_version()}]] before you can upgrade Figgy."
            )
            upgrade_it = False
        else:
            self._out.notify_h2(
                f"Your version: [[{VERSION}]] is more recent then the current recommended version "
                f"of {CLI_NAME}: [[{latest_version.version}]]")
            upgrade_it = Input.y_n_input(
                f'Would you like to revert to the current recommended version '
                f'of {CLI_NAME}?')

        if upgrade_it:
            if self._utils.is_mac():
                self._out.print(
                    f"\nMacOS auto-upgrade is supported. Performing auto-upgrade."
                )
                install_success = self.install_mac(latest_version)
            elif self._utils.is_linux():
                self._out.print(
                    f"\nLinux auto-upgrade is supported. Performing auto-upgrade."
                )
                install_success = self.install_linux(latest_version)
            elif self._utils.is_windows():
                self._out.print(
                    f"\nWindows auto-upgrade is supported. Performing auto-upgrade."
                )
                install_success = self.install_windows(latest_version)

            if install_success:
                self._out.success(
                    f"Installation successful! Exiting. Rerun `[[{CLI_NAME}]]` "
                    f"to use the latest version!")
            else:
                self._out.warn(
                    f"\nUpgrade may not have been successful. Check by re-running "
                    f"[[`{CLI_NAME}` --version]] to see if it was. If it wasn't, please reinstall [[`{CLI_NAME}`]]. "
                    f"See {INSTALL_URL}.")

    def install_mac(self, latest_version: FiggyVersionDetails) -> bool:
        install_path = '/usr/local/bin/figgy'

        if self.upgrade_mgr.is_brew_install():
            self._out.notify_h2(f"Homebrew installation detected!")

            print(
                f"This upgrade process will not remove your brew installation but will instead unlink it. "
                f"Going forward you will no longer need homebrew to manage {CLI_NAME}. Continuing is recommended.\n"
            )

            selection = Input.y_n_input(f"Continue? ", default_yes=True)
        else:
            selection = True

        if selection:
            self.upgrade_mgr.install_onedir(install_path,
                                            latest_version.version, MAC)
            return True
        else:
            self._out.print(
                f'\n[[Auto-upgrade aborted. To upgrade through brew run:]] \n'
                f'-> brew upgrade figtools/figgy/figgy')
            self._out.warn(
                f"\n\nYou may continue to manage [[{CLI_NAME}]] through Homebrew, but doing so will "
                f"limit some upcoming functionality around canary releases, rollbacks, and dynamic "
                f"version-swapping.")
            return False

    def install_linux(self, latest_version: FiggyVersionDetails) -> bool:
        install_path = self.upgrade_mgr.install_path
        self.upgrade_mgr.install_onedir(install_path, latest_version.version,
                                        LINUX)
        return True

    def install_windows(self, latest_version: FiggyVersionDetails) -> bool:
        install_path = self.upgrade_mgr.install_path
        self.upgrade_mgr.install_onedir(install_path, latest_version.version,
                                        WINDOWS)
        return True

    @AnonymousUsageTracker.track_command_usage
    def execute(self):
        self.upgrade()
Exemplo n.º 24
0
class Delete(ConfigCommand):

    def __init__(self, ssm_init: SsmDao, cfg_view: RBACLimitedConfigView,
                 config_init: ConfigDao, repl_init: ReplicationDao, context: ConfigContext, colors_enabled: bool,
                 config_completer: WordCompleter):
        super().__init__(delete, colors_enabled, context)
        self._ssm = ssm_init
        self._config = config_init
        self._repl = repl_init
        self._utils = Utils(colors_enabled)
        self._config_completer = config_completer
        self._out = Output(colors_enabled)
        self._cfg_view = cfg_view

    def delete_param(self, key) -> bool:
        """
        Manages safe deletion through the CLI. Prevents deletion of replication sources. Prompts user for deletion of
        replication destinations.
        Args:
            key: PS Name / Key

        Returns: bool - T/F based on whether a parameter was actually deleted.
        """
        sources = self._repl.get_cfgs_by_src(key)  # type: List[ReplicationConfig]
        repl_conf = self._repl.get_config_repl(key)  # type: ReplicationConfig

        if len(sources) > 0:
            self._out.error(f"You're attempting to delete a key that is the source for at least one "
                            f"replication config.\n[[{key}]] is actively replicating to these"
                            f" destinations:\n")
            for src in sources:
                self._out.warn(f"Dest: [[{src.destination}]]. This config was created by [[{src.user}]]. ")

            self._out.print(
                f"\r\n[[{key}]] is a replication SOURCE. Deleting this source would effectively BREAK "
                f"replication to the above printed destinations. You may NOT delete sources that are actively "
                f"replicating. Please delete the above printed DESTINATIONS first. "
                f"Once they have been deleted, you will be allowed to delete this "
                f"SOURCE.")
            return False
        elif repl_conf is not None:
            selection = "unselected"
            while selection.lower() != "y" and selection.lower() != "n":
                repl_msg = [
                    (f'class:{self.c.rd}', f"{key} is an active replication destination created by "),
                    (f'class:{self.c.bl}', f"{repl_conf.user}. "),
                    (f'class:{self.c.rd}', f"Do you want to ALSO delete this replication config and "
                                           f"permanently delete {key}? "),
                    (f'class:', "(y/N): ")]
                selection = prompt(repl_msg, completer=WordCompleter(['Y', 'N']), style=FIGGY_STYLE)
                selection = selection if selection != '' else 'n'
                if selection.strip().lower() == "y":
                    self._repl.delete_config(key)
                    self._ssm.delete_parameter(key)
                    self._out.success(f"[[{key}]] and replication config destination deleted successfully.")
                    return True
                elif selection.strip().lower() == "n":
                    return False

        else:
            try:
                self._ssm.delete_parameter(key)
            except ClientError as e:
                if e.response['Error']['Code'] == 'ParameterNotFound':
                    pass
                elif "AccessDeniedException" == e.response['Error']['Code']:
                    self._out.error(f"You do not have permissions to delete: {key}")
                    return False
                else:
                    raise

            print(f"{self.c.fg_gr}{key} deleted successfully.{self.c.rs}\r\n")
            return True

    def _delete_param(self):
        """
        Prompts the user for a parameter name to delete, then deletes it. Loops until the user
        declines to delete another.
        """
        key, delete_another = None, True

        while delete_another:
            key = Input.input('PS Name to Delete: ', completer=self._config_completer)
            try:
                if self.delete_param(key):
                    if key in self._config_completer.words:
                        self._config_completer.words.remove(key)
                else:
                    continue
            except ClientError as e:
                error_code = e.response['Error']['Code']
                if "AccessDeniedException" == error_code:
                    self._out.error(f"\n\nYou do not have permissions to delete config values at the path: [[{key}]]")
                    self._out.warn(f"Your role of {self.context.role} may delete keys under the following namespaces: "
                                   f"{self._cfg_view.get_authorized_namespaces()}")
                    self._out.print(f"Error message: {e.response['Error']['Message']}")
                elif "ParameterNotFound" == error_code:
                    self._out.error(f"The specified Name: [[{key}]] does not exist in the selected environment. "
                                    f"Please try again.")
                else:
                    self._out.error(f"Exception caught attempting to delete config: {e.response['Message']}")

            print()
            to_continue = input(f"Delete another? (Y/n): ")
            to_continue = to_continue if to_continue != '' else 'y'
            delete_another = to_continue.lower() == "y"

    @VersionTracker.notify_user
    @AnonymousUsageTracker.track_command_usage
    def execute(self):
        self._delete_param()
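
Condensed into a standalone sketch, the decision logic in delete_param above covers three cases: the key is a replication source (blocked), a replication destination (deleted only with confirmation), or a plain parameter (deleted). The types and names below (ReplConfig, delete_decision) are hypothetical and only illustrate that flow:

from dataclasses import dataclass
from typing import List


@dataclass
class ReplConfig:
    source: str
    destination: str


def delete_decision(key: str, repl_configs: List[ReplConfig], confirm: bool) -> str:
    """Return the action taken for `key`: 'blocked', 'deleted_with_repl', 'skipped', or 'deleted'."""
    sources = [c for c in repl_configs if c.source == key]
    dest_conf = next((c for c in repl_configs if c.destination == key), None)

    if sources:
        # Deleting a source would silently break replication to its destinations.
        return "blocked"
    if dest_conf is not None:
        # Destinations may be deleted, but only with explicit confirmation.
        return "deleted_with_repl" if confirm else "skipped"
    return "deleted"


if __name__ == "__main__":
    configs = [ReplConfig(source="/shared/db-url", destination="/app/svc/replicated/db-url")]
    print(delete_decision("/shared/db-url", configs, confirm=True))        # blocked
    print(delete_decision("/app/svc/replicated/db-url", configs, True))    # deleted_with_repl
    print(delete_decision("/app/svc/standalone", configs, False))          # deleted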
Exemplo n.º 25
0
 def __init__(self, cli_defaults: CLIDefaults, config_service: Optional[ConfigService]):
     self._cli_defaults = cli_defaults
     self.c = TerminalFactory(self._cli_defaults.colors_enabled).instance().get_colors()
     self._config = config_service
     self._out = Output(colors_enabled=cli_defaults.colors_enabled)
Exemplo n.º 26
0
class Prune(ConfigCommand):
    """
    Detects stray ParameterStore names, replication configurations, and merge keys, then
    prompts the user to delete them. This is typically run after the `sync` command informs
    the user that there are stray configurations.
    """
    def __init__(self,
                 ssm: SsmDao,
                 ddb: ConfigDao,
                 repl_dao: ReplicationDao,
                 context: ConfigContext,
                 config_completer_init: WordCompleter,
                 colors_enabled: bool,
                 delete: Delete,
                 args=None):
        super().__init__(prune, colors_enabled, context)
        self._ssm = ssm  # type: SsmDao
        self._config_dao = ddb  # type: ConfigDao
        self._repl = repl_dao
        self._config_completer = config_completer_init  # type: WordCompleter
        self._utils = Utils(colors_enabled)
        self.example = f"{self.c.fg_bl}{CLI_NAME} config {self.command_printable} --env dev " \
            f"--config /path/to/figgy.json{self.c.rs}"
        self._config_path = context.ci_config_path if context.ci_config_path else Utils.find_figgy_json(
        )
        self._out = Output(colors_enabled)

        # If user passes in --info flag, we don't need all of this to be initialized.
        if not hasattr(args, info.name) or args.info is False:
            # Validate & parse figgy.json
            self._config = self._utils.get_ci_config(
                self._config_path)  # type: Dict
            self._shared_names = set(
                self._utils.get_config_key_safe(SHARED_KEY,
                                                self._config,
                                                default=[]))  # type: Set
            self._repl_conf = self._utils.get_config_key_safe(
                REPLICATION_KEY, self._config, default={})  # type: Dict
            self._merge_conf = self._utils.get_config_key_safe(
                MERGE_KEY, self._config, default={})  # type: Dict
            self._config_keys = set(
                self._utils.get_config_key_safe(CONFIG_KEY,
                                                self._config,
                                                default=[]))  # type: Set
            self._merge_keys = set(self._merge_conf.keys())  # type: Set
            self._namespace = self._utils.get_namespace(
                self._config)  # type: str
            self._delete_command = delete
            self._repl_from_conf = self._utils.get_config_key_safe(
                REPL_FROM_KEY, self._config, default={})
            self._repl_conf = KeyUtils.merge_repl_and_repl_from_blocks(
                self._repl_conf, self._repl_from_conf, self._namespace)

            # Build list of all keys found across all config types
            self._all_keys = KeyUtils().find_all_expected_names(
                self._config_keys, self._shared_names, self._merge_conf,
                self._repl_conf, self._repl_from_conf, self._namespace)

    # Prompts for this file
    def _cleanup_parameters(self, config_keys: Set):
        """
        Prompts user for prune of stray ParameterStore names.
        Args:
            config_keys: set() -> Set of parameters that are found as defined in the figgy.json file for a svc
        """

        self._out.notify(f"Checking for stray config names.\r\n")

        # Find & Prune stray keys
        ps_keys = set(
            list(
                map(lambda x: x['Name'],
                    self._ssm.get_all_parameters([self._namespace]))))
        ps_only_keys = ps_keys.difference(config_keys)
        for key in ps_only_keys:
            selection = Input.y_n_input(
                f"{key} exists in ParameterStore but does not exist "
                f"in your config, do you want to delete it?",
                default_yes=False)

            if selection:
                self._delete_command.delete_param(key)
            else:
                self._out.notify("OK, skipping due to user selection.")
        if not ps_only_keys:
            print(f"{self.c.fg_bl}No stray keys found.{self.c.rs}")

    def _cleanup_replication(self) -> None:
        """
        Cleans up stray replication and merge configurations.
        Args:
            config_repl: The replication config dictionary as parsed from the figgy.json file
            shared_names: Expected parameters as defined in the figgy.json
            config_merge: The merge config dict as defined
            run_env: RunEnv object
            namespace: str -> /app/service-name as defined or parsed from the figgy.json file.
        """

        self._out.notify(f"Checking for stray replication configs.")
        remote_cfgs = self._repl.get_all_configs(self._namespace)
        notify = True
        if remote_cfgs:
            for cfg in remote_cfgs:
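                # A remote replication config is considered stray when neither its source nor its
                # destination is referenced in the local figgy.json (repl, shared, or merge blocks)
                # and its source is either a list or falls under the shared or service namespace.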
                if cfg.source not in list(self._repl_conf.keys()) \
                        and cfg.destination not in list(self._repl_conf.values()) \
                        and cfg.destination not in self._shared_names \
                        and cfg.destination not in list(self._merge_conf.keys()) \
                        and (isinstance(cfg.source, list) or cfg.source.startswith(shared_ns)
                             or cfg.source.startswith(self.context.defaults.service_ns)):
                    notify = False

                    selection = "unselected"
                    while selection.lower() != "y" and selection.lower(
                    ) != "n":
                        selection = input(
                            f"Remote replication config with {self.c.fg_bl}{self._namespace}{self.c.rs} replication "
                            f"mapping of: {self.c.fg_bl}{cfg.source} -> {cfg.destination}{self.c.rs} does not "
                            f"exist in your figgy.json. Should this be removed? (y/N): "
                        ).lower()
                        selection = selection if selection != '' else 'n'
                        if selection == "y":
                            self._repl.delete_config(cfg.destination)
        if notify:
            self._out.success(
                "No remote replication configs found available for prune under namespace: "
                f"[[{self._namespace}]]")

    @VersionTracker.notify_user
    @AnonymousUsageTracker.track_command_usage
    def execute(self):
        # prune service configs
        print()
        self._cleanup_parameters(set(self._all_keys))

        print()
        # prune replication configs
        self._cleanup_replication()
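
At its core, _cleanup_parameters above is a set difference between the names found in Parameter Store and the names declared in figgy.json, followed by a per-key confirmation prompt. A minimal sketch of that comparison (hypothetical names; plain input() stands in for the Input.y_n_input prompt):

from typing import Set


def find_stray_parameters(remote_names: Set[str], expected_names: Set[str]) -> Set[str]:
    """Names present in Parameter Store but not declared in figgy.json."""
    return remote_names - expected_names


if __name__ == "__main__":
    remote = {"/app/demo/db-url", "/app/demo/api-key", "/app/demo/old-flag"}
    expected = {"/app/demo/db-url", "/app/demo/api-key"}
    for stray in sorted(find_stray_parameters(remote, expected)):
        answer = input(f"{stray} is not in your config. Delete it? (y/N): ").strip().lower()
        print(f"deleting {stray}" if answer == "y" else f"skipping {stray}")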
Exemplo n.º 27
0
class Restore(ConfigCommand):
    def __init__(self, ssm_init: SsmDao, kms_init: KmsService,
                 config_init: ConfigDao, repl_dao: ReplicationDao,
                 audit_dao: AuditDao, cfg_view: RBACLimitedConfigView,
                 colors_enabled: bool, context: ConfigContext,
                 config_completer: WordCompleter, delete: Delete):
        super().__init__(restore, colors_enabled, context)
        self._config_context = context
        self._ssm = ssm_init
        self._kms = kms_init
        self._config = config_init
        self._repl = repl_dao
        self._audit = audit_dao
        self._cfg_view = cfg_view
        self._utils = Utils(colors_enabled)
        self._point_in_time = context.point_in_time
        self._config_completer = config_completer
        self._delete = delete
        self._out = Output(colors_enabled=colors_enabled)

    def _client_exception_msg(self, item: RestoreConfig, e: ClientError):
        if "AccessDeniedException" == e.response["Error"]["Code"]:
            self._out.error(
                f"\n\nYou do not have permissions to restore config at the path: [[{item.ps_name}]]"
            )
        else:
            self._out.error(
                f"Error message: [[{e.response['Error']['Message']}]]")

    def get_parameter_arn(self, parameter_name: str):
        account_id = self._ssm.get_parameter(ACCOUNT_ID_PATH)

        return f"arn:aws:ssm:us-east-1:{account_id}:parameter{parameter_name}"

    def _restore_param(self) -> None:
        """
        Allows the user to look up a Parameter Store entry's restorable history from DynamoDB and restore a prior value, if desired.
        """

        table_entries = []

        ps_name = prompt(f"Please input PS key to restore: ",
                         completer=self._config_completer)

        if self._is_replication_destination(ps_name):
            repl_conf = self._repl.get_config_repl(ps_name)
            self._print_cannot_restore_msg(repl_conf)
            exit(0)

        self._out.notify(
            f"\n\nAttempting to retrieve all restorable values of [[{ps_name}]]"
        )
        items: List[RestoreConfig] = self._audit.get_parameter_restore_details(
            ps_name)

        if len(items) == 0:
            self._out.warn(
                "No restorable values were found for this parameter.")
            return

        for i, item in enumerate(items):
            date = time.strftime("%Y-%m-%d %H:%M:%S",
                                 time.localtime(item.ps_time / 1000))

            # we need to decrypt the value, if encrypted, in order to show it to the user
            if item.ps_key_id:
                item.ps_value = self._kms.decrypt_with_context(
                    item.ps_value,
                    {"PARAMETER_ARN": self.get_parameter_arn(item.ps_name)},
                )
            table_entries.append([i, date, item.ps_value, item.ps_user])

        self._out.print(
            tabulate(
                table_entries,
                headers=["Item #", "Time Created", "Value", "User"],
                tablefmt="grid",
                numalign="center",
                stralign="left",
            ))

        valid_options = [f'{x}' for x in range(0, len(items))]
        choice = int(
            Input.select("Select an item number to restore: ",
                         valid_options=valid_options))
        item = items[choice] if items[choice] else None

        restore = Input.y_n_input(
            f"Are you sure you want to restore item #{choice} and have it be the latest version? ",
            default_yes=False)

        if not restore:
            self._utils.warn_exit("Restore aborted.")

        key_id = None if item.ps_type == "String" else item.ps_key_id

        try:
            self._ssm.set_parameter(item.ps_name,
                                    item.ps_value,
                                    item.ps_description,
                                    item.ps_type,
                                    key_id=key_id)

            current_value = self._ssm.get_parameter(item.ps_name)
            if current_value == item.ps_value:
                self._out.success("Restore was successful")
            else:
                self._out.error(
                    "Latest version in parameter store doesn't match what we restored."
                )
                self._out.print(
                    f"Current value: [[{current_value}]].  Expected value: [[{item.ps_value}]]"
                )

        except ClientError as e:
            self._client_exception_msg(item, e)

    def _decrypt_if_applicable(self, entry: RestoreConfig) -> str:
        if entry.ps_type != "String":
            return self._kms.decrypt_with_context(
                entry.ps_value,
                {"PARAMETER_ARN": self.get_parameter_arn(entry.ps_name)})
        else:
            return entry.ps_value

    def _is_replication_destination(self, ps_name: str):
        return self._repl.get_config_repl(ps_name)

    def _restore_params_to_point_in_time(self):
        """
        Restores parameters to their state at a point in time provided by the user.
        Replays parameter history up to that point in time so versioning remains intact.
        """

        repl_destinations = []
        ps_prefix = Input.input(
            f"Which parameter store prefix would you like to recursively restore? "
            f"(e.g., /app/demo-time): ",
            completer=self._config_completer)

        authed_nses = self._cfg_view.get_authorized_namespaces()
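        # valid_prefix is True when ps_prefix starts with at least one authorized namespace, otherwise False.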
        valid_prefix = (
            [True for ns in authed_nses if ps_prefix.startswith(ns)]
            or [False])[0]
        self._utils.validate(
            valid_prefix,
            f"Selected namespace must begin with a 'Fig Tree' you have access to. "
            f"Such as: {authed_nses}")

        time_selected, time_converted = None, None
        try:
            time_selected = Input.input(
                "Seconds since epoch to restore latest values from: ")
            time_converted = datetime.fromtimestamp(float(time_selected))
        except ValueError as e:
            if "out of range" in e.args[0]:
                try:
                    time_converted = datetime.fromtimestamp(
                        float(time_selected) / 1000)
                except ValueError as e:
                    self._utils.error_exit(
                        "Make sure you're using a format of either seconds or milliseconds since epoch."
                    )
            elif "could not convert" in e.args[0]:
                self._utils.error_exit(
                    f"The format of this input should be seconds since epoch. (e.g., 1547647091)\n"
                    f"Try using: https://www.epochconverter.com/ to convert your date to this "
                    f"specific format.")
            else:
                self._utils.error_exit(
                    "An unexpected exception triggered: "
                    f"'{e}' while trying to convert {time_selected} to 'datetime' format."
                )

        self._utils.validate(
            time_converted is not None,
            f"`{CLI_NAME}` encountered an error parsing your input for "
            f"target rollback time.")
        keep_going = Input.y_n_input(
            f"Are you sure you want to restore all figs under {ps_prefix} values to their state at: "
            f"{time_converted}? ",
            default_yes=False)

        if not keep_going:
            self._utils.warn_exit("Aborting restore due to user selection")

        ps_history: PSHistory = self._audit.get_parameter_history_before_time(
            time_converted, ps_prefix)
        restore_count = len(ps_history.history.values())

        if len(ps_history.history.values()) == 0:
            self._utils.warn_exit(
                "No results found for time range.  Aborting.")

        last_item_name = 'Unknown'
        try:
            for item in ps_history.history.values():
                last_item_name = item.name

                if self._is_replication_destination(item.name):
                    repl_destinations.append(item.name)
                    continue

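                # The most recent action for this parameter at the target time was a PUT, so its
                # history up to that point can be replayed.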
                if item.cfg_at(time_converted).ps_action == SSM_PUT:
                    cfgs_before: List[RestoreConfig] = item.cfgs_before(
                        time_converted)
                    cfg_at: RestoreConfig = item.cfg_at(time_converted)
                    ssm_value = self._ssm.get_parameter(item.name)
                    dynamo_value = self._decrypt_if_applicable(cfg_at)

                    if ssm_value != dynamo_value:
                        if ssm_value is not None:
                            self._ssm.delete_parameter(item.name)

                        for cfg in cfgs_before:
                            decrypted_value = self._decrypt_if_applicable(cfg)
                            self._out.print(
                                f"\nRestoring: [[{cfg.ps_name}]] \nValue: [[{decrypted_value}]]"
                                f"\nDescription: [[{cfg.ps_description}]]\nKMS Key: "
                                f"[[{cfg.ps_key_id if cfg.ps_key_id else '[[No KMS Key Specified]]'}]]"
                            )
                            self._out.notify(
                                f"Replaying version: [[{cfg.ps_version}]] of [[{cfg.ps_name}]]"
                            )
                            print()

                            self._ssm.set_parameter(cfg.ps_name,
                                                    decrypted_value,
                                                    cfg.ps_description,
                                                    cfg.ps_type,
                                                    key_id=cfg.ps_key_id)
                    else:
                        self._out.success(
                            f"Config: {item.name} is current. Skipping.")
                else:
                    # This item must have been a delete, which means this config didn't exist at that time.
                    self._out.print(
                        f"Checking if [[{item.name}]] exists. It was previously deleted."
                    )
                    self._prompt_delete(item.name)
        except ClientError as e:
            if "AccessDeniedException" == e.response["Error"]["Code"]:
                self._utils.error_exit(
                    f"\n\nYou do not have permissions to restore config at the path:"
                    f" [[{last_item_name}]]")
            else:
                self._utils.error_exit(
                    f"Caught error when attempting restore. {e}")

        for item in repl_destinations:
            cfg = self._repl.get_config_repl(item)
            self._print_cannot_restore_msg(cfg)

        print("\n\n")
        if not repl_destinations:
            self._out.success_h2(
                f"[[{restore_count}]] configurations restored successfully!")
        else:
            self._out.warn(
                f"\n\n[[{len(repl_destinations)}]] configurations were not restored because they are shared "
                f"from other destinations. To restore them, restore their sources."
            )
            self._out.success(
                f"{restore_count - len(repl_destinations)} configurations restored successfully."
            )

    def _print_cannot_restore_msg(self, repl_conf: ReplicationConfig):
        self._out.print(
            f"Parameter: [[{repl_conf.destination}]] is a shared parameter. ")
        self._out.print(f"Shared From: [[{repl_conf.source}]]")
        self._out.print(f"Shared by: [[{repl_conf.user}]]")
        self._out.warn(
            f"To restore this parameter you should restore the source: {repl_conf.source} instead!"
        )
        print()

    def _prompt_delete(self, name):
        param = self._ssm.get_parameter_encrypted(name)
        if param:
            selection = Input.y_n_input(
                f"PS Name: {name} did not exist at this restore time."
                f" Delete it? ",
                default_yes=False)

            if selection:
                self._delete.delete_param(name)

    @VersionTracker.notify_user
    @AnonymousUsageTracker.track_command_usage
    def execute(self):
        if self._point_in_time:
            self._restore_params_to_point_in_time()
        else:
            self._restore_param()
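
The point-in-time restore above accepts an epoch timestamp in either seconds or milliseconds, retrying the millisecond interpretation when the first attempt is out of range. A self-contained sketch of that fallback (the function name parse_epoch is hypothetical):

from datetime import datetime
from typing import Optional


def parse_epoch(raw: str) -> Optional[datetime]:
    """Interpret `raw` as seconds since epoch, falling back to milliseconds; None if unparseable."""
    try:
        return datetime.fromtimestamp(float(raw))
    except ValueError as e:
        if "out of range" in str(e):
            try:
                return datetime.fromtimestamp(float(raw) / 1000)
            except ValueError:
                return None
        return None
    except OverflowError:
        # Some platforms raise OverflowError rather than ValueError for huge timestamps.
        try:
            return datetime.fromtimestamp(float(raw) / 1000)
        except (ValueError, OverflowError):
            return None


if __name__ == "__main__":
    print(parse_epoch("1547647091"))      # seconds since epoch
    print(parse_epoch("1547647091000"))   # milliseconds since epoch
    print(parse_epoch("not-a-number"))    # None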
Exemplo n.º 28
0
 def __init__(self, defaults: CLIDefaults):
     self._defaults = defaults
     self._out = Output(self._defaults.colors_enabled)
     self._utils = Utils(self._defaults.colors_enabled)
Exemplo n.º 29
0
class Login(HelpCommand, ABC):
    """
    Log the user into every possible environment they have access to. Sessions are cached.
    This improves figgy performance throughout the day.
    """
    def __init__(self, help_context: HelpContext, figgy_setup: FiggySetup,
                 figgy_context: FiggyContext):
        super().__init__(login, Utils.not_windows(), help_context)
        self._setup = figgy_setup
        self._defaults: CLIDefaults = figgy_setup.get_defaults()
        self._figgy_context = figgy_context
        self._utils = Utils(self._defaults.colors_enabled)
        self._aws_cfg = AWSConfig(color=self.c)
        self._out = Output(self._defaults.colors_enabled)

        self.example = f"\n\n{self.c.fg_bl}{CLI_NAME} {login.name} \n" \
                       f"{self.c.rs}{self.c.fg_yl}  --or--{self.c.rs}\n" \
                       f"{self.c.fg_bl}{CLI_NAME} {login.name} {sandbox.name}{self.c.rs}"

    def login(self):
        self._utils.validate(
            self._defaults.provider.name in Provider.names(),
            f"You cannot login until you've configured Figgy. Please run `{CLI_NAME}` --configure"
        )
        provider = SessionProviderFactory(self._defaults,
                                          self._figgy_context).instance()
        assumable_roles: List[AssumableRole] = provider.get_assumable_roles()
        self._out.print(
            f"{self.c.fg_bl}Found {len(assumable_roles)} possible logins. Logging in...{self.c.rs}"
        )

        for role in assumable_roles:
            self._out.print(
                f"Login successful for {role.role} in environment: {role.run_env}"
            )
            provider.get_session_and_role(role, False)

        self._out.print(
            f"{self.c.fg_gr}Login successful. All sessions are cached.{self.c.rs}"
        )

    def login_sandbox(self):
        """
        If user provides --role flag, skip role & env selection for a smoother user experience.
        """
        EnvironmentValidator(self._defaults).validate_environment_variables()

        Utils.wipe_vaults() or Utils.wipe_defaults(
        ) or Utils.wipe_config_cache()

        self._out.print(
            f"{self.c.fg_bl}Logging you into the Figgy Sandbox environment.{self.c.rs}"
        )
        user = Input.input("Please input a user name: ", min_length=2)
        colors = Input.select_enable_colors()

        # Prompt user for role if --role not provided
        if commands.role not in self.context.options:
            role = Input.select("\n\nPlease select a role to impersonate: ",
                                valid_options=SANDBOX_ROLES)
        else:
            role = self.context.role.role
            self._utils.validate(
                role in SANDBOX_ROLES,
                f"Provided role: >>>`{role}`<<< is not a valid sandbox role."
                f" Please choose from {SANDBOX_ROLES}")

        params = {'role': role, 'user': user}
        result = requests.get(GET_SANDBOX_CREDS_URL, params=params)

        if result.status_code != 200:
            self._utils.error_exit(
                "Unable to get temporary credentials from the Figgy sandbox. If this problem "
                f"persists please notify us on our GITHUB: {FIGGY_GITHUB}")

        data = result.json()
        response = SandboxLoginResponse(**data)
        self._aws_cfg.write_credentials(
            access_key=response.AWS_ACCESS_KEY_ID,
            secret_key=response.AWS_SECRET_ACCESS_KEY,
            token=response.AWS_SESSION_TOKEN,
            region=FIGGY_SANDBOX_REGION,
            profile_name=FIGGY_SANDBOX_PROFILE)

        defaults = CLIDefaults.sandbox(user=user, role=role, colors=colors)
        self._setup.save_defaults(defaults)

        run_env = RunEnv(
            env='dev',
            account_id=SANDBOX_DEV_ACCOUNT_ID) if self.context.role else None

        config_mgr = ConfigManager.figgy()
        config_mgr.set(Config.Section.Bastion.PROFILE, FIGGY_SANDBOX_PROFILE)
        defaults = self._setup.configure_extras(defaults)
        defaults = self._setup.configure_roles(current_defaults=defaults,
                                               role=Role(role=role),
                                               run_env=run_env)
        defaults = self._setup.configure_figgy_defaults(defaults)
        self._setup.save_defaults(defaults)

        self._out.success(
            f"\nLogin successful. Your sandbox session will last for [[1 hour]]."
        )

        self._out.print(
            f"\nIf your session expires, you may rerun `{CLI_NAME} login sandbox` to get another sandbox session. "
            f"\nAll previous figgy sessions have been disabled, you'll need to run {CLI_NAME} "
            f"--configure to leave the sandbox.")

    @VersionTracker.notify_user
    @AnonymousUsageTracker.track_command_usage
    def execute(self):
        if self.context.command == login:
            self.login()
        elif self.context.command == sandbox:
            Utils.wipe_vaults() or Utils.wipe_defaults()
            self.login_sandbox()
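
The sandbox login above requests short-lived credentials over HTTP, checks the status code, and only then parses the JSON body into a credentials object. A bare-bones sketch of that request pattern (the URL, function name, and payload handling are placeholders, not figgy's real endpoint):

import sys

import requests

SANDBOX_CREDS_URL = "https://sandbox.example.com/credentials"  # placeholder, not the real endpoint


def fetch_sandbox_creds(user: str, role: str) -> dict:
    """Return the parsed credential payload, exiting with a message if the request is rejected."""
    result = requests.get(SANDBOX_CREDS_URL, params={"user": user, "role": role})
    if result.status_code != 200:
        sys.exit(f"Unable to get temporary credentials (HTTP {result.status_code}).")
    return result.json()


if __name__ == "__main__":
    # With a real endpoint, these values would be written into an AWS credentials profile.
    creds = fetch_sandbox_creds(user="demo-user", role="dev")
    print(sorted(creds.keys()))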
Exemplo n.º 30
0
class Share(ConfigCommand):
    def __init__(self, ssm_init, repl_init: ReplicationDao,
                 config_completer_init, colors_enabled: bool,
                 config_context: ConfigContext):
        super().__init__(share, colors_enabled, config_context)

        self._ssm = ssm_init
        self._repl = repl_init
        self._config_completer = config_completer_init
        self._utils = Utils(colors_enabled)
        self._out = Output(colors_enabled)

    def _share_param(self):
        """
        Enables sharing of parameters from one namespace to the /app/service-name/replicated namespace.
        Args:
            run_env: Run Environment
        """

        source_name_msg = [(f'class:{self.c.bl}',
                            'Input the PS Name you wish to share: ')]

        dest_name_msg = [(f'class:{self.c.bl}',
                          'Input the destination of the shared value: ')]

        share_another = True
        while share_another:
            print()
            key = prompt(source_name_msg,
                         completer=self._config_completer,
                         style=FIGGY_STYLE)
            if re.match(f"{self.context.defaults.service_ns}/.*", key):
                self._out.error(
                    f"The SOURCE of replication may not be from within the "
                    f"[[{self.context.defaults.service_ns}/]] namespace.\n")
                continue

            dest = prompt(dest_name_msg,
                          completer=self._config_completer,
                          style=FIGGY_STYLE)
            key_value = None
            try:
                key_value = self._ssm.get_parameter(key)
            except ClientError as e:
                denied = "AccessDeniedException" == e.response['Error']['Code']
                if denied and "AWSKMS; Status Code: 400;" in e.response[
                        'Error']['Message']:
                    self._out.error(
                        f"You do not have access to decrypt the value of Name: [[{key}]]"
                    )
                elif denied:
                    self._out.error(
                        f"You do not have access to Name: [[{key}]]")
                else:
                    raise

            self._utils.validate(
                key_value is not None,
                "Either the Name you provided to share does not exist or you do not have the "
                "proper permissions to share the provided Name.")

            namespace = self._utils.parse_namespace(dest)
            repl_config = ReplicationConfig(destination=dest,
                                            env_alias=self.run_env.env,
                                            namespace=namespace,
                                            source=key,
                                            type=ReplicationType.APP.value)
            self._repl.put_config_repl(repl_config)
            self._out.success(f"[[{key}]] successfully shared.")
            to_continue = input(f"Share another? (y/N): ")
            to_continue = to_continue if to_continue != '' else 'n'
            share_another = to_continue.lower() == "y"

    @VersionTracker.notify_user
    @AnonymousUsageTracker.track_command_usage
    def execute(self):
        self._share_param()
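
The guard at the top of _share_param above rejects sources that already sit under the service namespace, since that is the namespace shared values are replicated into. A tiny sketch of that check (the "/app" default and the function name are assumptions for illustration):

import re


def can_share_from(source: str, service_ns: str = "/app") -> bool:
    """Reject sources inside the replicated service namespace; anything else may be shared."""
    return re.match(f"{service_ns}/.*", source) is None


if __name__ == "__main__":
    print(can_share_from("/shared/db-url"))                      # True  -> allowed as a source
    print(can_share_from("/app/my-service/replicated/db-url"))   # False -> blocked as a source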