Example #1
class Put(OTSCommand):
    """
    Allows storage of a one-time secret that can be shared and retrieved once before it expires.
    """
    def __init__(self, ots_svc: OTSService, ots_context: OTSContext,
                 colors_enabled: bool):
        super().__init__(ots_put, colors_enabled, ots_context)
        self._ots = ots_svc
        self._utils = Utils(colors_enabled)
        self._out = Output(colors_enabled)

    def _put(self):
        value = Input.input(f"Please input a value to share: ")

        # Safe convert to int or float, then validate
        expires_in_hours = Input.input(
            f"Select # of hours before value auto-expires: ", default="1")
        expires_in_hours = Utils.safe_cast(expires_in_hours, int,
                                           expires_in_hours)
        expires_in_hours = Utils.safe_cast(expires_in_hours, float,
                                           expires_in_hours)
        self._utils.validate(
            isinstance(expires_in_hours, int)
            or isinstance(expires_in_hours, float),
            "You must provide a number of hours for when this secret should expire. No strings accepted."
        )
        self._utils.validate(
            expires_in_hours <= 48,
            "You may not specify an expiration time more than 48 hours in the future."
        )

        secret_id = self._ots.put_ots(value, expires_in_hours)
        self._out.print(
            f"\n\nTo share this secret, recipients will need the following:")
        self._out.print(f"\n[[Secret Id]] -> {secret_id}")
        self._out.success(
            f"\n\nValue successfully stored, it will expire in {expires_in_hours} hours, or when retrieved."
        )

    def execute(self):
        self._put()
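
The cast-and-validate flow in `_put()` above can be reduced to a small standalone sketch. `safe_cast` and `parse_expiry_hours` below are illustrative stand-ins written for this note (not figgy's own helpers), assuming the same rules: numeric input only, capped at 48 hours.

# Minimal sketch of the expiry parsing in Put._put(); safe_cast here is a
# hypothetical stand-in for Utils.safe_cast, not the project's helper.
from typing import Any, Union


def safe_cast(value: Any, to_type: type, default: Any) -> Any:
    """Return `value` converted to `to_type`, or `default` if the cast fails."""
    try:
        return to_type(value)
    except (ValueError, TypeError):
        return default


def parse_expiry_hours(raw: str, max_hours: int = 48) -> Union[int, float]:
    """Mirror Put._put(): accept only numeric input, capped at max_hours."""
    hours = safe_cast(raw, int, raw)        # "2"   -> 2
    hours = safe_cast(hours, float, hours)  # "1.5" -> 1.5, 2 -> 2.0
    if not isinstance(hours, (int, float)):
        raise ValueError("You must provide a number of hours. No strings accepted.")
    if hours > max_hours:
        raise ValueError(f"You may not specify an expiration more than {max_hours} hours in the future.")
    return hours


if __name__ == "__main__":
    print(parse_expiry_hours("1"))    # 1.0
    print(parse_expiry_hours("1.5"))  # 1.5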
Example #2
class Sync(ConfigCommand):
    """
    Synchronizes the local application configuration state defined in the figgy.json file with the existing
    remote state in the targeted environment. Also configures replication for designated shared parameters
    in the figgy.json file.
    """
    def __init__(self, ssm_init: SsmDao, config_init: ConfigDao,
                 repl_dao: ReplicationDao, colors_enabled: bool,
                 context: ConfigContext, get: Get, put: Put):
        super().__init__(sync, colors_enabled, context)
        self._config = config_init
        self._ssm = ssm_init
        self._repl = repl_dao
        self._config_path = context.ci_config_path if context.ci_config_path \
            else Utils.find_figgy_json()
        self._utils = Utils(colors_enabled)
        self._replication_only = context.replication_only
        self._errors_detected = False
        self.example = f"{self.c.fg_bl}{CLI_NAME} config {self.command_printable} " \
                       f"--env dev --config /path/to/config{self.c.rs}"
        self._get: Get = get
        self._put: Put = put
        self._FILE_PREFIX = "file://"
        self._out = Output(colors_enabled)

    def _input_config_values(self, config_keys: Set[str]) -> None:
        """
        Prompts the user to provide a value for each of the passed-in config keys that is missing from PS.
        :param config_keys: Set[str] - config names to prompt the user to add.
        """
        def validate_msg(ps_name: str):
            self._out.success(f"Name Validated: [[{ps_name}]]")
            return validate_msg

        count = 0
        for key in config_keys:
            try:
                if not self._get.get(key):
                    self._out.warn(
                        f"Fig: [[{key}]] missing from PS in environment: [[{self.run_env}]]."
                    )
                    self._put.put_param(key=key, display_hints=False)
                    count = count + 1
                else:
                    validate_msg(key)
            except ClientError:
                validate_msg(key)

        if count:
            self._out.success(
                f"[[{count}]] {'value' if count == 1 else 'values'} added successfully"
            )

    def _sync_keys(self, config_namespace: str, all_keys: Set):
        """
        Looks for stray parameters (keys) under the provided namespace and prints information about
        parameters that exist in PS but are not defined in the figgy.json file.
        Args:
            config_namespace: Namespace to query PS under.
            all_keys: All keys that exist in figgy.json to compare against.
        """
        self._out.notify(f"Checking for stray config names.")

        # Find & Prune stray keys
        ps_keys = set(
            list(
                map(lambda x: x['Name'],
                    self._ssm.get_all_parameters([config_namespace]))))
        ps_only_keys = ps_keys.difference(all_keys)

        if len(ps_only_keys) > 0:
            self._out.warn(
                "The following Names were found in PS but are not referenced in your configurations. \n"
                "Use the [[prune]] command to clean them up once all deployed application versions "
                "no longer use these configurations:")

        for key in ps_only_keys:
            self._out.print(f"Unused Parameter: [[{key}]]")

        if not ps_only_keys:
            self._out.success(f"No stray configurations found.")

    def _sync_repl_configs(self,
                           config_repl: Dict,
                           namespace: str = None) -> None:
        """
        Syncs replication configs from a defined "replicate_figs" block parsed from either the figgy.json file
        or the data replication config json file.
        Args:
            config_repl: Dict of KV Pairs for a repl config. Source -> Dest
            namespace: Optional namespace. Parsed from destination if not supplied.
        """
        local_configs: List[ReplicationConfig] = ReplicationConfig.from_dict(
            conf=config_repl,
            type=ReplicationType(REPL_TYPE_APP),
            run_env=self.run_env,
            namespace=namespace)
        for l_cfg in local_configs:
            # Namespace will be missing for --replication-only syncs. Otherwise, with standard syncs, namespace is passed
            # as a parameter here.
            if not namespace:
                namespace = l_cfg.namespace

            if not l_cfg.destination.startswith(namespace):
                self._out.error(
                    f"Replication config [[{l_cfg.source} -> {l_cfg.destination}]] has a destination that "
                    f"is not in your service namespace: [[{namespace}]]. This is invalid."
                )
                self._errors_detected = True
                continue

            remote_cfg = self._repl.get_config_repl(l_cfg.destination)

            # Should never happen, except when someone manually deletes source / destination without going through CLI
            missing_from_ps = self.__get_param_encrypted(l_cfg.source) is None

            if not remote_cfg or remote_cfg != l_cfg or missing_from_ps:
                try:
                    # Parenthesized so the "add" path still requires permission to replicate
                    # from the source, even when the value is missing from PS.
                    if self._can_replicate_from(l_cfg.source) \
                            and (not remote_cfg or missing_from_ps):
                        self._repl.put_config_repl(l_cfg)
                        self._out.print(
                            f"[[Replication added:]] {l_cfg.source} -> {l_cfg.destination}"
                        )
                    elif self._can_replicate_from(l_cfg.source) and remote_cfg:
                        self._repl.put_config_repl(l_cfg)
                        self._out.notify(f"Replication updated.")
                        self._out.warn(
                            f"Removed: {remote_cfg.source} -> {remote_cfg.destination}"
                        )
                        self._out.success(
                            f"Added: {l_cfg.source} -> {l_cfg.destination}")
                    else:
                        self._errors_detected = True
                        # print(f"{self.c.fg_rd}You do not have permission to configure replication from source:"
                        #       f"{self.c.rs} {key}")
                except ClientError:
                    self._utils.validate(
                        False,
                        f"Error detected when attempting to store replication config "
                        f"for {l_cfg.destination}")
                    self._errors_detected = True
            else:
                self._out.success(
                    f"Replication Validated: [[{l_cfg.source} -> {l_cfg.destination}]]"
                )

    def _notify_of_data_repl_orphans(self, config_repl: Dict) -> None:
        """
        Notify user of detected stray replication configurations when using the --replication-only flag.
        :param config_repl: replication configuration block.
        """
        strays: Set[ReplicationConfig] = set()
        notify = False
        for repl in config_repl:
            namespace = self._utils.parse_namespace(config_repl[repl])
            remote_cfgs = self._repl.get_all_configs(namespace)

            if remote_cfgs:
                for cfg in remote_cfgs:
                    if cfg.source not in list(config_repl.keys()) \
                            and cfg.type == REPL_TYPE_APP \
                            and not cfg.source.startswith(shared_ns) \
                            and not cfg.source.startswith(self.context.defaults.service_ns):
                        strays.add(cfg)
                        notify = True

        for stray in strays:
            print(
                f"{self.c.fg_yl}stray replication mapping detected: {self.c.rs}"
                f" {self.c.fg_bl}{stray.source} -> {stray.destination}{self.c.rs}."
            )
        if notify:
            print(
                f"To prune stray replication configs, "
                f"delete the destination, THEN the source with the `figgy config delete` command"
            )

    def _sync_replication(self, config_repl: Dict, expected_destinations: Set,
                          namespace: str):
        """
        Calls sync_repl_configs which adds/removes repl configs. Then searches for stray configurations and notifies
        the user of detected stray configurations.
        Args:
            config_repl: Dict of KV Pairs for a repl config. Source -> Dest
            expected_destinations: expected replication destinations, as defined in merge key sources,
             or shared_figs
            namespace: Namespace to sync replication configs to. E.g. /app/demo-time/
        """

        self._out.notify(f"Validating replication for all parameters.")

        self._sync_repl_configs(config_repl, namespace=namespace)
        self._out.notify(f"\nChecking for stray replication configurations.")
        remote_cfgs = self._repl.get_all_configs(namespace)
        notify = True
        if remote_cfgs:
            for cfg in remote_cfgs:
                if cfg.source not in list(config_repl.keys()) \
                        and cfg.destination not in list(config_repl.values()) \
                        and cfg.destination not in expected_destinations \
                        and (isinstance(cfg.source, list)
                             or cfg.source.startswith(shared_ns) or cfg.source.startswith(
                            self.context.defaults.service_ns)):
                    print(
                        f"{self.c.fg_rd}Stray replication mapping detected: {self.c.rs}"
                        f" {self.c.fg_bl}{cfg.source} -> {cfg.destination}{self.c.rs}."
                    )
                    notify = False
        if notify:
            self._out.success(
                f"No stray replication configs found for: {namespace}")
        else:
            self._out.warn(f"{CLEANUP_REPLICA_ORPHANS}")

    def _validate_merge_keys(self, destination: str, sources: Union[List, str],
                             namespace: str) -> bool:
        """
        Validates merge key sources & destinations
        Args:
            destination: str -> Destination of merge key replication
            sources: List or Str -> Source(s) of this merge key
            namespace: application namespace
        """
        if not destination.startswith(namespace):
            print(
                f"{self.c.fg_rd}Merge config: {self.c.rs}{self.c.fg_bl}{destination}{self.c.rs}{self.c.fg_rd} has a "
                f"destination that is not in your service namespace: "
                f"{self.c.rs}{self.c.fg_bl}{namespace}{self.c.rs}{self.c.fg_rd}. This is invalid.{self.c.rs}"
            )
            self._errors_detected = True
            return False

        if isinstance(sources, list):
            for item in sources:
                if item.startswith(MERGE_KEY_PREFIX):
                    self._utils.validate(
                        item.replace(MERGE_KEY_PREFIX,
                                     "").startswith(namespace),
                        f"Source: {item} in merge config must begin with your namespace: {namespace}."
                    )
                    self._errors_detected = True
                    return False
        else:
            self._utils.validate(
                sources.startswith(namespace),
                f"Source {sources} in merge config must begin with your namespace: {namespace}"
            )
            self._errors_detected = True
            return False

        return True

    def _sync_merge_keys(self, config_merge: Dict, namespace: str) -> None:
        """
        Pushes merge key configs into the replication config table.
        Args:
            config_merge: Dict of merge_parameters parsed from the figgy.json file
            namespace: namespace for the app
        """
        self._out.notify("Validating replication for all merge keys.")
        for key in config_merge:
            self._validate_merge_keys(key, config_merge[key], namespace)

            config = self._repl.get_config_repl(key)
            if not config or (config.source != config_merge[key]):
                try:
                    repl_config = ReplicationConfig(
                        destination=key,
                        run_env=self.run_env,
                        namespace=namespace,
                        source=config_merge[key],
                        type=ReplicationType(REPL_TYPE_MERGE))
                    self._repl.put_config_repl(repl_config)
                except ClientError:
                    self._utils.validate(
                        False,
                        f"Error detected when attempting to store replication config for {key}"
                    )
                    self._errors_detected = True
            else:
                self._out.success(
                    f"Merge key replication config validated: [[{key}]]")

    def _validate_expected_names(self, all_names: Set, repl_conf: Dict,
                                 merge_conf: Dict):
        self._out.notify(f"Validating shared keys exist.")
        print_resolution_message = False
        merged_confs = {**repl_conf, **merge_conf}
        for name in all_names:
            if self.__get_param_encrypted(name) is None:
                awaiting_repl = False
                for cnf in merged_confs:
                    if name == cnf or name in list(repl_conf.values()):
                        self._out.print(
                            f"\nConfig value [[{name}]] is a destination for replication, but doesn't exist"
                            f" yet. If you commit now your build could fail. If all of its dependencies exist, "
                            f"this should resolve itself within a few seconds. Try re-running sync.")
                        awaiting_repl = True
                        break

                if not awaiting_repl:
                    self._out.print(
                        f"Config value of [[{name}]] does not exist and is expected based on "
                        f"your defined configuration.")
                    print_resolution_message = True
                    self._errors_detected = True

        if print_resolution_message:
            self._out.error(f"{SHARED_NAME_RESOLUTION_MESSAGE}")
        else:
            self._out.success("Shared keys have been validated.")

    def _can_replicate_from(self, source: str):
        try:
            if self.__get_param_encrypted(source) is not None:
                return True
            else:
                self._out.warn(
                    f"Replication source: [[{source}]] is missing from ParameterStore. "
                    f"It must be added before config replication can be configured.\n"
                )
                self._input_config_values({source})
                return True
        except ClientError as e:
            denied = "AccessDeniedException" == e.response['Error']['Code']
            if denied and "AWSKMS; Status Code: 400;" in e.response['Error'][
                    'Message']:
                self._out.error(
                    f"You do not have access to decrypt the value of Name: [[{source}]]"
                )
            elif denied:
                self._out.error(
                    f"You do not have access to Parameter: [[{source}]]")
            else:
                raise
        return False

    def __get_param_encrypted(self, source: str) -> Optional[str]:
        try:
            return self._ssm.get_parameter_encrypted(source)
        except ClientError as e:
            denied = "AccessDeniedException" == e.response['Error']['Code']
            if denied and "AWSKMS; Status Code: 400;" in e.response['Error'][
                    'Message']:
                self._out.error(
                    f"You do not have access to decrypt the value of Name: [[{source}]]"
                )
                return None
            elif denied:
                self._utils.error_exit(
                    f"You do not have access to Parameter: {source}")
            else:
                raise

    def _validate_replication_config(self,
                                     config_repl: Dict,
                                     app_conf: bool = True):
        """
        Validates replication config blocks are valid / legal. Prevents people from setting up replication from
        disallowed namespaces, etc. Exits with error if invalid config is discovered.

        Args:
            config_repl: Dict of KV Pairs for a repl config. Source -> Dest
            app_conf: bool: T/F - True if this is an application config block in an application config (figgy.json).
                    False if other, which for now is only repl-configs for data teams.
        """
        for key in config_repl:
            if app_conf:
                self._utils.validate(
                    re.match(
                        f'^/shared/.*$|^{self.context.defaults.service_ns}/.*$',
                        key) is not None,
                    f"The SOURCE of your replication configs must begin with `/shared/` or "
                    f"`{self.context.defaults.service_ns}/`. "
                    f"{key} is non compliant.")

            self._utils.validate(
                re.match(f'^{self.context.defaults.service_ns}/.*$',
                         config_repl[key]) is not None,
                f"The DESTINATION of your replication configs must always begin with "
                f"`{self.context.defaults.service_ns}/`")

    def _find_missing_shared_figs(self, namespace: str, config_repl: Dict,
                                  shared_names: set, merge_conf: Dict):
        """
            Notifies the user if a parameter has been shared into their namespace by an outside party
            but they have not added it to the `shared_figs` block of their figgy.json.
        """
        all_repl_cfgs = self._repl.get_all_configs(namespace)
        for cfg in all_repl_cfgs:
            in_merge_conf = self._in_merge_value(cfg.destination, merge_conf)

            if cfg.destination not in shared_names and cfg.type == REPL_TYPE_APP \
                    and cfg.destination not in config_repl.values() and not in_merge_conf:
                print(
                    f"It appears that {self.c.fg_bl}{cfg.user}{self.c.rs} shared "
                    f"{self.c.fg_bl}{cfg.source}{self.c.rs} to {self.c.fg_bl}{cfg.destination}{self.c.rs} "
                    f"and you have not added {self.c.fg_bl}{cfg.destination}{self.c.rs} to the "
                    f"{self.c.fg_bl}{SHARED_KEY}{self.c.rs} section of your figgy.json. This is also not "
                    f"referenced in any defined merge parameter. Please add "
                    f"{self.c.fg_bl}{cfg.destination}{self.c.rs} to your figgy.json, or delete this parameter "
                    f"and the replication config with the prune command.")

    def _in_merge_value(self, dest: str, merge_conf: Dict):
        for key in merge_conf:
            value = merge_conf[key]
            # 'value' can be a list or a str, but the way 'in' operates, this works either way. #dynamic programming
            for suffix in merge_suffixes:
                if f"${'{'}{dest}{suffix}{'}'}" in value:
                    return True

        return False

    def _fill_repl_conf_variables(self, repl_conf: Dict) -> Dict:
        repl_copy = {}
        all_vars = []
        for key, val in repl_conf.items():
            all_vars = all_vars + re.findall(r'\${(\w+)}', key)
            all_vars = all_vars + re.findall(r'\${(\w+)}', val)

        all_vars = set(all_vars)
        if all_vars:
            print(
                f"{self.c.fg_bl}{len(all_vars)} variables detected in: {self.c.rs}{self.c.fg_yl}"
                f"{self._config_path}{self.c.rs}\n")

        template_vals = {}
        for var in all_vars:
            print(f"Template variable: {self.c.fg_bl}{var}{self.c.rs} found.")
            input_val = Input.input(
                f"Please input a value for {self.c.fg_bl}{var}{self.c.rs}: ",
                min_length=1)
            template_vals[var] = input_val

        for key, val in repl_conf.items():
            updated_key = key
            updated_val = val

            for template_key, template_val in template_vals.items():
                updated_key = updated_key.replace(f"${{{template_key}}}",
                                                  template_val)
                updated_val = updated_val.replace(f"${{{template_key}}}",
                                                  template_val)

            repl_copy[updated_key] = updated_val

        return repl_copy

    def run_ci_sync(self) -> None:
        """
        Orchestrates a standard `sync` command WITHOUT the `--replication-only` flag set.
        """
        # Validate & parse figgy.json
        config = self._utils.get_ci_config(self._config_path)
        shared_names = set(
            self._utils.get_config_key_safe(SHARED_KEY, config, default=[]))
        repl_conf = self._utils.get_config_key_safe(REPLICATION_KEY,
                                                    config,
                                                    default={})
        repl_from_conf = self._utils.get_config_key_safe(REPL_FROM_KEY,
                                                         config,
                                                         default={})
        merge_conf = self._utils.get_config_key_safe(MERGE_KEY,
                                                     config,
                                                     default={})
        config_keys = set(
            self._utils.get_config_key_safe(CONFIG_KEY, config, default=[]))
        namespace = self._utils.get_namespace(config)
        merge_keys = set(merge_conf.keys())
        all_keys = KeyUtils.find_all_expected_names(config_keys, shared_names,
                                                    merge_conf, repl_conf,
                                                    repl_from_conf, namespace)

        repl_conf = KeyUtils.merge_repl_and_repl_from_blocks(
            repl_conf, repl_from_conf, namespace)
        # Add missing config values
        self._out.notify(
            f"Validating all configuration keys exist in ParameterStore.")
        self._input_config_values(config_keys)

        # Sync keys between PS / Local config
        print()
        self._sync_keys(namespace, all_keys)

        print()

        self._find_missing_shared_figs(namespace, repl_conf, shared_names,
                                       merge_conf)

        # Disabling requirement (for now) of replication to be in /replicated path
        # print()
        self._validate_replication_config(repl_conf, app_conf=True)

        print()
        # sync replication config
        all_shared_keys = shared_names | set(merge_conf.keys())
        self._sync_replication(repl_conf, all_shared_keys, namespace)

        print()
        self._sync_merge_keys(merge_conf, namespace)

        print()
        # validate expected keys exist
        self._validate_expected_names(all_keys, repl_conf, merge_conf)

    def run_repl_sync(self) -> None:
        """
        Orchestrates sync when the user passes in the `--replication-only` flag.
        """
        self._utils.validate(
            os.path.exists(self._config_path),
            f"Path {self._config_path} is invalid. "
            f"That file does not exist.")
        repl_conf = self._utils.get_repl_config(self._config_path)

        repl_conf = self._fill_repl_conf_variables(repl_conf)
        self._validate_replication_config(repl_conf, app_conf=False)
        self._sync_repl_configs(repl_conf)
        self._notify_of_data_repl_orphans(repl_conf)

    @VersionTracker.notify_user
    @AnonymousUsageTracker.track_command_usage
    def execute(self):
        print()
        if self._replication_only:
            self.run_repl_sync()
        else:
            self.run_ci_sync()

        if self._errors_detected:
            self._out.error_h2(
                'Sync failed. Please address the outputted errors.')
        else:
            self._out.success_h2('Sync completed with no errors!')
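
The `${var}` templating performed by `_fill_repl_conf_variables` above boils down to: collect placeholder names from both keys and values, obtain a value for each, then substitute into both sides of the mapping. Below is a self-contained sketch of that logic; the function name and sample paths are illustrative assumptions, not figgy's API.

# Standalone sketch of the ${var} substitution performed by
# Sync._fill_repl_conf_variables(); names and sample data are illustrative.
import re
from typing import Dict


def fill_template_variables(repl_conf: Dict[str, str], values: Dict[str, str]) -> Dict[str, str]:
    """Replace ${var} placeholders in both keys and values of a replication config."""
    # Collect every ${var} placeholder found in keys *and* values.
    found = set()
    for key, val in repl_conf.items():
        found.update(re.findall(r'\$\{(\w+)\}', key))
        found.update(re.findall(r'\$\{(\w+)\}', val))

    missing = found - set(values)
    if missing:
        raise ValueError(f"No value supplied for template variable(s): {sorted(missing)}")

    filled = {}
    for key, val in repl_conf.items():
        for var, replacement in values.items():
            key = key.replace(f"${{{var}}}", replacement)
            val = val.replace(f"${{{var}}}", replacement)
        filled[key] = val
    return filled


if __name__ == "__main__":
    conf = {"/shared/${team}/db-url": "/app/${service}/replicated/db-url"}
    print(fill_template_variables(conf, {"team": "data", "service": "demo-time"}))
    # {'/shared/data/db-url': '/app/demo-time/replicated/db-url'}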
Example #3
class Delete(ConfigCommand):

    def __init__(self, ssm_init: SsmDao, cfg_view: RBACLimitedConfigView,
                 config_init: ConfigDao, repl_init: ReplicationDao, context: ConfigContext, colors_enabled: bool,
                 config_completer: WordCompleter):
        super().__init__(delete, colors_enabled, context)
        self._ssm = ssm_init
        self._config = config_init
        self._repl = repl_init
        self._utils = Utils(colors_enabled)
        self._config_completer = config_completer
        self._out = Output(colors_enabled)
        self._cfg_view = cfg_view

    def delete_param(self, key) -> bool:
        """
        Manages safe deletion through the CLI. Prevents deletion of replication sources. Prompts user for deletion of
        replication destinations.
        Args:
            key: PS Name / Key

        Returns: bool - T/F based on whether a parameter was actually deleted.
        """
        sources = self._repl.get_cfgs_by_src(key)  # type: List[ReplicationConfig]
        repl_conf = self._repl.get_config_repl(key)  # type: ReplicationConfig

        if len(sources) > 0:
            self._out.error(f"You're attempting to delete a key that is the source for at least one "
                            f"replication config.\n[[{key}]] is actively replicating to these"
                            f" destinations:\n")
            for src in sources:
                self._out.warn(f"Dest: [[{src.destination}]]. This config was created by [[{src.user}]]. ")

            self._out.print(
                f"\r\n[[{key}]] is a replication SOURCE. Deleting this source would effectively BREAK "
                f"replication to the above printed destinations. You may NOT delete sources that are actively "
                f"replicating. Please delete the above printed DESTINATIONS first. "
                f"Once they have been deleted, you will be allowed to delete this "
                f"SOURCE.")
            return False
        elif repl_conf is not None:
            selection = "unselected"
            while selection.lower() != "y" and selection.lower() != "n":
                repl_msg = [
                    (f'class:{self.c.rd}', f"{key} is an active replication destination created by "),
                    (f'class:{self.c.bl}', f"{repl_conf.user}. "),
                    (f'class:{self.c.rd}', f"Do you want to ALSO delete this replication config and "
                                           f"permanently delete {key}? "),
                    (f'class:', "(y/N): ")]
                selection = prompt(repl_msg, completer=WordCompleter(['Y', 'N']), style=FIGGY_STYLE)
                selection = selection if selection != '' else 'n'
                if selection.strip().lower() == "y":
                    self._repl.delete_config(key)
                    self._ssm.delete_parameter(key)
                    self._out.success(f"[[{key}]] and replication config destination deleted successfully.")
                    return True
                elif selection.strip().lower() == "n":
                    return False

        else:
            try:
                self._ssm.delete_parameter(key)
            except ClientError as e:
                if e.response['Error']['Code'] == 'ParameterNotFound':
                    pass
                elif "AccessDeniedException" == e.response['Error']['Code']:
                    self._out.error(f"You do not have permissions to delete: {key}")
                    return False
                else:
                    raise

            print(f"{self.c.fg_gr}{key} deleted successfully.{self.c.rs}\r\n")
            return True

    def _delete_param(self):
        """
        Prompts the user for a parameter name to delete, then deletes it.
        """
        # Add all keys
        key, notify, delete_another = None, False, True

        while delete_another:
            key = Input.input('PS Name to Delete: ', completer=self._config_completer)
            try:
                if self.delete_param(key):
                    if key in self._config_completer.words:
                        self._config_completer.words.remove(key)
                else:
                    continue
            except ClientError as e:
                error_code = e.response['Error']['Code']
                if "AccessDeniedException" == error_code:
                    self._out.error(f"\n\nYou do not have permissions to delete config values at the path: [[{key}]]")
                    self._out.warn(f"Your role of {self.context.role} may delete keys under the following namespaces: "
                                   f"{self._cfg_view.get_authorized_namespaces()}")
                    self._out.print(f"Error message: {e.response['Error']['Message']}")
                elif "ParameterNotFound" == error_code:
                    self._out.error(f"The specified Name: [[{key}]] does not exist in the selected environment. "
                                    f"Please try again.")
                else:
                    self._out.error(f"Exception caught attempting to delete config: {e.response['Error']['Message']}")

            print()
            to_continue = input(f"Delete another? (Y/n): ")
            to_continue = to_continue if to_continue != '' else 'y'
            delete_another = to_continue.lower() == "y"

    @VersionTracker.notify_user
    @AnonymousUsageTracker.track_command_usage
    def execute(self):
        self._delete_param()
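
Both `delete_param` and `_delete_param` above triage `ClientError` by its error code (access denied vs. parameter not found vs. re-raise). Here is a minimal, self-contained sketch of that pattern, assuming `botocore` is installed (it ships with `boto3`); the helper name is an illustrative assumption.

# Sketch of the ClientError triage pattern used in delete_param/_delete_param;
# requires botocore (installed with boto3). The helper name is illustrative.
from botocore.exceptions import ClientError


def classify_ssm_error(e: ClientError) -> str:
    """Map a ClientError to one of: 'denied', 'not_found', 'other'."""
    code = e.response['Error']['Code']
    if code == 'AccessDeniedException':
        return 'denied'
    if code == 'ParameterNotFound':
        return 'not_found'
    return 'other'


if __name__ == "__main__":
    # Build a synthetic ClientError the same way botocore surfaces one.
    err = ClientError({'Error': {'Code': 'ParameterNotFound',
                                 'Message': 'Parameter /app/demo/missing not found.'}},
                      'DeleteParameter')
    print(classify_ssm_error(err))  # not_found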
Example #4
class Share(ConfigCommand):
    def __init__(self, ssm_init, repl_init: ReplicationDao,
                 config_completer_init, colors_enabled: bool,
                 config_context: ConfigContext):
        super().__init__(share, colors_enabled, config_context)

        self._ssm = ssm_init
        self._repl = repl_init
        self._config_completer = config_completer_init
        self._utils = Utils(colors_enabled)
        self._out = Output(colors_enabled)

    def _share_param(self):
        """
        Enables sharing of parameters from one namespace to the /app/service-name/replicated namespace.
        """

        source_name_msg = [(f'class:{self.c.bl}',
                            'Input the PS Name you wish to share: ')]

        dest_name_msg = [(f'class:{self.c.bl}',
                          'Input the destination of the shared value: ')]

        share_another = True
        while share_another:
            print()
            key = prompt(source_name_msg,
                         completer=self._config_completer,
                         style=FIGGY_STYLE)
            if re.match(f"{self.context.defaults.service_ns}/.*", key):
                self._out.error(
                    f"The SOURCE of replication may not be from within the "
                    f"[[{self.context.defaults.service_ns}/]] namespace.\n")
                continue

            dest = prompt(dest_name_msg,
                          completer=self._config_completer,
                          style=FIGGY_STYLE)
            key_value = None
            try:
                key_value = self._ssm.get_parameter(key)
            except ClientError as e:
                denied = "AccessDeniedException" == e.response['Error']['Code']
                if denied and "AWSKMS; Status Code: 400;" in e.response[
                        'Error']['Message']:
                    self._out.error(
                        f"You do not have access to decrypt the value of Name: [[{key}]]"
                    )
                elif denied:
                    self._out.error(
                        f"You do not have access to Name: [[{key}]]")
                else:
                    raise

            self._utils.validate(
                key_value is not None,
                "Either the Name you provided to share does not exist or you do not have the "
                "proper permissions to share the provided Name.")

            namespace = self._utils.parse_namespace(dest)
            repl_config = ReplicationConfig(destination=dest,
                                            env_alias=self.run_env.env,
                                            namespace=namespace,
                                            source=key,
                                            type=ReplicationType.APP.value)
            self._repl.put_config_repl(repl_config)
            self._out.success(f"[[{key}]] successfully shared.")
            to_continue = input(f"Share another? (y/N): ")
            to_continue = to_continue if to_continue != '' else 'n'
            share_another = to_continue.lower() == "y"

    @VersionTracker.notify_user
    @AnonymousUsageTracker.track_command_usage
    def execute(self):
        self._share_param()
Example #5
class Restore(ConfigCommand):
    def __init__(self, ssm_init: SsmDao, kms_init: KmsService,
                 config_init: ConfigDao, repl_dao: ReplicationDao,
                 audit_dao: AuditDao, cfg_view: RBACLimitedConfigView,
                 colors_enabled: bool, context: ConfigContext,
                 config_completer: WordCompleter, delete: Delete):
        super().__init__(restore, colors_enabled, context)
        self._config_context = context
        self._ssm = ssm_init
        self._kms = kms_init
        self._config = config_init
        self._repl = repl_dao
        self._audit = audit_dao
        self._cfg_view = cfg_view
        self._utils = Utils(colors_enabled)
        self._point_in_time = context.point_in_time
        self._config_completer = config_completer
        self._delete = delete
        self._out = Output(colors_enabled=colors_enabled)

    def _client_exception_msg(self, item: RestoreConfig, e: ClientError):
        if "AccessDeniedException" == e.response["Error"]["Code"]:
            self._out.error(
                f"\n\nYou do not have permissions to restore config at the path: [[{item.ps_name}]]"
            )
        else:
            self._out.error(
                f"Error message: [[{e.response['Error']['Message']}]]")

    def get_parameter_arn(self, parameter_name: str):
        account_id = self._ssm.get_parameter(ACCOUNT_ID_PATH)

        return f"arn:aws:ssm:us-east-1:{account_id}:parameter{parameter_name}"

    def _restore_param(self) -> None:
        """
        Allows the user to look up a parameter store entry's restorable history from dynamo
        and restore a selected version, if desired.
        """

        table_entries = []

        ps_name = prompt(f"Please input PS key to restore: ",
                         completer=self._config_completer)

        if self._is_replication_destination(ps_name):
            repl_conf = self._repl.get_config_repl(ps_name)
            self._print_cannot_restore_msg(repl_conf)
            exit(0)

        self._out.notify(
            f"\n\nAttempting to retrieve all restorable values of [[{ps_name}]]"
        )
        items: List[RestoreConfig] = self._audit.get_parameter_restore_details(
            ps_name)

        if len(items) == 0:
            self._out.warn(
                "No restorable values were found for this parameter.")
            return

        for i, item in enumerate(items):
            date = time.strftime("%Y-%m-%d %H:%M:%S",
                                 time.localtime(item.ps_time / 1000))

            # we need to decrypt the value, if encrypted, in order to show it to the user
            if item.ps_key_id:
                item.ps_value = self._kms.decrypt_with_context(
                    item.ps_value,
                    {"PARAMETER_ARN": self.get_parameter_arn(item.ps_name)},
                )
            table_entries.append([i, date, item.ps_value, item.ps_user])

        self._out.print(
            tabulate(
                table_entries,
                headers=["Item #", "Time Created", "Value", "User"],
                tablefmt="grid",
                numalign="center",
                stralign="left",
            ))

        valid_options = [f'{x}' for x in range(0, len(items))]
        choice = int(
            Input.select("Select an item number to restore: ",
                         valid_options=valid_options))
        item = items[choice] if items[choice] else None

        restore = Input.y_n_input(
            f"Are you sure you want to restore item #{choice} and have it be the latest version? ",
            default_yes=False)

        if not restore:
            self._utils.warn_exit("Restore aborted.")

        key_id = None if item.ps_type == "String" else item.ps_key_id

        try:
            self._ssm.set_parameter(item.ps_name,
                                    item.ps_value,
                                    item.ps_description,
                                    item.ps_type,
                                    key_id=key_id)

            current_value = self._ssm.get_parameter(item.ps_name)
            if current_value == item.ps_value:
                self._out.success("Restore was successful")
            else:
                self._out.error(
                    "Latest version in parameter store doesn't match what we restored."
                )
                self._out.print(
                    f"Current value: [[{current_value}]].  Expected value: [[{item.ps_value}]]"
                )

        except ClientError as e:
            self._client_exception_msg(item, e)

    def _decrypt_if_applicable(self, entry: RestoreConfig) -> str:
        if entry.ps_type != "String":
            return self._kms.decrypt_with_context(
                entry.ps_value,
                {"PARAMETER_ARN": self.get_parameter_arn(entry.ps_name)})
        else:
            return entry.ps_value

    def _is_replication_destination(self, ps_name: str):
        return self._repl.get_config_repl(ps_name)

    def _restore_params_to_point_in_time(self):
        """
        Restores parameters to their state at a point in time provided by the user.
        Replays parameter history up to that point in time so versioning remains intact.
        """

        repl_destinations = []
        ps_prefix = Input.input(
            f"Which parameter store prefix would you like to recursively restore? "
            f"(e.g., /app/demo-time): ",
            completer=self._config_completer)

        authed_nses = self._cfg_view.get_authorized_namespaces()
        valid_prefix = (
            [True for ns in authed_nses if ps_prefix.startswith(ns)]
            or [False])[0]
        self._utils.validate(
            valid_prefix,
            f"Selected namespace must begin with a 'Fig Tree' you have access to. "
            f"Such as: {authed_nses}")

        time_selected, time_converted = None, None
        try:
            time_selected = Input.input(
                "Seconds since epoch to restore latest values from: ")
            time_converted = datetime.fromtimestamp(float(time_selected))
        except ValueError as e:
            if "out of range" in e.args[0]:
                try:
                    time_converted = datetime.fromtimestamp(
                        float(time_selected) / 1000)
                except ValueError as e:
                    self._utils.error_exit(
                        "Make sure you're using a format of either seconds or milliseconds since epoch."
                    )
            elif "could not convert" in e.args[0]:
                self._utils.error_exit(
                    f"The format of this input should be seconds since epoch. (e.g., 1547647091)\n"
                    f"Try using: https://www.epochconverter.com/ to convert your date to this "
                    f"specific format.")
            else:
                self._utils.error_exit(
                    "An unexpected exception triggered: "
                    f"'{e}' while trying to convert {time_selected} to 'datetime' format."
                )

        self._utils.validate(
            time_converted is not None,
            f"`{CLI_NAME}` encountered an error parsing your input for "
            f"target rollback time.")
        keep_going = Input.y_n_input(
            f"Are you sure you want to restore all figs under {ps_prefix} values to their state at: "
            f"{time_converted}? ",
            default_yes=False)

        if not keep_going:
            self._utils.warn_exit("Aborting restore due to user selection")

        ps_history: PSHistory = self._audit.get_parameter_history_before_time(
            time_converted, ps_prefix)
        restore_count = len(ps_history.history.values())

        if len(ps_history.history.values()) == 0:
            self._utils.warn_exit(
                "No results found for time range.  Aborting.")

        last_item_name = 'Unknown'
        try:
            for item in ps_history.history.values():
                last_item_name = item.name

                if self._is_replication_destination(item.name):
                    repl_destinations.append(item.name)
                    continue

                if item.cfg_at(time_converted).ps_action == SSM_PUT:
                    cfgs_before: List[RestoreConfig] = item.cfgs_before(
                        time_converted)
                    cfg_at: RestoreConfig = item.cfg_at(time_converted)
                    ssm_value = self._ssm.get_parameter(item.name)
                    dynamo_value = self._decrypt_if_applicable(cfg_at)

                    if ssm_value != dynamo_value:
                        if ssm_value is not None:
                            self._ssm.delete_parameter(item.name)

                        for cfg in cfgs_before:
                            decrypted_value = self._decrypt_if_applicable(cfg)
                            self._out.print(
                                f"\nRestoring: [[{cfg.ps_name}]] \nValue: [[{decrypted_value}]]"
                                f"\nDescription: [[{cfg.ps_description}]]\nKMS Key: "
                                f"[[{cfg.ps_key_id if cfg.ps_key_id else '[[No KMS Key Specified]]'}]]"
                            )
                            self._out.notify(
                                f"Replaying version: [[{cfg.ps_version}]] of [[{cfg.ps_name}]]"
                            )
                            print()

                            self._ssm.set_parameter(cfg.ps_name,
                                                    decrypted_value,
                                                    cfg.ps_description,
                                                    cfg.ps_type,
                                                    key_id=cfg.ps_key_id)
                    else:
                        self._out.success(
                            f"Config: {item.name} is current. Skipping.")
                else:
                    # This item must have been a delete, which means this config didn't exist at that time.
                    self._out.print(
                        f"Checking if [[{item.name}]] exists. It was previously deleted."
                    )
                    self._prompt_delete(item.name)
        except ClientError as e:
            if "AccessDeniedException" == e.response["Error"]["Code"]:
                self._utils.error_exit(
                    f"\n\nYou do not have permissions to restore config at the path:"
                    f" [[{last_item_name}]]")
            else:
                self._utils.error_exit(
                    f"Caught error when attempting restore. {e}")

        for item in repl_destinations:
            cfg = self._repl.get_config_repl(item)
            self._print_cannot_restore_msg(cfg)

        print("\n\n")
        if not repl_destinations:
            self._out.success_h2(
                f"[[{restore_count}]] configurations restored successfully!")
        else:
            self._out.warn(
                f"\n\n[[{len(repl_destinations)}]] configurations were not restored because they are shared "
                f"from other destinations. To restore them, restore their sources."
            )
            self._out.success(
                f"{restore_count - len(repl_destinations)} configurations restored successfully."
            )

    def _print_cannot_restore_msg(self, repl_conf: ReplicationConfig):
        self._out.print(
            f"Parameter: [[{repl_conf.destination}]] is a shared parameter. ")
        self._out.print(f"Shared From: [[{repl_conf.source}]]")
        self._out.print(f"Shared by: [[{repl_conf.user}]]")
        self._out.warn(
            f"To restore this parameter you should restore the source: {repl_conf.source} instead!"
        )
        print()

    def _prompt_delete(self, name):
        param = self._ssm.get_parameter_encrypted(name)
        if param:
            selection = Input.y_n_input(
                f"PS Name: {name} did not exist at this restore time."
                f" Delete it? ",
                default_yes=False)

            if selection:
                self._delete.delete_param(name)

    @VersionTracker.notify_user
    @AnonymousUsageTracker.track_command_usage
    def execute(self):
        if self._point_in_time:
            self._restore_params_to_point_in_time()
        else:
            self._restore_param()
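
The point-in-time prompt in `_restore_params_to_point_in_time` accepts either seconds or milliseconds since the epoch and falls back to the millisecond interpretation when the seconds interpretation is out of range. A standalone sketch of that fallback; the function name is an illustrative assumption.

# Sketch of the seconds-vs-milliseconds epoch fallback used when prompting for
# a point-in-time restore; the function name is illustrative.
from datetime import datetime


def parse_epoch(raw: str) -> datetime:
    """Accept an epoch timestamp in seconds or milliseconds and return a datetime."""
    try:
        value = float(raw)
    except ValueError:
        raise ValueError("The input should be seconds since epoch, e.g. 1547647091.")
    try:
        return datetime.fromtimestamp(value)
    except (ValueError, OverflowError, OSError):
        # Values far in the future usually mean the input was in milliseconds.
        return datetime.fromtimestamp(value / 1000)


if __name__ == "__main__":
    print(parse_epoch("1547647091"))     # seconds since epoch
    print(parse_epoch("1547647091000"))  # same instant, expressed in milliseconds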
Example #6
class Promote(ConfigCommand):

    def __init__(self, source_ssm: SsmDao, config_completer_init: WordCompleter,
                 colors_enabled: bool, config_context: ConfigContext, session_mgr: SessionManager):
        super().__init__(promote, colors_enabled, config_context)
        self.config_context = config_context
        self._source_ssm = source_ssm
        self._session_mgr = session_mgr
        self._config_completer = config_completer_init
        self._utils = Utils(colors_enabled)
        self._out = Output(colors_enabled)

    def _promote(self):
        repeat = True
        parameters: List[Dict] = []
        while repeat:
            namespace = Input.input("Please input a namespace prefix to promote:"
                               f" (i.e. {self.context.defaults.service_ns}/foo/): ", completer=self._config_completer)
            if not self._utils.is_valid_input(namespace, "namespace", notify=False):
                continue

            try:
                parameters: List[Dict] = self._source_ssm.get_all_parameters([namespace])

                if not parameters and self._source_ssm.get_parameter(namespace):
                    parameters, latest_version = self._source_ssm.get_parameter_details(namespace)
                    parameters = list(parameters)

                if parameters:
                    repeat = False
                else:
                    self._out.warn("\nNo parameters found. Try again.\n")
            except ClientError as e:
                print(f"{self.c.fg_rd}ERROR: >> {e}{self.c.rs}")
                continue

        self._out.notify(f'\nFound [[{len(parameters)}]] parameter{"s" if len(parameters) > 1 else ""} to migrate.\n')

        assumable_roles = self.context.defaults.assumable_roles
        matching_roles = list(set([x for x in assumable_roles if x.role == self.config_context.role]))
        valid_envs = set([x.run_env.env for x in matching_roles])
        valid_envs.remove(self.run_env.env)  # Remove current env, we can't promote from dev -> dev
        next_env = Input.select(f'Please select the destination environment.', valid_options=list(valid_envs))

        matching_role = [role for role in matching_roles if role.run_env == RunEnv(env=next_env)][0]
        env: GlobalEnvironment = GlobalEnvironment(role=matching_role, region=self.config_context.defaults.region)
        dest_ssm = SsmDao(self._session_mgr.get_session(env, prompt=False).client('ssm'))

        for param in parameters:
            if 'KeyId' in param:
                self._out.print(f"Skipping param: [[{param['Name']}]]. It is encrypted and cannot be migrated.")
            else:
                promote_it = Input.y_n_input(f"Would you like to promote: {param['Name']}?",
                                             default_yes=True)

                if promote_it:
                    val = self._source_ssm.get_parameter(param['Name'])
                    description = param.get('Description', "")
                    dest_ssm.set_parameter(param['Name'], val, description, SSM_STRING)
                    self._out.success(f"Successfully promoted [[{param['Name']}]] to [[{next_env}]].\r\n")

    @VersionTracker.notify_user
    @AnonymousUsageTracker.track_command_usage
    def execute(self):
        self._promote()
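
The destination-environment selection in `_promote()` reduces to a small set computation: keep the assumable roles matching the current role, collect their environments, and drop the current one. A sketch of that logic with `(role, env)` tuples standing in for the real role objects (an assumption made for illustration):

# Sketch of the destination-environment selection in Promote._promote();
# (role, env) tuples stand in for the real assumable-role objects (assumption).
from typing import List, Set, Tuple


def valid_destination_envs(assumable_roles: List[Tuple[str, str]],
                           current_role: str, current_env: str) -> Set[str]:
    """Environments reachable with the current role, excluding the current env."""
    envs = {env for role, env in assumable_roles if role == current_role}
    envs.discard(current_env)  # can't promote dev -> dev
    return envs


if __name__ == "__main__":
    roles = [("devops", "dev"), ("devops", "qa"), ("devops", "prod"), ("data", "dev")]
    print(valid_destination_envs(roles, "devops", "dev"))  # {'qa', 'prod'}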
Example #7
class Prune(ConfigCommand):
    """
    Detects stray ParameterStore names, replication configurations, and merge keys, then
    prompts the user to delete them. This is typically run after the `sync` command informs
    the user that there are stray configurations.
    """
    def __init__(self,
                 ssm: SsmDao,
                 ddb: ConfigDao,
                 repl_dao: ReplicationDao,
                 context: ConfigContext,
                 config_completer_init: WordCompleter,
                 colors_enabled: bool,
                 delete: Delete,
                 args=None):
        super().__init__(prune, colors_enabled, context)
        self._ssm = ssm  # type: SsmDao
        self._config_dao = ddb  # type: ConfigDao
        self._repl = repl_dao
        self._config_completer = config_completer_init  # type: WordCompleter
        self._utils = Utils(colors_enabled)
        self.example = f"{self.c.fg_bl}{CLI_NAME} config {self.command_printable} --env dev " \
            f"--config /path/to/figgy.json{self.c.rs}"
        self._config_path = context.ci_config_path if context.ci_config_path \
            else Utils.find_figgy_json()
        self._out = Output(colors_enabled)

        # If user passes in --info flag, we don't need all of this to be initialized.
        if not hasattr(args, info.name) or args.info is False:
            # Validate & parse figgy.json
            self._config = self._utils.get_ci_config(
                self._config_path)  # type: Dict
            self._shared_names = set(
                self._utils.get_config_key_safe(SHARED_KEY,
                                                self._config,
                                                default=[]))  # type: Set
            self._repl_conf = self._utils.get_config_key_safe(
                REPLICATION_KEY, self._config, default={})  # type: Dict
            self._merge_conf = self._utils.get_config_key_safe(
                MERGE_KEY, self._config, default={})  # type: Dict
            self._config_keys = set(
                self._utils.get_config_key_safe(CONFIG_KEY,
                                                self._config,
                                                default=[]))  # type: Set
            self._merge_keys = set(self._merge_conf.keys())  # type: Set
            self._namespace = self._utils.get_namespace(
                self._config)  # type: str
            self._delete_command = delete
            self._repl_from_conf = self._utils.get_config_key_safe(
                REPL_FROM_KEY, self._config, default={})
            self._repl_conf = KeyUtils.merge_repl_and_repl_from_blocks(
                self._repl_conf, self._repl_from_conf, self._namespace)

            # Build list of all keys found across all config types
            self._all_keys = KeyUtils().find_all_expected_names(
                self._config_keys, self._shared_names, self._merge_conf,
                self._repl_conf, self._repl_from_conf, self._namespace)

    # Prompts for this file
    def _cleanup_parameters(self, config_keys: Set):
        """
        Prompts the user to prune stray ParameterStore names.
        Args:
            config_keys: Set[str] -> Parameter names defined in the figgy.json file for a service.
        """

        self._out.notify(f"Checking for stray config names.\r\n")

        # Find & Prune stray keys
        ps_keys = set(
            list(
                map(lambda x: x['Name'],
                    self._ssm.get_all_parameters([self._namespace]))))
        ps_only_keys = ps_keys.difference(config_keys)
        for key in ps_only_keys:
            selection = Input.y_n_input(
                f"{key} exists in ParameterStore but does not exist "
                f"in your config, do you want to delete it?",
                default_yes=False)

            if selection:
                self._delete_command.delete_param(key)
            else:
                self._out.notify("OK, skipping due to user selection.")
        if not ps_only_keys:
            print(f"{self.c.fg_bl}No stray keys found.{self.c.rs}")

    def _cleanup_replication(self) -> None:
        """
        Cleans up stray replication and merge configurations under this service's namespace,
        prompting the user before each deletion. Uses the figgy.json state parsed in __init__.
        """

        self._out.notify(f"Checking for stray replication configs.")
        remote_cfgs = self._repl.get_all_configs(self._namespace)
        notify = True
        if remote_cfgs:
            for cfg in remote_cfgs:
                if cfg.source not in list(self._repl_conf.keys()) \
                        and cfg.destination not in list(self._repl_conf.values()) \
                        and cfg.destination not in self._shared_names \
                        and cfg.destination not in list(self._merge_conf.keys()) \
                        and (isinstance(cfg.source, list) or cfg.source.startswith(shared_ns)
                             or cfg.source.startswith(self.context.defaults.service_ns)):
                    notify = False

                    selection = "unselected"
                    while selection.lower() not in ("y", "n"):
                        selection = input(
                            f"Remote replication config with {self.c.fg_bl}{self._namespace}{self.c.rs} replication "
                            f"mapping of: {self.c.fg_bl}{cfg.source} -> {cfg.destination}{self.c.rs} does not "
                            f"exist in your figgy.json. Should this be removed? (y/N): "
                        ).lower()
                        selection = selection if selection != '' else 'n'
                        if selection == "y":
                            self._repl.delete_config(cfg.destination)
        if notify:
            self._out.success(
                "No remote replication configs found available for prune under namespace: "
                f"[[{self._namespace}]]")

    @VersionTracker.notify_user
    @AnonymousUsageTracker.track_command_usage
    def execute(self):
        # prune service configs
        print()
        self._cleanup_parameters(set(self._all_keys))

        print()
        # prune replication configs
        self._cleanup_replication()
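
A minimal sketch of the prune decision that _cleanup_parameters performs above: the stray names are simply the set difference between the names currently stored in Parameter Store under the namespace and the names declared in figgy.json. The function and sample paths below are illustrative only and are not part of the figgy codebase.

from typing import Set


def find_stray_names(remote_names: Set[str], declared_names: Set[str]) -> Set[str]:
    """Return names that exist in Parameter Store but are not declared in figgy.json."""
    return remote_names - declared_names


# find_stray_names({'/app/svc/db-url', '/app/svc/old-key'}, {'/app/svc/db-url'})
# -> {'/app/svc/old-key'}; each stray name is then offered to the user for deletion.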
Exemplo n.º 8
0
class Upgrade(MaintenanceCommand):
    """
    Drives the --version command
    """
    def __init__(self, maintenance_context: MaintenanceContext,
                 config_service: Optional[ConfigService]):
        super().__init__(version, maintenance_context.defaults.colors_enabled,
                         maintenance_context)
        self.tracker = VersionTracker(self.context.defaults, config_service)
        self.upgrade_mgr = UpgradeManager(
            maintenance_context.defaults.colors_enabled)
        self._utils = Utils(
            colors_enabled=maintenance_context.defaults.colors_enabled)
        self._out = Output(
            colors_enabled=maintenance_context.defaults.colors_enabled)

    def upgrade(self):
        latest_version: FiggyVersionDetails = self.tracker.get_version()
        install_success, upgrade_it = False, True

        if self.upgrade_mgr.is_pip_install():
            self._out.error(
                f"Figgy appears to have been installed with pip. Please upgrade [[{CLI_NAME}]] with "
                f"`pip` instead.")
            self._out.print(
                f"\n\n[[Try this command]]: pip install figgy-cli --upgrade")

            self._out.print(
                f"\n\nPip based [[{CLI_NAME}]] installations do not support automatic upgrades and "
                f"instead require pip-managed upgrades; however,  Homebrew, one-line, and manual "
                f"installations support auto-upgrade. Please consider installing figgy through one "
                f"of these other methods to take advantage of this feature. "
                f"It will save you time, help keep you up-to-date, and enable important features like "
                f"release-rollbacks and canary releases! "
                f"[[https://www.figgy.dev/docs/getting-started/install/]]")
            sys.exit(0)

        install_path = self.upgrade_mgr.install_path

        if not install_path:
            self._utils.error_exit(
                f"Unable to detect local figgy installation. Please reinstall figgy and follow one "
                f"of the recommended installation procedures.")

        if latest_version.version == VERSION:
            self._out.success(
                f'You are currently using the latest version of [[{CLI_NAME}]]: [[{VERSION}]]'
            )
            upgrade_it = False
        elif self.tracker.upgrade_available():
            self._out.notify_h2(
                f"New version: [[{latest_version.version}]] is more recent than your version: [[{VERSION}]]"
            )
            upgrade_it = True
        elif not self.tracker.cloud_version_compatible_with_upgrade():
            self._out.notify_h2(
                f"Version [[{self.tracker.get_version().version}]] of the Figgy CLI is available but your "
                f"current version of Figgy Cloud ([[{self.tracker.current_cloud_version()}]]) is not compatible."
                f" Your administrator must first update FiggyCloud to at least version: "
                f"[[{self.tracker.required_cloud_version()}]] before you can upgrade Figgy."
            )
            upgrade_it = False
        else:
            self._out.notify_h2(
                f"Your version: [[{VERSION}]] is more recent then the current recommended version "
                f"of {CLI_NAME}: [[{latest_version.version}]]")
            upgrade_it = Input.y_n_input(
                f'Would you like to revert to the current recommended version '
                f'of {CLI_NAME}?')

        if upgrade_it:
            if self._utils.is_mac():
                self._out.print(
                    f"\nMacOS auto-upgrade is supported. Performing auto-upgrade."
                )
                install_success = self.install_mac(latest_version)
            elif self._utils.is_linux():
                self._out.print(
                    f"\nLinux auto-upgrade is supported. Performing auto-upgrade."
                )
                install_success = self.install_linux(latest_version)
            elif self._utils.is_windows():
                self._out.print(
                    f"\nWindows auto-upgrade is supported. Performing auto-upgrade."
                )
                install_success = self.install_windows(latest_version)

            if install_success:
                self._out.success(
                    f"Installation successful! Exiting. Rerun `[[{CLI_NAME}]]` "
                    f"to use the latest version!")
            else:
                self._out.warn(
                    f"\nUpgrade may not have been successful. Check by re-running "
                    f"[[`{CLI_NAME}` --version]] to see if it was. If it wasn't, please reinstall [[`{CLI_NAME}`]]. "
                    f"See {INSTALL_URL}.")

    def install_mac(self, latest_version: FiggyVersionDetails) -> bool:
        install_path = '/usr/local/bin/figgy'

        if self.upgrade_mgr.is_brew_install():
            self._out.notify_h2(f"Homebrew installation detected!")

            print(
                f"This upgrade process will not remove your brew installation but will instead unlink it. "
                f"Going forward you will no longer need homebrew to manage {CLI_NAME}. Continuing is recommended.\n"
            )

            selection = Input.y_n_input(f"Continue? ", default_yes=True)
        else:
            selection = True

        if selection:
            self.upgrade_mgr.install_onedir(install_path,
                                            latest_version.version, MAC)
            return True
        else:
            self._out.print(
                f'\n[[Auto-upgrade aborted. To upgrade through brew run:]] \n'
                f'-> brew upgrade figtools/figgy/figgy')
            self._out.warn(
                f"\n\nYou may continue to manage [[{CLI_NAME}]] through Homebrew, but doing so will "
                f"limit some upcoming functionality around canary releases, rollbacks, and dynamic "
                f"version-swapping.")
            return False

    def install_linux(self, latest_version: FiggyVersionDetails) -> bool:
        install_path = self.upgrade_mgr.install_path
        self.upgrade_mgr.install_onedir(install_path, latest_version.version,
                                        LINUX)
        return True

    def install_windows(self, latest_version: FiggyVersionDetails) -> bool:
        install_path = self.upgrade_mgr.install_path
        self.upgrade_mgr.install_onedir(install_path, latest_version.version,
                                        WINDOWS)
        return True

    @AnonymousUsageTracker.track_command_usage
    def execute(self):
        self.upgrade()
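
The upgrade() method above branches on four states: already on the recommended version, a newer version available, an available version that the current FiggyCloud deployment cannot support, and a local version ahead of the recommended one. A simplified sketch of that decision flow follows; VersionState and should_upgrade are hypothetical names used only for illustration, while the real command consults VersionTracker and UpgradeManager.

from dataclasses import dataclass


@dataclass
class VersionState:
    current_version: str
    latest_version: str
    upgrade_available: bool
    cloud_compatible: bool


def should_upgrade(state: VersionState) -> bool:
    """Return True when an install (or rollback) should be attempted."""
    if state.latest_version == state.current_version:
        return False  # already on the recommended version
    if state.upgrade_available:
        return True  # a newer version has been published
    if not state.cloud_compatible:
        return False  # FiggyCloud must be upgraded before the CLI can be
    # Local version is ahead of the recommended one; the real command prompts
    # the user before reverting.
    return True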
Exemplo n.º 9
0
class Validate(ConfigCommand):
    def __init__(self, ssm_init: SsmDao, colors_enabled: bool,
                 context: ConfigContext):
        super().__init__(validate, colors_enabled, context)
        self._ssm = ssm_init
        self._config_path = context.ci_config_path if context.ci_config_path else Utils.find_figgy_json(
        )
        self._utils = Utils(colors_enabled)
        self._replication_only = context.replication_only
        self._errors_detected = False
        self.example = f"{self.c.fg_bl}{CLI_NAME} config {self.command_printable} " \
                       f"--env dev --config /path/to/config{self.c.rs}"
        self._FILE_PREFIX = "file://"
        self._out = Output(colors_enabled)

    def _validate(self):
        missing_key = False
        config = self._utils.get_ci_config(self._config_path)
        shared_names = set(
            self._utils.get_config_key_safe(SHARED_KEY, config, default=[]))
        repl_conf = self._utils.get_config_key_safe(REPLICATION_KEY,
                                                    config,
                                                    default={})
        repl_from_conf = self._utils.get_config_key_safe(REPL_FROM_KEY,
                                                         config,
                                                         default={})
        merge_conf = self._utils.get_config_key_safe(MERGE_KEY,
                                                     config,
                                                     default={})
        config_keys = set(
            self._utils.get_config_key_safe(CONFIG_KEY, config, default=[]))
        namespace = self._utils.get_namespace(config)
        all_names = KeyUtils.find_all_expected_names(config_keys, shared_names,
                                                     merge_conf, repl_conf,
                                                     repl_from_conf, namespace)

        all_params = self._ssm.get_all_parameters([namespace])

        all_param_names = [param['Name'] for param in all_params]

        print()
        for name in all_names:
            if name not in all_param_names:
                self._out.warn(
                    f"Fig missing from [[{self.run_env}]] environment Parameter Store: [[{name}]]"
                )
                missing_key = True
            else:
                self._out.print(f"Fig found in ParameterStore: [[{name}]].")

        if missing_key:
            print("\n\n")
            self._utils.error_exit(f"{MISSING_PS_NAME_MESSAGE}")
        else:
            self._out.success(
                f"\nSuccess! All figs have been located in the [[{self.run_env}]] ParameterStore!"
            )

    @VersionTracker.notify_user
    @AnonymousUsageTracker.track_command_usage
    def execute(self):
        self._validate()
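
The validation above reduces to a membership check: every name that figgy.json says should exist must appear among the Parameter Store names fetched for the namespace. A self-contained sketch of that check follows; missing_figs and the sample paths are illustrative only.

from typing import Iterable, List, Set


def missing_figs(expected_names: Iterable[str], ps_names: Iterable[str]) -> List[str]:
    """Return expected names that are absent from Parameter Store."""
    present: Set[str] = set(ps_names)
    return [name for name in expected_names if name not in present]


# missing_figs(['/app/svc/db-url', '/app/svc/api-key'], ['/app/svc/db-url'])
# -> ['/app/svc/api-key']; a non-empty result triggers error_exit in the command above.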
Exemplo n.º 10
0
class Login(HelpCommand, ABC):
    """
    Log the user into every possible environment they have access to. Sessions are cached.
    This improves figgy performance throughout the day.
    """
    def __init__(self, help_context: HelpContext, figgy_setup: FiggySetup,
                 figgy_context: FiggyContext):
        super().__init__(login, Utils.not_windows(), help_context)
        self._setup = figgy_setup
        self._defaults: CLIDefaults = figgy_setup.get_defaults()
        self._figgy_context = figgy_context
        self._utils = Utils(self._defaults.colors_enabled)
        self._aws_cfg = AWSConfig(color=self.c)
        self._out = Output(self._defaults.colors_enabled)

        self.example = f"\n\n{self.c.fg_bl}{CLI_NAME} {login.name} \n" \
                       f"{self.c.rs}{self.c.fg_yl}  --or--{self.c.rs}\n" \
                       f"{self.c.fg_bl}{CLI_NAME} {login.name} {sandbox.name}{self.c.rs}"

    def login(self):
        self._utils.validate(
            self._defaults.provider.name in Provider.names(),
            f"You cannot login until you've configured Figgy. Please run `{CLI_NAME}` --configure"
        )
        provider = SessionProviderFactory(self._defaults,
                                          self._figgy_context).instance()
        assumable_roles: List[AssumableRole] = provider.get_assumable_roles()
        self._out.print(
            f"{self.c.fg_bl}Found {len(assumable_roles)} possible logins. Logging in...{self.c.rs}"
        )

        for role in assumable_roles:
            self._out.print(
                f"Login successful for {role.role} in environment: {role.run_env}"
            )
            provider.get_session_and_role(role, False)

        self._out.print(
            f"{self.c.fg_gr}Login successful. All sessions are cached.{self.c.rs}"
        )

    def login_sandbox(self):
        """
        If user provides --role flag, skip role & env selection for a smoother user experience.
        """
        EnvironmentValidator(self._defaults).validate_environment_variables()

        Utils.wipe_vaults() or Utils.wipe_defaults() or Utils.wipe_config_cache()

        self._out.print(
            f"{self.c.fg_bl}Logging you into the Figgy Sandbox environment.{self.c.rs}"
        )
        user = Input.input("Please input a user name: ", min_length=2)
        colors = Input.select_enable_colors()

        # Prompt user for role if --role not provided
        if commands.role not in self.context.options:
            role = Input.select("\n\nPlease select a role to impersonate: ",
                                valid_options=SANDBOX_ROLES)
        else:
            role = self.context.role.role
            self._utils.validate(
                role in SANDBOX_ROLES,
                f"Provided role: >>>`{role}`<<< is not a valid sandbox role."
                f" Please choose from {SANDBOX_ROLES}")

        params = {'role': role, 'user': user}
        result = requests.get(GET_SANDBOX_CREDS_URL, params=params)

        if result.status_code != 200:
            self._utils.error_exit(
                "Unable to get temporary credentials from the Figgy sandbox. If this problem "
                f"persists please notify us on our GITHUB: {FIGGY_GITHUB}")

        data = result.json()
        response = SandboxLoginResponse(**data)
        self._aws_cfg.write_credentials(
            access_key=response.AWS_ACCESS_KEY_ID,
            secret_key=response.AWS_SECRET_ACCESS_KEY,
            token=response.AWS_SESSION_TOKEN,
            region=FIGGY_SANDBOX_REGION,
            profile_name=FIGGY_SANDBOX_PROFILE)

        defaults = CLIDefaults.sandbox(user=user, role=role, colors=colors)
        self._setup.save_defaults(defaults)

        run_env = RunEnv(
            env='dev',
            account_id=SANDBOX_DEV_ACCOUNT_ID) if self.context.role else None

        config_mgr = ConfigManager.figgy()
        config_mgr.set(Config.Section.Bastion.PROFILE, FIGGY_SANDBOX_PROFILE)
        defaults = self._setup.configure_extras(defaults)
        defaults = self._setup.configure_roles(current_defaults=defaults,
                                               role=Role(role=role),
                                               run_env=run_env)
        defaults = self._setup.configure_figgy_defaults(defaults)
        self._setup.save_defaults(defaults)

        self._out.success(
            f"\nLogin successful. Your sandbox session will last for [[1 hour]]."
        )

        self._out.print(
            f"\nIf your session expires, you may rerun `{CLI_NAME} login sandbox` to get another sandbox session. "
            f"\nAll previous figgy sessions have been disabled, you'll need to run {CLI_NAME} "
            f"--configure to leave the sandbox.")

    @VersionTracker.notify_user
    @AnonymousUsageTracker.track_command_usage
    def execute(self):
        if self.context.command == login:
            self.login()
        elif self.context.command == sandbox:
            Utils.wipe_vaults() or Utils.wipe_defaults()
            self.login_sandbox()
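
login_sandbox above obtains temporary credentials over HTTP and writes them to a dedicated AWS profile. The sketch below shows only the credential fetch, under the assumption of a simple JSON endpoint; SANDBOX_CREDS_URL and fetch_sandbox_credentials are placeholders, while the real command uses GET_SANDBOX_CREDS_URL, SandboxLoginResponse, and AWSConfig.write_credentials.

import requests

SANDBOX_CREDS_URL = "https://sandbox.example.com/credentials"  # placeholder endpoint


def fetch_sandbox_credentials(user: str, role: str) -> dict:
    """Request short-lived sandbox credentials for the given user and role."""
    result = requests.get(SANDBOX_CREDS_URL, params={"user": user, "role": role})
    if result.status_code != 200:
        raise RuntimeError(f"Sandbox credential request failed with status {result.status_code}")
    return result.json()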