def config_deleteold(project_dir: str, workspace_dir: str, **kwargs) -> str:
    """`handoff config delete -p <project_directory>`
    Delete the project configuration from the remote parameter store.

    Returns "success" on completion. (Return annotation corrected from
    None: the function does return a string.)
    """
    # Platform handle is resolved from the current cloud provider config.
    platform = cloud.get_platform()
    # "config" is the reserved parameter name holding the project config.
    platform.delete_parameter("config")
    return "success"
def files_get(project_dir: str, workspace_dir: str, **kwargs) -> None:
    """`handoff files get -p <project_directory> -w <workspace_directory>`

    Download the remote files into <workspace_dir>/files. The remote copies
    are templates; they are parsed with secrets and the rendered results are
    written under <workspace_dir>/files.
    """
    state = get_state()
    state.validate_env(
        [RESOURCE_GROUP, TASK, CLOUD_PROVIDER, CLOUD_PLATFORM, BUCKET])
    if not workspace_dir:
        raise Exception("Workspace directory is not set")
    LOGGER.debug("Downloading config files from the remote storage "
                 + state.get(BUCKET))
    platform = cloud.get_platform()
    # Raw remote files land in the templates directory first; the parsed
    # (secret-substituted) output goes to the workspace files directory.
    local_templates = os.path.join(workspace_dir, TEMPLATES_DIR)
    local_files = os.path.join(workspace_dir, FILES_DIR)
    platform.download_dir(local_templates, FILES_DIR)
    _parse_template_files(local_templates, local_files)
    return "success"
def push(project_dir: str, workspace_dir: str, yes=False, **kwargs) -> None:
    """Push the container image to the remote Docker registry.

    If the repository does not exist yet, ask for confirmation (skipped when
    `yes` is truthy) and create it before delegating the actual push to
    `impl.push`.
    """
    _envs(project_dir, workspace_dir, **kwargs)
    state = config.get_state()
    state.validate_env([CONTAINER_IMAGE])
    platform = cloud.get_platform()
    username, password, registry = platform.get_docker_registry_credentials()
    image_name = state.get(CONTAINER_IMAGE)
    try:
        platform.get_repository_images(image_name)
    except Exception:
        # Repository lookup failed — assume it is missing and offer to
        # create it.
        if not yes:
            sys.stdout.write("Repository %s does not exist. Create (y/N)?"
                             % image_name)
            if input().lower() not in ["y", "yes"]:
                return
        LOGGER.info("Creating repository " + image_name)
        platform.create_repository()
    return impl.push(username, password, registry, yes=yes,
                     file_descriptor=sys.stdout, **kwargs)
def config_push(project_dir: str, workspace_dir: str, **kwargs) -> str:
    """`handoff config push -p <project_directory>`
    Push project.yml and the contents of project_dir/config as a secure
    parameter key.

    Returns "success" on completion. (Return annotation corrected from
    None: the function does return a string.)
    """
    platform = cloud.get_platform()
    # Uploads <project_dir>/project.yml under the remote key PROJECT_FILE.
    platform.upload_file(os.path.join(project_dir, PROJECT_FILE), PROJECT_FILE)
    return "success"
def files_push(project_dir: str, workspace_dir: str, **kwargs) -> None:
    """`handoff files push -p <project_directory>`

    Upload everything under <project_dir>/files to remote storage under the
    same prefix.
    """
    remote = cloud.get_platform()
    local_dir = os.path.join(project_dir, FILES_DIR)
    remote.upload_dir(local_dir, FILES_DIR)
    return "success"
def secrets_push(project_dir: str, workspace_dir: str, yes: bool = None,
                 **kwargs) -> None:
    """`handoff secrets push -p <project_directory> -v secrets_dir=<secrets_dir>`
    Push the contents of <secrets_file> to remote parameter store

    --vars secrets_dir (.secrets): The directory containing secrets.yml file,
    which is a YAML file storing secrets with format:
    ```
    - key: key1
      value: value1
    - key: key2
      # The value can also be loaded from a text file
      file: file_name
      # The value is stored as a resource group level secret and can be
      # shared among the projects under the same group.
      level: "resource group"
    ```

    yes=None prompts for confirmation; yes=True pushes without prompting;
    yes=False aborts. Returns "success" or "abort".
    """
    state = get_state()
    state.validate_env([RESOURCE_GROUP, TASK, CLOUD_PROVIDER, CLOUD_PLATFORM])
    platform = cloud.get_platform()
    secrets = _secrets_get_local(project_dir, workspace_dir, **kwargs)
    if not secrets:
        raise Exception("No secrets are defined.")
    print("Secrets to be pushed to the remote parameter store:")
    if "config" in secrets:
        raise Exception("secrets with name \"config\" is reserved by handoff.")
    for key in secrets.keys():
        skip_msg = ""
        if not secrets[key].get("push", True):
            skip_msg = " SKIP PUSH"
        print(" - " + key + " (" + secrets[key].get("level", "task")
              + " level)" + skip_msg)
    # BUG FIX: the original evaluated `response.lower()` whenever yes was not
    # False, but `response` is only bound when yes is None — calling with
    # yes=True raised NameError. Fold the prompt result into `yes` instead.
    if yes is None:
        response = input("Proceed? (y/N)")
        yes = response.lower() in ["yes", "y"]
    if not yes:
        LOGGER.info("Not pushing the secrets by choice.")
        return "abort"
    for key in secrets:
        if not secrets[key].get("push", True):
            continue
        level = secrets[key].get("level", "task")
        # BUG FIX: push the locally loaded secret value. The original read
        # the module-level SECRETS cache (remote values fetched by
        # _secrets_get), which raises KeyError for any key not yet stored
        # remotely — defeating the purpose of pushing new secrets.
        # NOTE(review): assumes _secrets_get_local resolves "file" entries
        # into a "value" field — confirm against its implementation.
        platform.push_parameter(
            key,
            secrets[key]["value"],
            resource_group_level=(level.lower().strip() == "resource group"),
            **kwargs)
    return "success"
def files_delete(project_dir: str, workspace_dir: str, **kwargs) -> None:
    """`handoff files delete -p <project_directory>`

    Remove the files/templates directory from the remote storage.
    """
    state = get_state()
    state.validate_env(
        [RESOURCE_GROUP, TASK, CLOUD_PROVIDER, CLOUD_PLATFORM, BUCKET])
    cloud.get_platform().delete_dir(FILES_DIR)
    return "success"
def _secrets_get(project_dir: str, workspace_dir: str, **kwargs) -> None:
    """Fetch every secret from the remote parameter store into the
    module-level SECRETS cache (key -> value).
    """
    state = get_state()
    state.validate_env([RESOURCE_GROUP, TASK, CLOUD_PROVIDER, CLOUD_PLATFORM])
    platform = cloud.get_platform()
    LOGGER.debug("Fetching the secrets from the remote parameter store.")
    global SECRETS
    SECRETS = {name: entry["value"]
               for name, entry in platform.get_all_parameters().items()}
def artifacts_delete(project_dir: str, workspace_dir: str, **kwargs) -> None:
    """`handoff artifacts delete -p <project_directory>`

    Remove the remote artifacts/last directory.
    """
    state = get_state()
    state.validate_env(
        [RESOURCE_GROUP, TASK, CLOUD_PROVIDER, CLOUD_PLATFORM, BUCKET])
    LOGGER.debug("Deleting artifacts from the remote storage "
                 + state.get(BUCKET))
    platform = cloud.get_platform()
    remote_dir = os.path.join(ARTIFACTS_DIR, BUCKET_CURRENT_PREFIX)
    platform.delete_dir(remote_dir)
    return "success"
def secrets_delete(project_dir: str, workspace_dir: str, yes: bool = False,
                   **kwargs):
    """`handoff secrets delete -p <project_directory> -d file=<secrets_file>`

    Delete the secrets listed in <secrets_file> from the remote parameter
    store. By default, .secrets/secrets.yml in the current working directory
    supplies the list. Returns "success", "abort", or None when no secrets
    are defined.
    """
    state = get_state()
    state.validate_env([RESOURCE_GROUP, TASK, CLOUD_PROVIDER, CLOUD_PLATFORM])
    platform = cloud.get_platform()
    secrets = _secrets_get_local(project_dir, workspace_dir, **kwargs)
    if not secrets:
        return
    print("Deleting the following keys to remote parameter store:")
    for name, entry in secrets.items():
        suffix = "" if entry.get("push", True) else " SKIP DELETE"
        print(" - " + name + " (" + entry.get("level", "task")
              + " level)" + suffix)
    if not yes:
        if input("Proceed? (y/N)").lower() not in ["yes", "y"]:
            return "abort"
    for name, entry in secrets.items():
        if not entry.get("push", True):
            continue
        group_level = (entry.get("level", "task").lower().strip()
                       == "resource group")
        try:
            platform.delete_parameter(
                name, resource_group_level=group_level, **kwargs)
        except Exception:
            # Best effort: a missing key is reported, not fatal.
            LOGGER.warning(
                "%s does not exist in remote parameter store." % name)
    return "success"
def artifacts_archive(project_dir: str, workspace_dir: str, **kwargs) -> None:
    """`handoff artifacts archive -p <project_directory>`

    Copy the remote artifacts from the last directory to
    runs/<utc-timestamp>.
    """
    state = get_state()
    state.validate_env(
        [RESOURCE_GROUP, TASK, CLOUD_PROVIDER, CLOUD_PLATFORM, BUCKET])
    LOGGER.debug("Copying the remote artifacts from last to runs "
                 + state.get(BUCKET))
    platform = cloud.get_platform()
    src_dir = os.path.join(ARTIFACTS_DIR, BUCKET_CURRENT_PREFIX)
    stamp = datetime.datetime.utcnow().isoformat()
    dest_dir = os.path.join(ARTIFACTS_DIR, BUCKET_ARCHIVE_PREFIX, stamp)
    platform.copy_dir_to_another_bucket(src_dir, dest_dir)
    return "success"
def artifacts_push(project_dir: str, workspace_dir: str, **kwargs) -> None:
    """`handoff artifacts push -p <project_directory> -w <workspace_directory>`

    Upload the local artifacts directory to the remote storage under the
    last directory.
    """
    state = get_state()
    state.validate_env(
        [RESOURCE_GROUP, TASK, CLOUD_PROVIDER, CLOUD_PLATFORM, BUCKET])
    if not workspace_dir:
        raise Exception("Workspace directory is not set")
    LOGGER.debug("Pushing local artifacts to the remote storage "
                 + state.get(BUCKET))
    platform = cloud.get_platform()
    local_dir = os.path.join(workspace_dir, ARTIFACTS_DIR)
    remote_prefix = os.path.join(ARTIFACTS_DIR, BUCKET_CURRENT_PREFIX)
    platform.upload_dir(local_dir, remote_prefix)
    return "success"
def run(project_dir: str, workspace_dir: str, envs: Dict = None,
        vars: Dict = None, **kwargs) -> None:
    """Run the task on the cloud platform.

    envs: extra environment variables merged over the platform auth env.
    vars: extra variables merged into kwargs before delegating to impl.run.

    FIX: the defaults were mutable dicts ({}), which are shared across calls
    in Python; they are now None-sentinels with the same effective behavior.
    """
    envs = {} if envs is None else envs
    vars = {} if vars is None else vars
    _envs(project_dir, workspace_dir, envs=envs, **kwargs)
    state = config.get_state()
    state.validate_env([CONTAINER_IMAGE])
    platform = cloud.get_platform()
    env = platform.get_platform_auth_env(vars)
    env.update(envs)
    kwargs.update(vars)
    try:
        response = impl.run(extra_env=env, file_descriptor=sys.stdout,
                            **kwargs)
    except Exception as e:
        # Literal "\n" sequences in the error text are expanded so the
        # message is readable in the log, then the error is re-raised.
        LOGGER.critical(str(e).replace("\\n", "\n"))
        raise
    return response
def _read_project_remote(workspace_dir: str) -> Dict:
    """Read the precompiled project config from the remote parameter store
    (e.g. AWS SSM), validate it, and return it as a dict.
    """
    state = get_state()
    LOGGER.debug("Reading precompiled config from remote.")
    state.validate_env([RESOURCE_GROUP, TASK, CLOUD_PROVIDER, CLOUD_PLATFORM])
    platform = cloud.get_platform()
    account_id = state.get("AWS_ACCOUNT_ID")
    if not account_id:
        raise Exception("Failed to login to cloud account. "
                        + "Did you forget set credentials such as AWS_PROFILE?")
    # Derive the bucket name from the resource group when it is not set yet.
    if not state.get(BUCKET):
        _set_bucket_name(state[RESOURCE_GROUP], account_id)
    local_path = os.path.join(workspace_dir, PROJECT_FILE)
    platform.download_file(local_path, PROJECT_FILE)
    with open(local_path, "r") as f:
        project = yaml.load(f, Loader=yaml.FullLoader)
    _validate_project(project)
    return project
def artifacts_get(project_dir: str, workspace_dir: str, **kwargs) -> None:
    """`handoff artifacts get -p <project_directory> -w <workspace_directory>`

    Download artifacts from the remote last directory into
    <workspace_dir>/artifacts.
    """
    state = get_state()
    state.validate_env(
        [RESOURCE_GROUP, TASK, CLOUD_PROVIDER, CLOUD_PLATFORM, BUCKET])
    if not workspace_dir:
        raise Exception("Workspace directory is not set")
    LOGGER.debug("Downloading artifacts from the remote storage "
                 + state.get(BUCKET))
    platform = cloud.get_platform()
    local_dir = os.path.join(workspace_dir, ARTIFACTS_DIR)
    remote_prefix = os.path.join(ARTIFACTS_DIR, BUCKET_CURRENT_PREFIX)
    platform.download_dir(local_dir, remote_prefix)
    return "success"
def _update_state(config: Dict, vars: Dict = None) -> None:
    """Set environment variables and in-memory variables from config.

    Warning: environment variables are inherited by subprocesses. The
    sensitive information may be compromised by a bad subprocess.

    FIX: `vars` default was a mutable dict ({}), shared across calls; it is
    now a None-sentinel with identical behavior. A redundant `pass` after
    the warning in the except clause was also removed.
    """
    vars = {} if vars is None else vars
    state = get_state()
    LOGGER.debug("Setting environment variables from config.")
    if SECRETS:
        state.update(SECRETS)
    for v in config.get("envs", list()):
        # Missing values are resolved from the secrets store.
        if v.get("value") is None:
            v["value"] = _get_secret(v["key"])
        if v["value"]:
            state.set_env(v["key"], v["value"], trust=True)
    for v in config.get("vars", list()):
        state[v["key"]] = v["value"]
    state.update(vars)
    if not state.get(BUCKET):
        try:
            # get_platform may raise when cloud credentials are absent.
            platform = cloud.get_platform()
            aws_account_id = state.get("AWS_ACCOUNT_ID")
        except Exception:
            LOGGER.warning("Error getting platform handle")
        else:
            if not aws_account_id:
                LOGGER.warning("aws_account_id is not set")
            elif state.get(RESOURCE_GROUP):
                _set_bucket_name(state[RESOURCE_GROUP], aws_account_id)
    if not state.get(BUCKET):
        LOGGER.warning(("Environment variable %s is not set. "
                        + "Remote file read/write will fail.") % BUCKET)
def secrets_print(project_dir: str, workspace_dir: str, **kwargs):
    """`handoff secrets print -p <project_directory>`

    Fetch the secrets from the remote parameter store and return them as a
    list of {"key", "level", "value"} records (YAML-dump-ready).
    """
    state = get_state()
    state.validate_env([RESOURCE_GROUP, TASK, CLOUD_PROVIDER, CLOUD_PLATFORM])
    platform = cloud.get_platform()
    LOGGER.debug("Fetching the secrets from the remote parameter store.")
    records = []
    for name, param in platform.get_all_parameters().items():
        if name == "config":
            # "config" holds the project configuration, not a user secret.
            continue
        # A parameter stored under the resource-group path is group level.
        if param["path"].split("/")[-2] == state[RESOURCE_GROUP]:
            scope = "resource group"
        else:
            scope = "task"
        records.append({
            "key": name,
            "level": scope,
            "value": param["value"],
        })
    return records
def _envs(project_dir: str, workspace_dir: str, **kwargs) -> None: platform = cloud.get_platform() # Do this to set CONTAINER_IMAGE _ = admin._config_get_local(project_dir, workspace_dir, **kwargs)