Example 1
def unseal_secrets(env: str) -> None:
    """
    Decrypts the secrets for the desired env and base64 decodes them to make
    them easy to edit.

    :param str env: The environment.
    """
    # Validate env
    load_env_settings(env)

    master_key = get_master_key(env=env)
    secrets_pem = secrets_pem_path(env=env)

    sealed_secret_files = [
        secret_file
        for secret_file in (Path("envs") / env / "secrets").glob("*.yaml")
        if not secret_file.name.endswith(UNSEALED_SECRETS_EXTENSION)
    ]

    label(logger.info, f"Unsealing secrets for {env}")

    for input_file in sealed_secret_files:
        output_file = input_file.with_name(input_file.stem +
                                           UNSEALED_SECRETS_EXTENSION)

        logger.info(f"Unsealing {input_file} to {output_file}")

        content = input_file.read_text(encoding="utf-8")

        content = kube_unseal(content, master_key, cert=secrets_pem)
        content = base64_decode_secrets(content)

        output_file.write_text(content, encoding="utf-8")
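
The base64_decode_secrets helper is not shown in these examples; a minimal sketch of what such a helper might look like, assuming the input is a single-document Kubernetes Secret manifest with base64-encoded values under data (the function name and manifest layout are assumptions):

import base64

import yaml


def base64_decode_secrets_sketch(content: str) -> str:
    # Hypothetical stand-in for base64_decode_secrets: decode every value under
    # `data` so the secrets become readable and editable as plain text.
    doc = yaml.safe_load(content)
    doc["data"] = {
        key: base64.b64decode(value).decode("utf-8")
        for key, value in (doc.get("data") or {}).items()
    }
    return yaml.safe_dump(doc)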
Example 2
def kubeval(keep_configs=False):
    """
    Check that all Kubernetes configs look valid with kubeval
    """

    label(logger.info, "Checking Kubernetes configs")

    def _should_ignore(path):
        if TMP in path.parents:
            return True

        return False

    merge_tmp = TMP / f"kubeval-{generate_random_id()}"

    kube_yamls = [
        str(get_merged_kube_file(path, merge_tmp))
        for path in Path(".").glob("**/kube/*.yaml")
        if not _should_ignore(path)
    ]

    skip_kinds = ",".join(KUBEVAL_SKIP_KINDS)

    run(["kubeval", "--strict", "--skip-kinds", skip_kinds] + kube_yamls)

    if merge_tmp.exists():
        if keep_configs:
            logger.info(f"Keeping temporary kube merges in {merge_tmp}")
        else:
            logger.info(f"Removing temporary kube merges from {merge_tmp}")
            rmtree(merge_tmp)
Example 3
def init_hooks(ctx):
    """
    Initialize version control hooks
    :param Context ctx:
    """
    label(logger.info, "Installing pre-commit hooks")
    run(["pre-commit", "install"])
Example 4
def seal_secrets(env: str) -> None:
    """
    Base64 encodes and seals the secrets for the desired env.

    :param str env: The environment.
    """
    # Validate env
    load_env_settings(env)

    secrets_pem = secrets_pem_path(env=env)

    unsealed_secret_files = (Path("envs") / env /
                             "secrets").glob(f"*{UNSEALED_SECRETS_EXTENSION}")

    label(logger.info, f"Sealing secrets for {env}")

    for input_file in unsealed_secret_files:
        output_file_name = input_file.name[:-len(UNSEALED_SECRETS_EXTENSION
                                                 )] + ".yaml"
        output_file = input_file.with_name(output_file_name)

        logger.info(f"Sealing {input_file} as {output_file}")

        content = input_file.read_text(encoding="utf-8")

        content = base64_encode_secrets(content)
        content = kube_seal(content, cert=secrets_pem)

        output_file.write_text(content, encoding="utf-8")
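
Taken together with Example 1, the intended workflow appears to be a decrypt-edit-reseal round trip; a brief usage sketch, assuming these tasks can be called directly and that "staging" is a valid environment name (both are assumptions):

# Decrypt and base64-decode the secrets so they can be edited as plain text.
unseal_secrets("staging")

# ... edit the unsealed secret files under envs/staging/secrets/ ...

# Re-encode and re-seal them back into the committed *.yaml files.
seal_secrets("staging")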
Example 5
def get_master_key(env: str) -> None:
    """
    Get the master key for SealedSecrets for the given env.

    :param str env: The environment
    """
    settings = load_env_settings(env)
    ensure_context(settings.KUBE_CONTEXT)

    label(logger.info, f"Getting master key for {env}")

    # Based on:
    # https://github.com/bitnami-labs/sealed-secrets#how-can-i-do-a-backup-of-my-sealedsecrets
    result = run([
        "kubectl",
        "get",
        "secret",
        "-n",
        "kube-system",
        "-l",
        "sealedsecrets.bitnami.com/sealed-secrets-key",
        "-o",
        "yaml",
    ])

    content = result.stdout.decode(encoding="utf-8")
    output_file = master_key_path(env=env)

    logger.info(f"Saving master key to {output_file}")

    output_file.write_text(content, encoding="utf-8")
Example 6
    def build(self, ctx: Context, dry_run=False):
        """
        Build the Docker image for this component, if it has a Dockerfile.

        :param Context ctx:
        :param bool dry_run: Only log what would be built.
        """
        label(logger.info, f"Building {self.path}")
        dockerfile = self.path / "Dockerfile"

        if not dockerfile.exists():
            logger.info(f"No Dockerfile for {self.name} component")
            return

        if dry_run:
            logger.info(f"[DRY RUN] Building {self.name} Docker image")
        else:
            logger.info(f"Building {self.name} Docker image")
            tag = self._get_full_docker_name()
            run(["docker", "build", self.path, "-t", tag], stream=True)
Example 7
def cleanup_acr_repository(ctx, registry, repository):
    """
    Clean up a single repository in Azure Container Registry
    :param Context ctx:
    :param str registry:
    :param str repository:
    """
    label(logger.info, f"Cleaning up ACR {registry}/{repository} repository")
    result = run([
        "az",
        "acr",
        "repository",
        "show-tags",
        "--name",
        registry,
        "--repository",
        repository,
    ])
    tags = json.loads(result.stdout)

    # <branch>-<hash>-<YYYYMMDD>-<HHMMSS>
    tag_match = re.compile(r"^([^-]+)-([A-Za-z0-9]{7})-([0-9]+)-([0-9]+)$")

    def _sort_tag(key):
        """
        From <branch>-<hash>-<datetime> to <datetime>-<branch>-<hash>
        :param str key:
        :return str:
        """
        return re.sub(tag_match, "\\3-\\4-\\1-\\2", key)

    for tag in sorted(tags, key=_sort_tag, reverse=True)[MAX_TAGS:]:
        logger.info(f"Deleting old tag {tag}")
        run([
            "az",
            "acr",
            "repository",
            "delete",
            "--yes",
            "--name",
            registry,
            "--image",
            f"{repository}:{tag}",
        ])
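
To make the sort key transparent: _sort_tag rewrites a matching tag so the timestamp leads, which lets plain string sorting order tags chronologically (the sample tags below are made up):

import re

tag_match = re.compile(r"^([^-]+)-([A-Za-z0-9]{7})-([0-9]+)-([0-9]+)$")

# "main-abc1234-20240105-120000" -> "20240105-120000-main-abc1234"
print(re.sub(tag_match, r"\3-\4-\1-\2", "main-abc1234-20240105-120000"))

# Tags that do not match the pattern are returned unchanged and sort as-is.
print(re.sub(tag_match, r"\3-\4-\1-\2", "latest"))

Sorting the rewritten keys newest-first and slicing off the first MAX_TAGS entries then leaves only the older tags to delete.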
Example 8
def seal_secrets(env: str, only_changed=False) -> None:
    """
    Base64 encodes and seals the secrets for the desired env.

    :param str env: The environment.
    :param bool only_changed: Reseal only changed secrets.
    """
    # Validate env
    load_env_settings(env)

    secrets_pem = secrets_pem_path(env=env)

    unsealed_secret_files = (Path("envs") / env /
                             "secrets").glob(f"*{UNSEALED_SECRETS_EXTENSION}")

    label(logger.info, f"Sealing secrets for {env}")

    for input_file in unsealed_secret_files:
        output_file_name = input_file.name[:-len(UNSEALED_SECRETS_EXTENSION
                                                 )] + ".yaml"
        output_file = input_file.with_name(output_file_name)

        logger.info(f"Sealing {input_file} as {output_file}")

        content = input_file.read_text(encoding="utf-8")
        content = base64_encode_secrets(content)
        sealed_content = kube_seal(content, cert=secrets_pem)

        if only_changed and output_file.exists():
            master_key = get_master_key(env=env)
            sealed_original_content = output_file.read_text(encoding="utf-8")
            original_content = kube_unseal(sealed_original_content,
                                           master_key,
                                           cert=secrets_pem)
            sealed_content = _revert_unchanged_secrets(
                content, sealed_content, original_content,
                sealed_original_content)
        else:
            # Load and dump the YAML to keep formatting consistent with the branch above
            sealed_content = yaml.safe_dump(yaml.safe_load(sealed_content))

        output_file.write_text(sealed_content, encoding="utf-8")
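
The _revert_unchanged_secrets helper is not shown in these examples. One plausible sketch of the idea, assuming single-document manifests with plain values under data and ciphertext under spec.encryptedData, is to keep the previously sealed value for every key whose plaintext did not change, so that re-sealing (which is not deterministic) does not produce noisy diffs:

import yaml


def _revert_unchanged_secrets_sketch(content, sealed_content, original_content,
                                     sealed_original_content):
    # Hypothetical sketch: reuse the old ciphertext for keys whose plaintext
    # value is identical to the previously unsealed content.
    new_plain = yaml.safe_load(content)
    old_plain = yaml.safe_load(original_content)
    new_sealed = yaml.safe_load(sealed_content)
    old_sealed = yaml.safe_load(sealed_original_content)

    old_cipher = old_sealed.get("spec", {}).get("encryptedData", {})
    for key, value in (new_plain.get("data") or {}).items():
        unchanged = (old_plain.get("data") or {}).get(key) == value
        if unchanged and key in old_cipher:
            new_sealed["spec"]["encryptedData"][key] = old_cipher[key]

    return yaml.safe_dump(new_sealed)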
Example 9
    def build(self, ctx: Context, dry_run=False, docker_args=None):
        """
        Build the Docker image for this component, if it has a Dockerfile.

        :param Context ctx:
        :param bool dry_run: Only log what would be built.
        :param docker_args: Extra "NAME=value" arguments passed to docker build via --build-arg.
        """
        label(logger.info, f"Building {self.path}")
        dockerfile = self.path / "Dockerfile"

        if not dockerfile.exists():
            logger.info(f"No Dockerfile for {self.name} component")
            return

        build_args = []
        if docker_args:
            # Insert --build-arg before each item from docker_args.
            for docker_arg in docker_args:
                build_args.extend(["--build-arg", docker_arg])

        if dry_run:
            logger.info(f"[DRY RUN] Building {self.name} Docker image")
        else:
            logger.info(f"Building {self.name} Docker image")
            tag = self._get_full_docker_name()
            run(["docker", "build", *build_args, self.path, "-t", tag],
                stream=True)
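
For clarity, the loop above turns each docker_args entry into its own --build-arg flag; a tiny illustration with made-up values:

docker_args = ["GIT_SHA=abc1234", "BUILD_ENV=production"]

build_args = []
for docker_arg in docker_args:
    build_args.extend(["--build-arg", docker_arg])

# ['--build-arg', 'GIT_SHA=abc1234', '--build-arg', 'BUILD_ENV=production']
print(build_args)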
Example 10
def get_master_key(env: str, use_existing=True) -> Path:
    """
    Get the master key for SealedSecrets for the given env.

    :param str env: The environment
    :param bool use_existing: If True, try to use an existing key from the filesystem
        instead of fetching a new one from the cluster.
    :return Path: The path to the master key
    """
    settings = load_env_settings(env)
    master_key_file = master_key_path(env=env)
    if use_existing and master_key_file.exists():
        return master_key_file

    ensure_context(settings.KUBE_CONTEXT)

    label(logger.info, f"Getting master key for {env}")

    # Based on:
    # https://github.com/bitnami-labs/sealed-secrets#how-can-i-do-a-backup-of-my-sealedsecrets
    result = run([
        "kubectl",
        "get",
        "secret",
        "-n",
        "kube-system",
        "-l",
        "sealedsecrets.bitnami.com/sealed-secrets-key",
        "-o",
        "yaml",
    ])

    content = result.stdout.decode(encoding="utf-8")

    logger.info(f"Saving master key to {master_key_file}")

    master_key_file.write_text(content, encoding="utf-8")
    return master_key_file
Example 11
def kubeval(ctx):
    """
    Check that all Kubernetes configs look valid with kubeval
    :param Context ctx:
    """

    label(logger.info, "Checking Kubernetes configs")

    def _should_ignore(path):
        s = str(path)
        if s.startswith("temp"):
            return True

        return False

    kube_yamls = [
        str(path) for path in Path(".").glob("**/kube/*.yaml")
        if not _should_ignore(path)
    ]

    skip_kinds = ",".join(devops.settings.KUBEVAL_SKIP_KINDS)

    run(["kubeval", "--skip-kinds", skip_kinds] + kube_yamls)
Example 12
def release(
    ctx,
    env,
    component=None,
    image=None,
    tag=None,
    replicas=None,
    dry_run=False,
    keep_configs=False,
    no_rollout_wait=False,
):
    """
    Release to the given environment, optionally limited to specific components.

    :param Context ctx:
    :param str env: The environment.
    :param component: Components to release; defaults to the env's COMPONENTS setting.
    :param image: Image overrides as "component=image" pairs.
    :param tag: Tag overrides as "component=tag" pairs.
    :param replicas: Replica count overrides as "component=count" pairs.
    :param bool dry_run: Only log what would be done.
    :param bool keep_configs: Keep the temporary release configs.
    :param bool no_rollout_wait: Do not wait for rollouts to complete.
    """
    tags: dict = {}
    images: dict = {}
    replica_counts: dict = {}
    components: List[str] = []

    if image:
        for i in image:
            path, value = i.split("=")
            images[path] = value

    if tag:
        for t in tag:
            path, value = t.split("=")
            tags[path] = value

    if replicas:
        for r in replicas:
            path, value = r.split("=")
            replica_counts[path] = value

    rel_id = generate_release_id()
    big_label(logger.info, f"Release {rel_id} to {env} environment starting")
    settings = load_env_settings(env)

    if component:
        components = component
    else:
        components = settings.COMPONENTS

    # Override env settings for replicas
    if replica_counts:
        for path in replica_counts:
            settings.REPLICAS[path] = replica_counts[path]

    rel_path = RELEASE_TMP / rel_id

    logger.info("")
    logger.info("Releasing components:")
    for component in components:
        logger.info(f" - {component}")

    logger.info("")
    logger.info("Setting images and tags:")
    for path in components:
        tag = "(default)"
        image = "(default)"

        if path in tags:
            tag = tags[path]
        if path in images:
            image = images[path]

        logger.info(f" - {path} = {image}:{tag}")
    logger.info("")

    ensure_context(settings.KUBE_CONTEXT)
    ensure_namespace(settings.KUBE_NAMESPACE)
    release_env(ctx, env, dry_run)

    for path in components:
        logger.info("")
        label(logger.info, f"Releasing component {path}")

        component = Component(path)
        if path in images:
            component.image = images[path]
            images.pop(path)
        if path in tags:
            component.tag = tags[path]
            tags.pop(path)
        if path in settings.REPLICAS:
            component.replicas = settings.REPLICAS[path]
            replica_counts.pop(path, None)

        component.namespace = settings.KUBE_NAMESPACE
        component.context = settings.KUBE_CONTEXT
        component.image_pull_secrets = settings.IMAGE_PULL_SECRETS

        component.patch_from_env(env)
        component.validate(ctx)

        component.release(ctx, rel_path, dry_run, no_rollout_wait)

    if images:
        logger.error("Unprocessed image configurations:")
        for path in images:
            logger.error(f" - {path}={images[path]}")

    if tags:
        logger.error("Unprocessed tag configurations:")
        for path in tags:
            logger.error(f" - {path}={tags[path]}")

    if replica_counts:
        logger.error("Unprocessed replica configurations:")
        for path in replica_counts:
            logger.error(f" - {path}={replica_counts[path]}")

    if not keep_configs:
        logger.info(f"Removing temporary configurations from {rel_path}")
        if rel_path.exists():
            rmtree(rel_path)
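
The image, tag, and replicas overrides are all parsed as component=value pairs; a small illustration of the parsing above, with made-up component names and registries:

image = ["api=registry.example.com/myapp/api", "worker=registry.example.com/myapp/worker"]

images = {}
for i in image:
    path, value = i.split("=")
    images[path] = value

# {'api': 'registry.example.com/myapp/api', 'worker': 'registry.example.com/myapp/worker'}
print(images)

Note that split("=") assumes the value itself contains no equals sign; split("=", 1) would be a slightly more defensive choice.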
Example 13
def init_kubernetes(ctx, env):
    """
    Initialize Kubernetes cluster
    :param Context ctx:
    :param str env:
    :return:
    """
    label(logger.info, f"Initializing Kubernetes for {env}")

    settings = load_env_settings(env)
    devops.tasks.ensure_context(settings.KUBE_CONTEXT)
    devops.tasks.ensure_namespace(settings.KUBE_NAMESPACE)

    def _get_kube_files(kube_context):
        kube_files = {f.name: f for f in Path("kube").glob("*.yaml")}

        overrides = (Path("kube") / kube_context / "overrides").glob("*.yaml")
        for f in overrides:
            kube_files[f.name] = f

        # Convert to sorted list
        kube_files = [kube_files[name] for name in sorted(kube_files.keys())]
        return kube_files

    def _apply(config, **kwargs):
        run(["kubectl", "apply", "-f", config], **kwargs)

    secrets = Path("envs") / env / "secrets.pem"
    if env == LOCAL_ENV:
        # Make sure local Sealed Secrets master key is applied first
        master_key = Path("envs") / env / "secrets.key"
        if master_key.exists():
            logger.info(
                f"Applying Sealed Secrets master key from {master_key}")
            _apply(master_key, check=False)

    for c in _get_kube_files(settings.KUBE_CONTEXT):
        _apply(c)

    # Wait for the Sealed Secrets controller to start up
    run([
        "kubectl",
        "rollout",
        "status",
        "--namespace",
        "kube-system",
        "deploy/sealed-secrets-controller",
    ])

    # And try to dump the signing cert
    logger.info("Trying to fetch Sealed Secrets signing cert")
    attempts = 5
    while True:
        try:
            res = run(["kubeseal", "--fetch-cert"])
        except CalledProcessError:
            attempts -= 1
            if attempts <= 0:
                raise Exception("Failed to fetch Sealed Secrets cert")

            sleep(2)
            continue

        with secrets.open("w") as dst:
            dst.write(res.stdout.decode("utf-8"))

        break

    if env == LOCAL_ENV:
        # Store master key if needed
        master_key = Path("envs") / env / "secrets.key"
        if not master_key.exists():
            logger.info("Trying to store Sealed Secrets master key")
            res = run([
                "kubectl",
                "get",
                "secret",
                "--namespace",
                "kube-system",
                "-o",
                "custom-columns=name:metadata.name",
            ])
            secret_names = []
            for line in res.stdout.decode("utf-8").splitlines():
                if line.startswith("sealed-secrets-key"):
                    secret_names.append(line)

            with master_key.open("w") as dst:
                first = True
                for secret in secret_names:
                    if not first:
                        dst.write("---\n")
                    first = False
                    res = run([
                        "kubectl",
                        "get",
                        "secret",
                        "--namespace",
                        "kube-system",
                        secret,
                        "-o",
                        "yaml",
                    ])
                    print(res.stdout)
                    dst.write(res.stdout.decode("utf-8") + "\n")
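
The filename-keyed dict in _get_kube_files means a per-context override silently replaces the base file of the same name; a small illustration with made-up filenames and a made-up context:

from pathlib import Path

base_files = [Path("kube/00-namespace.yaml"), Path("kube/10-ingress.yaml")]
override_files = [Path("kube/minikube-context/overrides/10-ingress.yaml")]

kube_files = {f.name: f for f in base_files}
for f in override_files:
    kube_files[f.name] = f  # a file with the same name in overrides/ wins

# kube/00-namespace.yaml, kube/minikube-context/overrides/10-ingress.yaml
print([str(kube_files[name]) for name in sorted(kube_files)])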