        self.get_secret_api().create_scope(scope_name, None)

    def _add_secrets(self, scope_name: str, secrets: List[Secret]):
        """Add Databricks secrets to the provided scope

        Args:
            scope_name: The name of the scope to create secrets in
            secrets: List of secrets
        """
        for secret in secrets:
            logger.info(f"Set secret {scope_name}: {secret.key}")
            self.get_secret_api().put_secret(scope_name, secret.key, secret.val, None)


SCHEMA = TAKEOFF_BASE_SCHEMA.extend(
    {vol.Required("task"): "create_databricks_secrets_from_vault"}, extra=vol.ALLOW_EXTRA
)


class CreateDatabricksSecretsFromVault(Step, CreateDatabricksSecretsMixin):
    """Will connect to the supplied vault and uses prefixed names to created databricks secrets.

    For example given list of secrets in the vault:

    - `this-app-name-secret-1`
    - `this-app-name-secret-2`
    - `a-different-app-name-secret-3`

    it will register `secret-1` and `secret-2` and their values under the Databricks secret scope
    `this-app-name`, and ignore all other secrets, such as `secret-3`, since they do not match
    the `this-app-name` prefix.
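
    A minimal sketch of the prefix filtering (a hypothetical helper shown for
    illustration, not necessarily this class's actual implementation):

        def _strip_prefix(names: List[str], prefix: str) -> List[str]:
            # keep only names carrying the application prefix, then drop that prefix
            return [n[len(prefix) + 1:] for n in names if n.startswith(prefix + "-")]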
    """

Example 2

DEPLOY_SCHEMA = TAKEOFF_BASE_SCHEMA.extend(
    {
        vol.Required("task"):
        "deploy_to_kubernetes",
        vol.Optional("credentials", default="environment_variables"):
        vol.All(str, vol.In(["environment_variables", "azure_keyvault"])),
        vol.Required("kubernetes_config_path"):
        str,
        vol.Optional(
            "image_pull_secret",
            default={
                "create": True,
                "secret_name": "registry-auth",
                "namespace": "default"
            },
        ): {
            vol.Optional("create", default=True): bool,
            vol.Optional("secret_name", default="registry-auth"): str,
            vol.Optional("namespace", default="default"): str,
        },
        vol.Optional("custom_values", default={}): {},
        vol.Optional("restart_unchanged_resources", default=False):
        bool,
        "azure": {
            vol.Required(
                "kubernetes_naming",
                description=("Naming convention for the resource."
                             "This should include the {env} parameter. For example"
                             "aks_{env}"),
            ):
            str
        },
    },
    extra=vol.ALLOW_EXTRA,
)
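
# Illustrative only: a config shaped to match DEPLOY_SCHEMA above. Any keys that
# TAKEOFF_BASE_SCHEMA itself requires are omitted here and would also be needed;
# the path and names below are hypothetical.
EXAMPLE_DEPLOY_CONFIG = {
    "task": "deploy_to_kubernetes",
    "credentials": "azure_keyvault",
    "kubernetes_config_path": "kubernetes_config/deployment.yaml.j2",
    "image_pull_secret": {"create": True, "secret_name": "registry-auth", "namespace": "default"},
    "restart_unchanged_resources": False,
    "azure": {"kubernetes_naming": "aks_{env}"},
}
# Validation (e.g. DEPLOY_SCHEMA.validate(EXAMPLE_DEPLOY_CONFIG), as used elsewhere in
# these snippets) fills in the remaining defaults such as `custom_values`.
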
Example 3

SCHEMA = TAKEOFF_BASE_SCHEMA.extend(
    {
        vol.Required("task"):
        "deploy_to_databricks",
        vol.Required("jobs"):
        vol.All(
            [{
                vol.Required("main_name"):
                str,
                vol.Optional(
                    "use_original_python_filename",
                    description=("""If you upload multiple unique Python files use this flag to include the
                            original filename in the result. Only impacts Python files."""),
                    default=False,
                ):
                bool,
                vol.Optional("config_file", default="databricks.json.j2"):
                str,
                vol.Optional("name", default=""):
                str,
                vol.Optional("lang", default="python"):
                vol.All(str, vol.In(["python", "scala"])),
                vol.Optional("run_stream_job_immediately", default=True):
                bool,
                vol.Optional("is_batch", default=False):
                bool,
                vol.Optional("arguments", default=[{}]): [{}],
                vol.Optional("schedule"): {
                    vol.Required("quartz_cron_expression"): str,
                    vol.Required("timezone_id"): str,
                },
            }],
            vol.Length(min=1),
        ),
        "common": {
            vol.Optional("databricks_fs_libraries_mount_path"): str
        },
    },
    extra=vol.ALLOW_EXTRA,
)
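
# Illustrative only: one streaming job and one scheduled batch job for the schema
# above. File names, arguments and the cron expression are hypothetical, and any
# keys required by TAKEOFF_BASE_SCHEMA itself are omitted.
EXAMPLE_DATABRICKS_CONFIG = {
    "task": "deploy_to_databricks",
    "jobs": [
        {"main_name": "main/streaming_job.py"},
        {
            "main_name": "main/batch_job.py",
            "is_batch": True,
            "arguments": [{"some_flag": "some_value"}],
            "schedule": {"quartz_cron_expression": "0 0 3 * * ?", "timezone_id": "UTC"},
        },
    ],
    "common": {"databricks_fs_libraries_mount_path": "dbfs:/mnt/libraries"},
}
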
Example 4

from takeoff.util import has_prefix_match, get_whl_name, get_main_py_name

logger = logging.getLogger(__name__)

SCHEMA = TAKEOFF_BASE_SCHEMA.extend(
    {
        vol.Required("task"): "deploy_to_databricks",
        vol.Required("jobs"): vol.All(
            [
                {
                    vol.Required("main_name"): str,
                    vol.Optional("config_file", default="databricks.json.j2"): str,
                    vol.Optional("name", default=""): str,
                    vol.Optional("lang", default="python"): vol.All(str, vol.In(["python", "scala"])),
                    vol.Optional("arguments", default=[{}]): [{}],
                    vol.Optional("schedule"): {
                        vol.Required("quartz_cron_expression"): str,
                        vol.Required("timezone_id"): str,
                    },
                }
            ],
            vol.Length(min=1),
        ),
        "common": {vol.Optional("databricks_fs_libraries_mount_path"): str},
    },
    extra=vol.ALLOW_EXTRA,
)


@dataclass(frozen=True)
class JobConfig(object):
Example 5

    instance: dict
    endpoint: str


@dataclass(frozen=True)
class CosmosCredentials(object):
    uri: str
    key: str


SCHEMA = TAKEOFF_BASE_SCHEMA.extend({
    "azure": {
        vol.Required(
            "cosmos_naming",
            description=("Naming convention for the resource."
                         "This should include the {env} parameter. For example"
                         "cosmos_{env}"),
        ):
        str
    }
})
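
# Illustrative only: the single key this schema adds on top of TAKEOFF_BASE_SCHEMA;
# whatever the base schema requires would also have to be present in a real config.
EXAMPLE_COSMOS_CONFIG = {"azure": {"cosmos_naming": "cosmos_{env}"}}
# Validated via SCHEMA.validate(...), as in Cosmos.__init__ below.
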


class Cosmos(object):
    def __init__(self, env: ApplicationVersion, config: dict):
        self.env = env
        self.config = SCHEMA.validate(config)

    def _get_cosmos_management_client(self) -> CosmosDB:
        vault, client = KeyVaultClient.vault_and_client(self.config, self.env)
        credentials = ActiveDirectoryUserCredentials(
Example 6

from takeoff.azure.credentials.keyvault import KeyVaultClient
from takeoff.azure.credentials.subscription_id import SubscriptionId
from takeoff.azure.util import get_resource_group_name, get_azure_credentials_object
from takeoff.credentials.secret import Secret
from takeoff.schemas import TAKEOFF_BASE_SCHEMA
from takeoff.step import Step

logger = logging.getLogger(__name__)

SCHEMA = TAKEOFF_BASE_SCHEMA.extend(
    {
        vol.Required("task"):
        "create_application_insights",
        vol.Required("kind"):
        vol.Any("web", "ios", "other", "store", "java", "phone"),
        vol.Required("application_type"):
        vol.Any("web", "other"),
        vol.Optional("create_databricks_secret", default=False):
        bool,
    },
    extra=vol.ALLOW_EXTRA,
)
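
# Illustrative only: a config accepted by the schema above (keys required by
# TAKEOFF_BASE_SCHEMA itself are omitted), propagating the instrumentation key
# to Databricks as a secret.
EXAMPLE_INSIGHTS_CONFIG = {
    "task": "create_application_insights",
    "kind": "web",
    "application_type": "web",
    "create_databricks_secret": True,
}
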


class CreateApplicationInsights(Step):
    """Create an Application Insights service

    Credentials for an AAD user (username, password) must be available
    in your cloud vault.

    Optionally propagate the instrumentation key to Databricks as a secret.
    """

Example 7

SCHEMA = TAKEOFF_BASE_SCHEMA.extend(
    {
        vol.Required("task"): "build_docker_image",
        vol.Optional("credentials", default="environment_variables"): vol.All(
            str, vol.In(["environment_variables", "azure_keyvault"])
        ),
        vol.Optional(
            "dockerfiles",
            default=[
                {
                    "file": "Dockerfile",
                    "postfix": None,
                    "prefix": None,
                    "custom_image_name": None,
                    "tag_release_as_latest": True,
                }
            ],
        ): [
            {
                vol.Optional("file", default="Dockerfile", description="Alternative docker file name"): str,
                vol.Optional(
                    "postfix",
                    default=None,
                    description="Postfix for the image name, will be added `before` the tag",
                ): vol.Any(None, str),
                vol.Optional(
                    "prefix",
                    default=None,
                    description=(
                        "Prefix for the image name, will be added `between` the image name"
                        "and repository (e.g. myreg.io/prefix/my-app:tag"
                    ),
                ): vol.Any(None, str),
                vol.Optional(
                    "custom_image_name", default=None, description="A custom name for the image to be used."
                ): vol.Any(None, str),
                vol.Optional(
                    "tag_release_as_latest", default=True, description="Tag a release also as 'latest' image."
                ): vol.Any(None, bool),
            }
        ],
    },
    extra=vol.ALLOW_EXTRA,
)
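
# Illustrative only: the default Dockerfile plus a second, hypothetical Dockerfile
# built under a custom image name; keys required by TAKEOFF_BASE_SCHEMA are omitted.
# Unspecified per-Dockerfile fields (postfix, prefix, ...) get the defaults above.
EXAMPLE_DOCKER_CONFIG = {
    "task": "build_docker_image",
    "credentials": "environment_variables",
    "dockerfiles": [
        {"file": "Dockerfile"},
        {"file": "Dockerfile.api", "custom_image_name": "my-app-api", "tag_release_as_latest": False},
    ],
}
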
Example 8

SCHEMA = TAKEOFF_BASE_SCHEMA.extend(
    {
        vol.Required("task"):
        "configure_eventhub",
        vol.Optional("credentials_type", default="active_directory_user"):
        vol.All(str, vol.In(["active_directory_user", "service_principal"])),
        vol.Optional("credentials", default="azure_keyvault"):
        vol.All(str, vol.In(["azure_keyvault"])),
        vol.Optional("create_consumer_groups"):
        vol.All(
            vol.Length(min=1),
            [{
                vol.Required("eventhub_entity_naming"):
                str,
                vol.Required("consumer_group"):
                str,
                vol.Optional("create_databricks_secret", default=False):
                bool,
                vol.Optional("append_env_to_databricks_secret_name",
                             default=False):
                bool,
            }],
        ),
        vol.Optional("create_producer_policies"):
        vol.All(
            vol.Length(min=1),
            [{
                vol.Required("eventhub_entity_naming"): str,
                vol.Optional("create_databricks_secret", default=False): bool,
            }],
        ),
        "azure": {
            vol.Required(
                "eventhub_naming",
                description=("Naming convention for the resource."
                             "This should include the {env} parameter. For example"
                             "myeventhub{env}"),
            ):
            str
        },
    },
    extra=vol.ALLOW_EXTRA,
)
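
# Illustrative only: one consumer group and one producer policy, both with a
# Databricks secret, for the schema above. Entity and hub names are hypothetical,
# and keys required by TAKEOFF_BASE_SCHEMA itself are omitted.
EXAMPLE_EVENTHUB_CONFIG = {
    "task": "configure_eventhub",
    "create_consumer_groups": [
        {
            "eventhub_entity_naming": "events_{env}",
            "consumer_group": "my-app-consumer",
            "create_databricks_secret": True,
        }
    ],
    "create_producer_policies": [
        {"eventhub_entity_naming": "events_{env}", "create_databricks_secret": True}
    ],
    "azure": {"eventhub_naming": "myeventhub{env}"},
}
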
Example 9

SCHEMA = TAKEOFF_BASE_SCHEMA.extend(
    {
        vol.Required("task"):
        "configure_eventhub",
        vol.Optional("create_consumer_groups"):
        vol.All(
            vol.Length(min=1),
            [{
                vol.Required("eventhub_entity_naming"): str,
                vol.Required("consumer_group"): str,
                vol.Optional("create_databricks_secret", default=False): bool,
            }],
        ),
        vol.Optional("create_producer_policies"):
        vol.All(
            vol.Length(min=1),
            [{
                vol.Required("eventhub_entity_naming"): str,
                vol.Optional("create_databricks_secret", default=False): bool,
            }],
        ),
        "azure": {
            vol.Required(
                "eventhub_naming",
                description=("Naming convention for the resource."
                             "This should include the {env} parameter. For example"
                             "myeventhub{env}"),
            ):
            str
        },
    },
    extra=vol.ALLOW_EXTRA,
)
Example 10

DEPLOY_SCHEMA = TAKEOFF_BASE_SCHEMA.extend(
    {
        vol.Required("task"):
        "deploy_to_kubernetes",
        vol.Optional("credentials_type", default="active_directory_user"):
        vol.All(str, vol.In(["active_directory_user", "service_principal"])),
        vol.Optional("credentials", default="azure_keyvault"):
        vol.All(str, vol.In(["azure_keyvault"])),
        vol.Required("kubernetes_config_path"):
        str,
        vol.Optional(
            "image_pull_secret",
            default={
                "create": True,
                "secret_name": "registry-auth",
                "namespace": "default"
            },
        ): {
            vol.Optional("create", default=True): bool,
            vol.Optional("secret_name", default="registry-auth"): str,
            vol.Optional("namespace", default="default"): str,
        },
        vol.Optional("custom_values", default={}): {},
        vol.Optional("restart_unchanged_resources", default=False):
        bool,
        vol.Optional("wait_for_rollout"): {
            vol.Optional("resource_name", default="foo/bar"):
            vol.All(str, vol.Match("^.*/.*$")),
            vol.Optional("resource_namespace", default=""):
            str,
        },
        "azure": {
            vol.Required(
                "kubernetes_naming",
                description=("Naming convention for the resource."
                             "This should include the {env} parameter. For example"
                             "aks_{env}"),
            ):
            str
        },
    },
    extra=vol.ALLOW_EXTRA,
)
Example 11

import logging
import shutil

import voluptuous as vol

from takeoff.application_version import ApplicationVersion
from takeoff.schemas import TAKEOFF_BASE_SCHEMA
from takeoff.step import Step
from takeoff.util import run_shell_command

logger = logging.getLogger(__name__)

SCHEMA = TAKEOFF_BASE_SCHEMA.extend(
    {
        vol.Required("task"): "build_artifact",
        vol.Required("build_tool"): vol.All(str, vol.In(["python", "sbt"])),
    },
    extra=vol.ALLOW_EXTRA,
)
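
# Illustrative only: the two keys this schema adds on top of TAKEOFF_BASE_SCHEMA
# (whatever the base schema requires is omitted here).
EXAMPLE_BUILD_CONFIG = {"task": "build_artifact", "build_tool": "python"}
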


class BuildArtifact(Step):
    def __init__(self, env: ApplicationVersion, config: dict):
        """Build an artifact"""
        super().__init__(env, config)

    def run(self):
        if self.config["build_tool"] == "python":
            self.build_python_wheel()
        elif self.config["build_tool"] == "sbt":
            self.build_sbt_assembly_jar()