def __init__(self, env: ApplicationVersion, config: dict):
    """Wire up the vault and Databricks clients used for secret management.

    Resolves the key vault name/client from config, builds a Databricks API
    client authenticated via that vault, and prepares a SecretApi wrapper.
    """
    super().__init__(env, config)
    vault_name, vault_client = KeyVaultClient.vault_and_client(self.config, self.env)
    self.vault_name = vault_name
    self.vault_client = vault_client
    databricks = Databricks(vault_name, vault_client)
    self.databricks_client = databricks.api_client(self.config)
    self.secret_api = SecretApi(self.databricks_client)
def export_cli(dry_run, tag, delete, git_ssh_url, api_client: ApiClient, hcl, pattern_matches):
    """Export Databricks secrets as Terraform HCL files into a git repository.

    Iterates every secret scope, filters secret keys through ``pattern_matches``,
    renders each matching secret as a ``databricks_secret`` resource, and commits
    the resulting ``.tf`` files via GitExportHandler. No-op unless ``hcl`` is set.
    """
    if not hcl:
        return
    resource_blocks = {}
    ignored_attrs = {"last_updated_timestamp"}
    required_attrs = {"key"}
    secret_api = SecretApi(api_client)
    scopes = secret_api.list_scopes()["scopes"]
    log.info(scopes)
    with GitExportHandler(git_ssh_url, "secrets", delete_not_found=delete,
                          dry_run=dry_run, tag=tag) as handler:
        for scope in scopes:
            scope_name = scope["name"]
            secrets = secret_api.list_secrets(scope_name)["secrets"]
            log.info(secrets)
            for secret in secrets:
                key = secret["key"]
                if not pattern_matches(key):
                    log.debug(f"{key} did not match pattern function {pattern_matches}")
                    continue
                log.debug(f"{key} matched the pattern function {pattern_matches}")
                resource_data = prep_json(resource_blocks, ignored_attrs, secret, required_attrs)
                resource_data["scope"] = scope_name
                identifier = f"databricks_secret-{normalize_identifier(key)}"
                secret_hcl = create_resource_from_dict("databricks_secret", identifier,
                                                       resource_data, False)
                handler.add_file(f"{identifier}.tf", secret_hcl)
                log.debug(secret_hcl)
def list_acls(api_client, scope, output):
    """
    Lists the ACLs set on the given secret scope.
    """
    acls_json = SecretApi(api_client).list_acls(scope)
    # Table view is the default; JSON only on explicit request.
    if not OutputClickType.is_json(output):
        click.echo(tabulate(_acls_to_table(acls_json), headers=ACL_HEADER))
    else:
        click.echo(pretty_format(acls_json))
def put_acl(api_client, scope, principal, permission):
    """
    Creates or overwrites the ACL associated with the given principal (user or group) on the
    specified secret scope.

    "databricks secrets write-acl" is an alias for "databricks secrets put-acl", and will be
    deprecated in a future release.
    """
    secret_api = SecretApi(api_client)
    secret_api.put_acl(scope, principal, permission)
def list_scopes(api_client, output):
    """
    Lists all secret scopes.
    """
    scopes_json = SecretApi(api_client).list_scopes()
    if OutputClickType.is_json(output):
        click.echo(pretty_format(scopes_json))
        return
    rows = _scopes_to_table(scopes_json)
    click.echo(tabulate(rows, headers=SCOPE_HEADER))
def get_acl(api_client, scope, principal, output):
    """
    Describes the details about the given ACL for the principal and secret scope.
    """
    acl_json = SecretApi(api_client).get_acl(scope, principal)
    if OutputClickType.is_json(output):
        click.echo(pretty_format(acl_json))
        return
    # Wrap the single ACL in the list shape _acls_to_table expects.
    rows = _acls_to_table({'items': [acl_json]})
    click.echo(tabulate(rows, headers=ACL_HEADER))
def list_secrets(api_client, scope, output):
    """
    Lists the secret keys that are stored at this scope. Also lists the last updated timestamp
    (UNIX time in milliseconds) if available.
    """
    secrets_json = SecretApi(api_client).list_secrets(scope)
    if not OutputClickType.is_json(output):
        rows = _secrets_to_table(secrets_json)
        click.echo(tabulate(rows, headers=SECRET_HEADER))
    else:
        click.echo(pretty_format(secrets_json))
def create_scope(api_client, scope, initial_manage_principal, scope_backend_type, resource_id,
                 dns_name):
    """
    Creates a new secret scope with given name.
    """
    # Azure Key Vault backing metadata; passed through regardless of backend type,
    # matching the original behavior (the API ignores it for non-AKV backends).
    keyvault_metadata = {'resource_id': resource_id, 'dns_name': dns_name}
    SecretApi(api_client).create_scope(scope, initial_manage_principal, scope_backend_type,
                                       keyvault_metadata)
def export_cli(dry_run, tag, delete, git_ssh_url, api_client: ApiClient, hcl, pattern_matches):
    """Export Databricks secret ACLs as Terraform HCL files into a git repository.

    Iterates every secret scope, renders each ACL as a ``databricks_secret_acl``
    resource, and commits the resulting ``.tf`` files via GitExportHandler.
    No-op unless ``hcl`` is set. ``pattern_matches`` is accepted for signature
    parity with the secrets exporter but is not consulted for ACLs.
    """
    if not hcl:
        return
    block_key_map = {}
    # FIX: was `{}`, an empty *dict* literal, not a set — inconsistent with the
    # sibling secrets exporter, which uses set literals for attribute-key sets.
    ignore_attribute_key = set()
    required_attributes_key = {"principal", "permission"}
    secret_api = SecretApi(api_client)
    scopes = secret_api.list_scopes()["scopes"]
    log.info(scopes)
    with GitExportHandler(git_ssh_url, "secret_acls", delete_not_found=delete,
                          dry_run=dry_run, tag=tag) as gh:
        for scope in scopes:
            acls = secret_api.list_acls(scope["name"])["items"]
            log.info(acls)
            for acl in acls:
                acl_resource_data = prep_json(block_key_map, ignore_attribute_key, acl,
                                              required_attributes_key)
                acl_resource_data["scope"] = scope["name"]
                base_name = normalize_identifier(acl["principal"])
                identifier = f"databricks_secret_acl-{base_name}"
                acl_hcl = create_resource_from_dict("databricks_secret_acl", identifier,
                                                    acl_resource_data, False)
                gh.add_file(f"{identifier}.tf", acl_hcl)
                log.debug(acl_hcl)
class CreateDatabricksSecretsFromVault(Step, CreateDatabricksSecretsMixin):
    """Will connect to the supplied vault and uses prefixed names to create databricks secrets.

    For example given list of secrets in the vault:

    - `this-app-name-secret-1`
    - `this-app-name-secret-2`
    - `a-different-app-name-secret-3`

    it will register `secret-1` and `secret-2` and their values under the databricks secret scope
    `this-app-name` and ignore all other secrets, such as `secret-3` as it does not match the
    `this-app-name` prefix.
    """

    def get_secret_api(self):
        """Return the SecretApi instance (hook expected by CreateDatabricksSecretsMixin)."""
        return self.secret_api

    def __init__(self, env: ApplicationVersion, config: dict):
        """Resolve the key vault and build an authenticated Databricks SecretApi client."""
        super().__init__(env, config)
        self.vault_name, self.vault_client = KeyVaultClient.vault_and_client(
            self.config, self.env)
        self.databricks_client = Databricks(
            self.vault_name, self.vault_client).api_client(self.config)
        self.secret_api = SecretApi(self.databricks_client)

    def run(self):
        """Entry point for the deployment step: push all matching secrets to Databricks."""
        self.create_databricks_secrets()

    def create_databricks_secrets(self):
        """Create the application's secret scope and register all combined secrets in it."""
        secrets = self._combine_secrets()
        # _create_scope / _add_secrets are provided by CreateDatabricksSecretsMixin.
        self._create_scope(self.application_name)
        self._add_secrets(self.application_name, secrets)
        logging.info(
            f'------ {len(secrets)} secrets created in "{self.env.environment}"'
        )
        pprint(self.secret_api.list_secrets(self.application_name))

    def _combine_secrets(self):
        """Merge vault secrets (prefix-filtered) with deployment-yaml secrets, deduplicated.

        NOTE(review): deduplication via set() assumes the secret objects are
        hashable and compare by value — confirm in their class definitions.
        """
        vault_secrets = KeyVaultCredentialsMixin(
            self.vault_name,
            self.vault_client).get_keyvault_secrets(self.application_name)
        deployment_secrets = DeploymentYamlEnvironmentVariablesMixin(
            self.env, self.config).get_deployment_secrets()
        return list(set(vault_secrets + deployment_secrets))

    def schema(self) -> vol.Schema:
        """Return the voluptuous schema used to validate this step's config block."""
        return SCHEMA
def put_secret(api_client, scope, key, string_value, binary_file):
    """
    Puts a secret in the provided scope with the given name. Overwrites any existing value if the
    name exists.

    You should specify at most one option in "string-value" and "binary-file".

    If "string-value", the argument will be stored in UTF-8 (MB4) form.

    If "binary-file", the argument should be a path to file. File content will be read as secret
    value and stored as bytes.

    If none of "string-value" and "binary-file" specified, an editor will be opened for inputting
    secret value. The value will be stored in UTF-8 (MB4) form.

    "databricks secrets write" is an alias for "databricks secrets put", and will be deprecated in
    a future release.
    """
    # Exactly one of the two payload forms survives validation; the other is None.
    string_param, bytes_param = _verify_and_translate_options(string_value, binary_file)
    secret_api = SecretApi(api_client)
    secret_api.put_secret(scope, key, string_param, bytes_param)
"""Provision the `dbjl-pytest` secret scope and key used by the test suite."""
import sys

from databricks_cli.secrets.api import SecretApi
from databrickslabs_jupyterlab.remote import connect
from helpers import get_profile

profile = get_profile()

try:
    apiclient = connect(profile)
    client = SecretApi(apiclient)
except Exception as ex:  # pylint: disable=broad-except
    # Any connection/auth failure is fatal for test setup; report and bail.
    print(ex)
    sys.exit(1)

client.create_scope("dbjl-pytest", None)
client.put_secret("dbjl-pytest", "pytest-key", "databrickslabs-jupyterlab", None)
def delete_acl(api_client, scope, principal):
    """
    Deletes the given ACL on the given secret scope.
    """
    secret_api = SecretApi(api_client)
    secret_api.delete_acl(scope, principal)
def delete_secret(api_client, scope, key):
    """
    Deletes the secret stored in this scope.
    """
    secret_api = SecretApi(api_client)
    secret_api.delete_secret(scope, key)
def delete_scope(api_client, scope):
    """
    Deletes a secret scope.
    """
    secret_api = SecretApi(api_client)
    secret_api.delete_scope(scope)
def create_scope(api_client, scope, initial_manage_principal):
    """
    Creates a new secret scope with given name.
    """
    secret_api = SecretApi(api_client)
    secret_api.create_scope(scope, initial_manage_principal)