def _get_database_uri():
    """Resolve the SQLAlchemy database URI for the current environment.

    Resolution order:
      1. Running as an Azure App Service -> fetch the "database-url" secret
         from Key Vault using the managed service identity (MSI).
      2. DATABASE_URL environment variable, if set.
      3. Fall back to the local testing database from TestingConfig.

    Returns:
        str: a database connection URI.
    """
    if _is_app_service():
        logger.info("Getting Production DB credentials from Keyvault")
        credentials = MSIAuthentication()
        key_vault_client = KeyVaultClient(credentials)
        # KEY_VAULT_URI must be the vault base URL,
        # e.g. https://<vault-name>.vault.azure.net/
        key_vault_uri = os.environ.get("KEY_VAULT_URI", "PUT YOUR KV BASE URL HERE")
        logger.info("Using KEY_VAULT_URI: %s", key_vault_uri)
        # Fix: removed the "XXX TEMP DEBUG" log that exposed the secret's
        # type and length — secret characteristics do not belong in logs.
        return key_vault_client.get_secret(
            vault_base_url=key_vault_uri,
            secret_name="database-url",
            secret_version=""  # empty string -> latest version
        ).value
    # TODO: this is just to allow the create_db script to be run locally on
    # initial setup. Really, the KV code above should be updated to work
    # with other non-MSI credentials.
    if "DATABASE_URL" in os.environ:
        logger.info("Using DATABASE_URL credentials")
        return os.environ['DATABASE_URL']
    logger.info("Using local Testing Database")
    return TestingConfig.SQLALCHEMY_DATABASE_URI
def login(self, credentials=None):
    """Authenticate self.client against Key Vault.

    When no credentials are supplied, fall back to the (redacted)
    service-principal values embedded below.
    """
    creds = credentials
    if creds is None:
        creds = ServicePrincipalCredentials(
            client_id='***********************',  # appId
            secret='***********************',     # password
            tenant='***********************')
    self.client = KeyVaultClient(creds)
def __init__(self, config=None):
    """Build the sample's two Key Vault clients: one authenticated as a
    service principal, one authenticated interactively as a user."""
    from azure.keyvault import KeyVaultClient, KeyVaultAuthentication, AccessToken
    from msrestazure.azure_active_directory import ServicePrincipalCredentials
    super(StorageAccountSample, self).__init__(config=config)

    sp_creds = ServicePrincipalCredentials(client_id=self.config.client_id,
                                           secret=self.config.client_secret,
                                           tenant=self.config.tenant_id)
    self.keyvault_sp_client = KeyVaultClient(sp_creds)

    # the key vault storage methods, storage_account_set and
    # regenerate_storage_account_key, must be called by an authenticated
    # user (not a service principal), so we create a secondary client
    # which authenticates a user using device code authentication
    def authenticate_user(server, resource, scope, scheme):
        token = self.get_user_token(resource=resource)
        return AccessToken(scheme=token['tokenType'],
                           token=token['accessToken'],
                           key=None)

    self.keyvault_user_client = KeyVaultClient(
        KeyVaultAuthentication(authorization_callback=authenticate_user))

    # resolved lazily after the first user login
    self._user_id = None
    self._user_oid = None
def get_secret_from_keyvault(resource_packet: str) -> str:
    """Retrieve a secret value from Key Vault using MSI authentication.

    Args:
        resource_packet: pipe-delimited packet "vault_uri|secret_id|secret_version"
            identifying the secret to fetch. (The previous docstring claimed a
            colon-delimited format, but the code has always split on "|".)

    Returns:
        The secret value for the specified version of the secret.

    Raises:
        ValueError: if resource_packet does not contain exactly three
            pipe-delimited fields.
    """
    # This method will raise an exception on failure. It has not been
    # hardened in any way.
    logger.info('INFO: Attempting to retrieve secret from KeyVault')

    # Fix: validate the packet up front so a malformed value produces a
    # clear error instead of an opaque unpacking failure.
    parts = resource_packet.split("|")
    if len(parts) != 3:
        raise ValueError(
            "resource_packet must be formatted as 'vault_uri|secret_id|secret_version'")
    resource_uri, secret_id, secret_version = parts

    # Create MSI Authentication and a Key Vault client around it.
    credentials = MSIAuthentication()
    client = KeyVaultClient(credentials)

    # Retrieve the secret; the value lives in the bundle's .value field.
    secret_bundle = client.get_secret(resource_uri, secret_id, secret_version)
    logger.info('INFO: Secret successfully retrieved from KeyVault')
    return secret_bundle.value
def auth_using_service_principle_credentials(self):
    """
    authenticates to the Azure Key Vault service using AAD service principle credentials
    """
    # create a vault to validate authentication with the KeyVaultClient
    vault = self.create_vault()

    # build the service-principal credential and a client around it
    sp_credentials = ServicePrincipalCredentials(client_id=self.config.client_id,
                                                 secret=self.config.client_secret,
                                                 tenant=self.config.tenant_id)
    client = KeyVaultClient(sp_credentials)

    # round-trip a secret through the vault to prove the client is authenticated
    print('creating secret...')
    print(client.set_secret(vault.properties.vault_uri,
                            'auth-sample-secret',
                            'client is authenticated to the vault'))
    print('getting secret...')
    print(client.get_secret(vault.properties.vault_uri,
                            'auth-sample-secret',
                            secret_version=KeyVaultId.version_none))
def handleHello():
    """HTTP handler: read the 'hellomessage' secret from Key Vault and return
    it as JSON; on any failure, return a 500 with the exception details."""
    try:
        sys.stdout.write(keyVaultURL + "\n")
        # MSI credentials scoped to the Key Vault resource
        credentials = MSIAuthentication(resource="https://vault.azure.net")
        sys.stdout.write("before kv client\n")
        key_vault_client = KeyVaultClient(credentials)
        sys.stdout.write("before secret\n")
        secret = key_vault_client.get_secret(
            keyVaultURL,     # Your KeyVault URL
            "hellomessage",  # Name of your secret. If you followed the README 'secret' should exists
            ""               # The version of the secret. Empty string for latest
        )
        sys.stdout.write("after secret\n")
        res = jsonify({"Message:": secret.value})
    except Exception as e:
        template = "An exception of type {0} occurred. Arguments:\n{1!r}"
        res = jsonify({"Message:": template.format(type(e).__name__, e.args)})
        res.status_code = 500
    sys.stdout.flush()
    return res
def main():
    """Load service-principal settings from azurermconfig.json and print
    every secret stored in the configured key vault."""
    # Load Azure app defaults
    try:
        with open('azurermconfig.json') as config_file:
            config_data = json.load(config_file)
    except FileNotFoundError:
        sys.exit("Error: Expecting azurermconfig.json in current folder")

    # Fix: a missing setting previously crashed with a raw KeyError;
    # fail with a clear message, matching the FileNotFoundError handling.
    try:
        tenant_id = config_data['tenantId']
        app_id = config_data['appId']
        app_secret = config_data['appSecret']
        kv_name = config_data['keyvault']
    except KeyError as err:
        sys.exit(f"Error: azurermconfig.json is missing the {err} setting")

    key_vault_uri = f'https://{kv_name}.vault.azure.net/'

    # get credentials
    credentials = ServicePrincipalCredentials(client_id=app_id,
                                              secret=app_secret,
                                              tenant=tenant_id)

    # get a key vault client
    client = KeyVaultClient(credentials)

    # list the secrets; each id ends in .../secrets/<name>
    secrets = client.get_secrets(key_vault_uri)
    print('Listing secrets')
    for secret_item in secrets:
        secret_name = secret_item.id.split('/secrets/', 1)[1]
        secret_bundle = client.get_secret(key_vault_uri, secret_name, '')
        print(f'{secret_name}: {secret_bundle.value}')
def get_secret_values(self, secret_names):
    """Reads a list of secrets from Key Vault. Returns a dictionary.

    Disabled secrets return an empty string for their value. Inactive
    secrets (outside their activation date range, if defined) return their
    actual value. We're not currently supporting activation and
    deactivation dates.
    """
    cfg = self._config
    client = KeyVaultClient(ServicePrincipalCredentials(
        client_id=cfg["client_id"],
        secret=cfg["app_secret"],
        tenant=cfg["tenant_id"],
    ))

    values = {}
    for name in secret_names:
        try:
            # "" -> latest version of the secret
            values[name] = client.get_secret(cfg["key_vault_uri"], name, "").value
        except KeyVaultErrorException:
            # disabled secrets raise; report them as empty
            values[name] = ""
    return values
def auth_using_adal_callback(self):
    """
    authenticates to the Azure Key Vault by providing a callback to authenticate using adal
    """
    # create a vault to validate authentication with the KeyVaultClient
    vault = self.create_vault()

    import adal

    # adal context bound to this tenant's login endpoint
    auth_context = adal.AuthenticationContext(
        'https://login.microsoftonline.com/%s' % self.config.tenant_id)

    def adal_callback(server, resource, scope):
        # supply the token type and access token on request
        token = auth_context.acquire_token_with_client_credentials(
            resource=resource,
            client_id=self.config.client_id,
            client_secret=self.config.client_secret)
        return token['tokenType'], token['accessToken']

    # client that defers to adal_callback for every token request
    client = KeyVaultClient(KeyVaultAuthentication(adal_callback))

    # round-trip a secret to prove the client is authenticated
    print('creating secret...')
    print(client.set_secret(vault.properties.vault_uri,
                            'auth-sample-secret',
                            'client is authenticated to the vault'))
    print('getting secret...')
    print(client.get_secret(vault.properties.vault_uri,
                            'auth-sample-secret',
                            secret_version=KeyVaultId.version_none))
def setup_sample(self):
    """
    Provides common setup for Key Vault samples, such as creating rest
    clients, creating a sample resource group if needed, and ensuring
    proper access for the service principal.

    :return: None
    """
    # guard clause: run the (idempotent) setup only once
    if self._setup_complete:
        return

    self.mgmt_creds = ServicePrincipalCredentials(client_id=self.config.client_id,
                                                  secret=self.config.client_secret,
                                                  tenant=self.config.tenant_id)
    self.data_creds = ServicePrincipalCredentials(client_id=self.config.client_id,
                                                  secret=self.config.client_secret,
                                                  tenant=self.config.tenant_id)
    self.resource_mgmt_client = ResourceManagementClient(
        self.mgmt_creds, self.config.subscription_id)

    # ensure the service principle has key vault as a valid provider
    self.resource_mgmt_client.providers.register('Microsoft.KeyVault')

    # ensure the intended resource group exists
    self.resource_mgmt_client.resource_groups.create_or_update(
        self.config.group_name, {'location': self.config.location})

    self.keyvault_mgmt_client = KeyVaultManagementClient(
        self.mgmt_creds, self.config.subscription_id)
    self.keyvault_data_client = KeyVaultClient(self.data_creds)

    self._setup_complete = True
def get_secret_value(secret):
    """Fetch the latest version of *secret* from the vault named by the
    KEY_VAULT_URL environment variable, authenticating with the managed
    identity identified by MANAGED_IDENTITY_CLIENT_ID."""
    msi_client_id = os.environ.get("MANAGED_IDENTITY_CLIENT_ID")
    vault_url = os.environ.get("KEY_VAULT_URL")
    client = KeyVaultClient(MSIAuthentication(client_id=msi_client_id))
    # "" -> latest version of the secret
    return client.get_secret(vault_url, secret, "").value
def main():
    """Fetch one secret version from Key Vault with service-principal auth
    and append it as NAME=VALUE to the Codefresh exported-env-vars file."""
    client_id = os.getenv('AZURE_CLIENT_ID')
    service_principal_secret = os.getenv('AZURE_SECRET')
    tenant = os.getenv('AZURE_TENANT')
    secret_id = os.getenv('AZURE_SECRET_ID')
    secret_version = os.getenv('AZURE_SECRET_VERSION')
    vault_url = os.getenv('AZURE_VAULT_URL')

    # Auth with Service Principal
    client = KeyVaultClient(
        ServicePrincipalCredentials(client_id=client_id,
                                    secret=service_principal_secret,
                                    tenant=tenant))

    # Get Azure Secret from Azure Vault
    # VAULT_URL must be in the format 'https://<vaultname>.vault.azure.net'
    # SECRET_VERSION is required, and can be obtained with the
    # KeyVaultClient.get_secret_versions(self, vault_url, secret_id) API
    secret = client.get_secret(vault_url, secret_id, secret_version).value

    file_path = '/codefresh/volume/env_vars_to_export'
    with open(file_path, 'a') as file:
        file.write("{}={}\n".format(secret_id, secret))
def get_keyvault_client(self):
    """Return an authenticated KeyVaultClient.

    Tries MSI first (only when auth_source is 'msi'); otherwise falls back
    to a service-principal token callback built from self.credentials.
    """
    # Don't use MSI credentials if the auth_source isn't set to MSI. The below will Always result in credentials when running on an Azure VM.
    if self.module.params['auth_source'] == 'msi':
        try:
            self.log("Get KeyVaultClient from MSI")
            # derive the vault DNS suffix for this cloud (e.g. vault.azure.net)
            resource = self.azure_auth._cloud_environment.suffixes.keyvault_dns.split('.', 1).pop()
            credentials = MSIAuthentication(resource="https://{0}".format(resource))
            return KeyVaultClient(credentials)
        except Exception:
            # MSI unavailable — fall through to the service-principal path
            self.log("Get KeyVaultClient from service principal")

    # Create KeyVault Client using KeyVault auth class and auth_callback
    def auth_callback(server, resource, scope):
        if self.credentials['client_id'] is None or self.credentials['secret'] is None:
            self.fail('Please specify client_id, secret and tenant to access azure Key Vault.')
        tenant = self.credentials.get('tenant')
        if not self.credentials['tenant']:
            # no tenant supplied — use the multi-tenant endpoint
            tenant = "common"
        authcredential = ServicePrincipalCredentials(
            client_id=self.credentials['client_id'],
            secret=self.credentials['secret'],
            tenant=tenant,
            cloud_environment=self._cloud_environment,
            resource="https://vault.azure.net")
        token = authcredential.token
        return token['token_type'], token['access_token']

    return KeyVaultClient(KeyVaultAuthentication(auth_callback))
def secret(self, secretID=ID_ERR_MISSING_SID, *args):
    """Look up *secretID* in the vault and return a status dictionary.

    secretID defaults to ID_ERR_MISSING_SID so a missing argument can be
    detected; any extra positional args mark the request as malformed.

    Returns:
        dict with 'status' and 'secretID' keys, plus 'secret' on success.
    """
    # assume an error
    rc = {'status': ID_STATUS_MISSING_SECRET, 'secretID': secretID}
    # if properly formatted, args will be empty and secretID will be
    # something other than ID_ERR_MISSING_SID
    if len(args) == 0 and not secretID == ID_ERR_MISSING_SID:
        # reauthenticate each time so if anything changed we will fail
        # (is this a good idea?)
        vaultClient = KeyVaultClient(sbdVaultCreds)
        try:
            # '' -> latest version; should really use get_secret_versions()
            # but it seems broken (thanks Zach for the simple workaround!)
            sBundle = vaultClient.get_secret(sbdVaultURL, secretID, '')
            rc = {'status': ID_STATUS_OK, 'secretID': secretID,
                  'secret': sBundle.value}
        except Exception:
            # Fix: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit; narrowed to Exception.
            rc = {'status': ID_STATUS_SECRET_NOT_FOUND, 'secretID': secretID}
    return rc
def get_keyvault_client(self):
    """Return an authenticated KeyVaultClient: MSI first, then a
    service-principal token callback built from self.credentials.

    NOTE(review): MSIAuthentication will generally succeed on any Azure VM,
    so the service-principal fallback may never be reached there even when
    service-principal auth was intended — confirm this is the desired order.
    """
    try:
        self.log("Get KeyVaultClient from MSI")
        credentials = MSIAuthentication(resource='https://vault.azure.net')
        return KeyVaultClient(credentials)
    except Exception:
        # MSI unavailable — fall through to the service-principal path
        self.log("Get KeyVaultClient from service principal")

    # Create KeyVault Client using KeyVault auth class and auth_callback
    def auth_callback(server, resource, scope):
        if self.credentials['client_id'] is None or self.credentials[
                'secret'] is None:
            self.fail(
                'Please specify client_id, secret and tenant to access azure Key Vault.'
            )
        tenant = self.credentials.get('tenant')
        if not self.credentials['tenant']:
            # no tenant supplied — use the multi-tenant endpoint
            tenant = "common"
        authcredential = ServicePrincipalCredentials(
            client_id=self.credentials['client_id'],
            secret=self.credentials['secret'],
            tenant=tenant,
            cloud_environment=self._cloud_environment,
            resource="https://vault.azure.net")
        token = authcredential.token
        return token['token_type'], token['access_token']

    return KeyVaultClient(KeyVaultAuthentication(auth_callback))
def main(req: func.HttpRequest) -> func.HttpResponse:
    """Azure Function: fetch the SQL password from Key Vault via MSI,
    connect to the database, and return the result of SELECT @@version."""
    logging.info('Python HTTP trigger function processed a request.')

    # set variables (todo: make environment variables of this)
    server = "<<your logical sql sever name>>.database.windows.net"
    database = "<<your sqldb name>>"
    username = "******"
    secretname = "<<your secretname in you key vault>>"
    keyvaultname = "https://<<your key vault name>>.vault.azure.net/"

    # get secrets from key vault ("" -> latest secret version)
    credentials = MSIAuthentication(resource='https://vault.azure.net')
    kvclient = KeyVaultClient(credentials)
    password = kvclient.get_secret(keyvaultname, secretname, "").value

    # Make connection to database
    connectionstring = ('DRIVER={ODBC Driver 17 for SQL Server};SERVER=' + server +
                        ';DATABASE=' + database + ';UID=' + username + ';PWD=' + password)
    # Fix: the original logged the connection string verbatim, which wrote
    # the database password into the function logs. Log a redacted copy.
    logging.info(connectionstring.replace(password, '***') if password else connectionstring)
    cnxn = pyodbc.connect(connectionstring)
    cursor = cnxn.cursor()

    # fire query
    cursor.execute("SELECT @@version;")
    row = cursor.fetchone()
    logging.info((row[0]))
    return func.HttpResponse(str(row[0]))
def get_storage_account_key(accountname, client_id, secret_key, tenant_id,
                            keyvault_account):
    """
    Uses the azure management package and the active directory credentials
    to fetch the authentication key for a storage account from azure key
    vault. The key must be stored in azure keyvault for this to work.
    :param str accountname: storage account name
    """
    def auth_callback(server, resource, scope):
        # mint a fresh service-principal token on every request
        sp = ServicePrincipalCredentials(
            client_id=client_id,
            secret=secret_key,
            tenant=tenant_id,
            resource="https://vault.azure.net")
        tok = sp.token
        return tok['token_type'], tok['access_token']

    kv_client = KeyVaultClient(KeyVaultAuthentication(auth_callback))
    vault_url = "https://{}.vault.azure.net/".format(keyvault_account)

    try:
        # passing in empty string for version returns latest key
        bundle = kv_client.get_secret(vault_url, accountname, "")
    except KeyVaultErrorException:
        err_str = "The pipeline is not setup to use the {} account. ".format(
            accountname)
        err_str += "please add the storage key for the account to {} ".format(
            keyvault_account)
        err_str += "as a secret. All input/output paths should start with accountname"
        raise UnconfiguredStorageAccountError(err_str)

    return bundle.value
def __init__(self, client_id, secret, tenant, resource):
    """Initialize the connection to Key Vault"""
    # authenticate as a service principal and keep the ready client
    self.client = KeyVaultClient(
        ServicePrincipalCredentials(client_id=client_id,
                                    secret=secret,
                                    tenant=tenant,
                                    resource=resource))
def __init__(self, vault_base_url: str = None, client_id: str = None,
             secret: str = None, tenant: str = None):
    """Store connection settings, falling back to environment variables,
    and build the authenticated Key Vault client.

    Args:
        vault_base_url: vault URL; falls back to AZURE_VAULT_BASE_URL.
        client_id: AAD app id; falls back to AZURE_CLIENT_ID.
        secret: AAD app secret; falls back to AZURE_SECRET_KEY.
        tenant: AAD tenant id; falls back to AZURE_TENANT_ID.

    Exits the process with status 1 when a required setting is neither
    passed in nor present in the environment.
    """
    self.vault_base_url = vault_base_url
    self.client_id = client_id
    self.secret = secret
    self.tenant = tenant
    try:
        if self.vault_base_url is None:
            self.vault_base_url = os.environ["AZURE_VAULT_BASE_URL"]
        if self.client_id is None:
            self.client_id = os.environ['AZURE_CLIENT_ID']
        if self.secret is None:
            self.secret = os.environ['AZURE_SECRET_KEY']
        if self.tenant is None:
            self.tenant = os.environ['AZURE_TENANT_ID']
    except KeyError as e:
        # Fix: the error message was printed to stdout; diagnostics
        # belong on stderr.
        print("Did you forget to set the environment variable?", e,
              file=sys.stderr)
        sys.exit(1)
    self.client = KeyVaultClient(KeyVaultAuthentication(_auth_callback))
def main(req: func.HttpRequest) -> func.HttpResponse:
    """Azure Function: convert Excel blob(s) to CSV.

    Request JSON may carry source_container/source_blob/dest_folder/
    dest_container; when source_blob is absent, a whole-container
    conversion is run instead of a single-blob conversion.
    """
    req_body = req.get_json()
    source_container = req_body.get("source_container")
    source_blob_name = req_body.get("source_blob")
    dest_folder = req_body.get("dest_folder")
    dest_container = req_body.get("dest_container")

    # storage account key comes from Key Vault via managed identity
    credentials = MSIAuthentication(resource='https://vault.azure.net')
    kvclient = KeyVaultClient(credentials)
    key = kvclient.get_secret("https://zhenzh-databricks-kv.vault.azure.net/",
                              "zhenzh-python-func-account-key", "").value

    converter = Excel2Csv(
        BlockBlobService(account_name='zhenzhadfblobsource', account_key=key))
    if source_blob_name is None:
        converter.convert_and_upload(source_container, dest_container,
                                     "raw", "curated")
    else:
        converter.convert_and_upload_blob("apachelog-analysis",
                                          "apachelog-analysis",
                                          source_blob_name, dest_folder)

    # Fix: the original returned a plain JSON string despite the
    # func.HttpResponse return annotation; wrap it properly.
    return func.HttpResponse(json.dumps({"result": "Conversion Finished!"}),
                             mimetype="application/json")
def main(argv):
    """List every secret in a vault and print {name: latest_version} as JSON.

    Expects four CLI arguments:
        [1] vault_url  [2] client_id  [3] client secret key  [4] tenant_id
    """
    # Fix: the original checked `len(sys.argv) < 3` but reads
    # sys.argv[1..4] — an off-by-one that allowed an IndexError.
    if len(sys.argv) < 5:
        raise ValueError(
            'Expecting vault_url, client_id, clientSecretKey,tenant_id')
    vault_url = sys.argv[1]
    client_id = sys.argv[2]
    secretKey = sys.argv[3]
    tenant_id = sys.argv[4]

    credentials = ServicePrincipalCredentials(client_id=client_id,
                                              secret=secretKey,
                                              tenant=tenant_id,
                                              resource=resource_uri)
    client = KeyVaultClient(KeyVaultAuthentication(None, credentials))
    # (removed unused token_type/access_token locals)

    secrets = client.get_secrets(vault_url)
    secretsData = {}
    for secret in secrets:
        # the trailing path segment of the id is the secret name / version
        secretName = secret.id.rsplit('/', 1)[-1]
        secretItem = client.get_secret(vault_url, secretName, '')
        secretVersion = secretItem.id.rsplit('/', 1)[-1]
        secretsData[secretName] = secretVersion

    print(json.dumps(secretsData))
def __init__(self, config):
    """Wire up auth plus the management- and data-plane Key Vault clients."""
    self._auth = KeyVaultAuth(config, CLIENT_ID)
    self._config = config
    self._mgmt_client = KeyVaultManagementClient(self._auth.get_arm_creds(),
                                                 config.subscription_id)
    self._data_client = KeyVaultClient(self._auth.get_keyvault_creds())
    # vault-selection state: nothing selected yet
    self._selected_vault = None
    self._current_index = None
def get_app_secret(vault_url, secret_name, secret_version=''):
    """ Return (client_id, client_secret). """
    # the secret's value is itself a JSON document holding both fields
    kv = KeyVaultClient(KeyVaultAuthentication(auth_callback))
    bundle = kv.get_secret(vault_url, secret_name, secret_version)
    parsed = json.loads(bundle.value)
    return parsed['client_id'], parsed['client_secret']
def test_vault():
    """Read the latest 'test' secret from the configured vault and report
    its value as a formatted string."""
    kv_client = KeyVaultClient(get_key_vault_credentials())
    # "" -> latest secret version
    secret = kv_client.get_secret(KEY_VAULT_URI, "test", "")
    return "secret value = {}".format(secret.value)
def kv_get_secret(secret_id):
    """Return the latest value of *secret_id* from KEY_VAULT, or "" when
    the lookup yields nothing."""
    client = KeyVaultClient(get_kv_credentials())
    value_id = client.get_secret(KEY_VAULT, secret_id, "")
    if value_id is None:
        return ""
    # Fix: the original printed the secret's VALUE to stdout, leaking it
    # into logs; log only the secret's name.
    print('Geting Secret %s' % secret_id)
    return value_id.value
def __init__(self, client_id=None, secret=None, tenant_id=None):
    """Create a Key Vault client, preferring explicit service-principal
    credentials and falling back to managed identity (MSI).

    Args:
        client_id: AAD application id (optional).
        secret: AAD application secret (optional).
        tenant_id: AAD tenant id (optional).
    """
    # Fix: compare against None with `is not None` rather than `!= None`.
    if client_id is not None and secret is not None and tenant_id is not None:
        credentials = ServicePrincipalCredentials(
            client_id=client_id,
            secret=secret,
            tenant=tenant_id)
    else:
        # no full service-principal triple supplied -> use MSI
        credentials = MSIAuthentication(resource='https://vault.azure.net')
    self.kvclient = KeyVaultClient(credentials)
def main():
    """Read tweets for a hashtag since a given date, write them to a local
    CSV and Cosmos DB, then upload the CSV to Azure Blob Storage.

    Command-line args: [1] hashtag, [2] since-date ('YYYY/MM/DD'),
    [3] ADF pipeline run id (used in the CSV filename).

    NOTE(review): the bare `print x` statements near the end are Python 2
    syntax — this script appears to target Python 2.
    """
    # Twitter application key — every credential is fetched from Key Vault
    # by explicit secret version.
    client = KeyVaultClient(KeyVaultAuthentication(auth_callback))
    _appkey = client.get_secret("https://adfbookkeyvault.vault.azure.net/", "Twitter-appkey", "19bd289d86f449cbb98fd6a51cc63156")
    _appsecret = client.get_secret("https://adfbookkeyvault.vault.azure.net/", "Twitter-appsecret", "510ceec80ef14af28dd961382be78e66")
    _appaccesstoken = client.get_secret("https://adfbookkeyvault.vault.azure.net/", "Twitter-appaccesstoken", "e24ad1dfef6b4392bd6277e9ac51e4b8")
    _appaccesstokensecret = client.get_secret("https://adfbookkeyvault.vault.azure.net/", "Twitter-appaccesstokensecret", "e46bb6b253584ea59d1941bd7f9f1905")
    _tweetTag = sys.argv[1]  # like Azure
    _tweetReadSince = sys.argv[2]  # date from when you want to read tweets like '2018/07/28'
    _PipelineRunId = sys.argv[3]  # Azure Data Factory Pipeline ID 'testrun'
    # Azure Storage Credential
    _accountname = client.get_secret("https://adfbookkeyvault.vault.azure.net/", "Storage-accountname", "0f9adaf2abb545b38757b37a0d63fc68")
    _accountkey = client.get_secret("https://adfbookkeyvault.vault.azure.net/", "Storage-accountkey", "141200d435694c949a7c011bbf55d40a")
    _InputContainerName = 'tweetcontainer'
    # CosmosDB Credential
    _cdbhost = client.get_secret("https://adfbookkeyvault.vault.azure.net/", "cosmosdbURI", "7c885660bce64bd6ae7b44f1c925486c")
    _cdbmasterkey = client.get_secret("https://adfbookkeyvault.vault.azure.net/", "cosmosdbPK", "f220ab6df8d240759435953af5d01e43")
    # hashtag, tweetreadsince, filename includes pipeline id,
    auth = tweepy.OAuthHandler(_appkey.value, _appsecret.value)
    auth.set_access_token(_appaccesstoken.value, _appaccesstokensecret.value)
    tweetapi = tweepy.API(auth, wait_on_rate_limit=True)
    # local_path=os.path.expanduser("~/Documents")
    local_file_name = "Tweets_" + _tweetTag + _PipelineRunId + ".csv"
    full_path_to_file = os.path.join(os.getcwd(), local_file_name)
    outFile = open(local_file_name, 'a')
    fieldnames = ['Tweet_Time', 'Tweet_Desc']
    filewriter = csv.writer(outFile)
    filewriter.writerow(fieldnames)
    # fetch up to 15 matching tweets; each is written to CSV and Cosmos DB
    for tweet in tweepy.Cursor(tweetapi.search, q=_tweetTag, lang="en", since=_tweetReadSince).items(15):
        try:
            if tweet.text.encode('utf-8') != '':
                filewriter.writerow([tweet.created_at, tweet.text.encode('utf-8')])
                insertintoCosmosDB(_cdbhost.value, _cdbmasterkey.value, tweet.created_at, tweet.text.encode('utf-8'))
        except errors.DocumentDBError as e:
            # 409 (conflict) means the document already exists — ignore it;
            # anything else is re-raised, then the loop is abandoned.
            if e.status_code == 409:
                pass
            else:
                raise errors.HTTPFailure(e.status_code)
            print("Error while fetching and storing tweets!!!")
            outFile.close()
            break
    # upload the CSV to blob storage (best-effort)
    try:
        print full_path_to_file
        print local_file_name
        _blob_service = BlockBlobService(account_name=_accountname.value, account_key=_accountkey.value)
        _blob_service.create_blob_from_path(_InputContainerName, local_file_name, full_path_to_file)
        # print(local_file_name)
    except:
        print("Error while uploading file to Azure Blob Storage !!!")
def main(req: func.HttpRequest) -> func.HttpResponse:
    """Azure Function: convert all uploaded Excel blobs to CSV using a
    storage-account key fetched from Key Vault via managed identity."""
    logging.info('Python HTTP trigger function processed a request.')

    credentials = MSIAuthentication(resource='https://vault.azure.net')
    kvclient = KeyVaultClient(credentials)
    # "" -> latest version of the storage-account-key secret
    key = kvclient.get_secret("https://mnanalyticssandbox-vault.vault.azure.net/",
                              "sakey", "").value

    converter = Excel2Csv(BlockBlobService(account_name='xlsxupload',
                                           account_key=key))
    converter.convert_and_upload()

    # Fix: the original returned a plain JSON string despite the
    # func.HttpResponse return annotation; wrap it properly.
    return func.HttpResponse(json.dumps({"result": "Conversion Finished!"}),
                             mimetype="application/json")
def __init__(self, vault_url):
    """Create a Key Vault client for the redditmends-app service principal
    and remember the vault URL to query."""
    sp = service_principals["redditmends-app"]
    self.client = KeyVaultClient(ServicePrincipalCredentials(
        client_id=sp["clientID"],
        secret=sp["secret"],
        tenant=sp["tenantID"],
    ))
    self.vault_url = vault_url
def __init__(self, client_id, client_secret, tenant_id):
    """Store the service-principal identifiers and build an authenticated
    Key Vault client from them."""
    self.client_id = client_id
    self.tenant_id = tenant_id
    self.client_secret = client_secret
    self.credentials = ServicePrincipalCredentials(client_id=client_id,
                                                   secret=client_secret,
                                                   tenant=tenant_id)
    self.client = KeyVaultClient(self.credentials)
def _get_keyvault_key_url(keyvault_name, key_name):
    """Return the full Key Vault identifier (kid) of the latest version of
    *key_name* in the vault named *keyvault_name*."""
    from azure.cli.core._profile import Profile

    def get_token(server, resource, scope):  # pylint: disable=unused-argument
        # Reuse the CLI user's cached login to mint a token for Key Vault.
        return Profile().get_login_credentials(resource)[0]._token_retriever()  # pylint: disable=protected-access

    from azure.keyvault import KeyVaultClient, KeyVaultAuthentication
    client = KeyVaultClient(KeyVaultAuthentication(get_token))
    # '' -> latest key version
    result = client.get_key(_get_key_vault_base_url(keyvault_name), key_name, '')
    return result.key.kid  # pylint: disable=no-member
def setUp(self):
    """Create the management/data-plane clients and load the test
    configuration used by the HDInsight management tests."""
    super(MgmtHDInsightTest, self).setUp()

    # create client
    self.hdinsight_client = self.create_mgmt_client(HDInsightManagementClient)
    self.msi_client = self.create_mgmt_client(ManagedServiceIdentityClient)
    self.vault_mgmt_client = self.create_mgmt_client(KeyVaultManagementClient)
    if self.is_live:
        self.vault_client = self.create_basic_client(KeyVaultClient)
    else:
        # playback mode: no real auth happens, so supply a dummy token
        def _auth_callback(server, resource, scope):
            return AccessToken('Bearer', 'fake-token')
        self.vault_client = KeyVaultClient(KeyVaultAuthentication(authorization_callback=_auth_callback))

    # sensitive test configs
    self.tenant_id = self.settings.TENANT_ID
    self.adls_account_name = self.settings.HDI_ADLS_ACCOUNT_NAME
    self.adls_client_id = self.settings.HDI_ADLS_CLIENT_ID

    # Non-sensitive test configs
    self.cluster_username = '******'
    self.cluster_password = '******'
    self.ssh_username = '******'
    self.ssh_password = '******'
    self.adls_home_mountpoint = '/clusters/hdi'
    self.cert_password = '******'
    # Base64-encoded PFX test certificate (checked-in test fixture).
    self.cert_content = 'MIIJ8gIBAzCCCa4GCSqGSIb3DQEHAaCCCZ8EggmbMIIJlzCCBgAGCSqGSIb3DQEHAaCCBfEEggXtMIIF6TCCBeUGCyqGSIb3DQEMCgECoIIE9jCCBPIwHAYKKoZIhvcNAQwBAzAOBAiTJstpWcGFZAICB9AEggTQZvg9qVE2ptb3hdH9hnDf5pwIeGghe9assBeEKj/W1JMUjsdEu7qzXH9/3Ro6C1HF6MvSqbav7MD8je9AMb0jl7T3ZmXPgGtrbUsSBTPruVv0hTXPRTxQmcfwae5vEkD03b/4W22sXMMYZB7wOTQMl1d5+0wt267qdF+G1XWtRI2jnxetK8/oyMQxn5Cwuz/VAmn1tXwRAN9gIiZDA8MwvpYha/iFVWKu/vnHg8HT47ry+27/wh8ifM9ea7JWhKh2tZoPIMou9/P/CgkkMv9KVHlmiMldA3Phxsrqjbh/wbd8RWBOtSM7rryMVnc1MYonZraDJMGOLGAhvEcXNBKLwRdmrDDYvpOYlDYKlmNvDXYDm410XCOia2aKP0WoP4qLsExtUwW8Qk2r2QRy5O4B5p2EbPZBlPlMMO4S1NkASjJCghuTCUgvk3uwe2L/mMf0IajAf+R0/VW/fXHbhJkFKobi5JlIqWaHsSC7hMidWj6771Yi8UEXOMshWERs2UHH05aIO3c50lnyypHyhA3BohKUXzNhHA0o7ImQRjmjjTJFBLMNiIZSW0aPtLN1+92pT0a6iS7P1PC0DqEnOjkcs/VOUI3qTt0X78b6wnDO+ATo39B13njGD0mtrVvtTeHclBouoFzpKHkS86GSGmDoHQH6EHhpGF/7wPVfAjAiSrNQb/QLjPHWo+BeiOml4Xrti0K6rWb0AXhY8AmtIlEUC9GscDSdT55v9j9tWONzOXECCgZBYDzNlagMLkCDIFHZwbEGPn3pOc7BTOmQf1GQjfvunLiLWWfe3of9pR+NCDyi1VJUNvjoE+/YnVoBBUMBBO6/4t2SL92iouEF4fyqkQFDb0FOPW4Kfh7H5W+sDZIN9NfqNzniK6HFcpS+jnGm9x9rx81DmMcwtiYZTfYDSivtNxOYrmRFXx574stBzvG0En11uc6E4QhWnkCSsBnnOMjRGDyv95BFVMZC0gIS0rWoKYxjdblpmo9w/yfDtAmQuCs3bdqcJ4mMYt0ueUUZImPRQRJOSrVyoq+brLw657EqM1SahtBmzTG7+HTl1Qi/xZ1xmz6paQDSFVPRcb5QSIp4v08j/Lmj0x4R9jQ4cAmZ3CfPKXBKuIRu2AI2EuqGOoAxvQQEpSjSKUs3fbQxjptUhK7o5FcXAfAxHLzdx2/9L1Iqbo/3HDkbmuix24NRXESG0e/kVr7VAGhoALI7L+eKAdn4AkgmBt55FXZ+uHY9bSKZUoz4Oed2bz2A+9sQBcXG06fLqQEwGVPhATEbYyRduuY6AdTRAmOKmadT5BTTD7+dnFlIt+u7ZpbXm6S6LcSqGqHVacig27SwDt0VznQsjMRDVCiHaWKg4W78xbP7YVvNTB/cBCHmhh5ZXfO/TucizXsQPJlwEMr2CbqByqldXi0i1GUrbg4aLUGZtxgUYex7dHlx6GUejOGRh7fLYCNBo43pjCFvbhFwb0/dWya0crJjpGiY3DNtl1YosJBmvso/Rli4QqVeN7tb45ZsGWTEUg1MDeoGRDqal7GDsvBnH574T5Sz3nSLAoNXR7k0rYaWhezJNobo/SDkuSXskVjNJpv+vjEyW2UGYNhaeK/UHKBP8IrmSAEIZejQj6OEzSPM6TNLW5qJb6LK9agxgdswEwYJKoZIhvcNAQkVMQYEBAEAAAAwXQYJKwYBBAGCNxEBMVAeTgBNAGkAYwByAG8AcwBvAGYAdAAgAFMAdAByAG8AbgBnACAAQwByAHkAcAB0AG8AZwByAGEAcABoAGkAYwAgAFAAcgBvAHYAaQBkAGUAcjBlBgkqhkiG9w0BCRQxWB5WAFAAdgBrAFQAbQBwADoAMAAyAGYAZQA0AGUAOAAzAC0AMgAzADEANgAtADQAMQA3AGMALQA5ADQANQBjAC0AZgA1ADUAMABhADUAZAAwAGIAMAAzAGEwggOPBgkqhkiG9w0BBwagggOAMIIDfAIBADCCA3UGCSqGSIb3DQEHATAcBgoqhkiG9w0BDAEDMA4ECAR1hzovrDkgAgIH0ICCA0gq6boOLRoE5PHFfVIXYtzqg1u2vPMm5mgBUvrh3u+VZ/1FMGTj8od9+Yu91cnumVSgfRiGq7lz+K6by5JsBqP68ksLA2d/PqtTdofCoZ7SgVIo+qqzA64HIQFkElNpo/BJMX/JGpc5OlFq7mdHe6xL2Pfx/3z/pNSV+D+WaAwaDnbLqI7MU6ED3j5l63mExk/8H/VVbiPdqMTzbhIp65oHTGanw86w7RlywqeNb3DkPVMB78Jhcg8vf2AxB8hKf6QiO2uJc/4WKkyLoLmNoD/zhaoUuAbC6hrNVAa9VRWNRfwKZrzwIMSLlKYwWmVcD/QgC8gwxuF+wV3UHwDNAdEe8TEsOhE99/ZiUiogxMdkopZwwtZMszzBB/x5mHCGySauDMVPwoYT6QXewJhGrUap0jwB/Vzy5FaWHi/m8964zWpwC6xfkT2hkDb+rfouWutpiAgMne5tD9YvqxTUmZFIlgwpLrVdPcKQS+b/uIXPTv8uW177XsCOmGGu728ld8H1Ifb2nPveK09Y0AA+ARFpOX0p0ZuxMQqk6NnlA+eESJVm5cLfKszorRcrNPINXaEOGl2okuijm8va30FH9GIYWRt+Be6s8qG672aTO/2eHaTHeR/qQ9aEt0ASDXGcugYS14Jnu2wbauyvotZ6eAvgl5tM2leMpgSLaQqYzPTV2uiD6zDUqxwjm2P8EZQihEQqLUV1jvQuQB4Ui7MryDQ+QiDBD2m5c+9SDPafcR7qgRe/cP4hj5BqzHTcNQAD5BLXze7Yx+TMdf+Qe/R1uBYm8bLjUv9hwUmtMeZP4RU6RPJrN2aRf7BUdgS0j/8YAhxViPucRENuhEfS4sotHf1CJZ7xJz0ZE9cpVY6JLl1tbmuf1Eh50cicZ1SHQhqSP0ggLHV6DNcJz+kyekEe9qggGDi6mreYz/kJnnumsDy5cToIHy9jJhtXzj+/ZNGkdpq9HWMiwAT/VR1WPpzjn06m7Z87PiLaiC3simQtjnl0gVF11Ev4rbIhYjFBL0nKfNpzaWlMaOVF+EumROk3EbZVpx1K6Yh0zWh/NocWSUrtSoHVklzwPCNRvnE1Ehyw5t9YbEBsTIDWRYyqbVYoFVfOUhq5p4TXrqEwOzAfMAcGBSsOAwIaBBSx7sJo66zYk4VOsgD9V/co1SikFAQUUvU/kE4wTRnPRnaWXnno+FCb56kCAgfQ'
    self.workspace_id = '1d364e89-bb71-4503-aa3d-a23535aea7bd'
def exec_module(self, **kwargs):
    """Ansible module entry point: create or delete the key named by
    self.key_name and record the outcome in self.results."""
    # Copy every declared module argument (plus tags) onto the instance.
    for key in list(self.module_arg_spec.keys()) + ['tags']:
        setattr(self, key, kwargs[key])

    # Create KeyVaultClient
    self.client = KeyVaultClient(self.azure_credentials)

    results = dict()
    changed = False
    try:
        results['key_id'] = self.get_key(self.key_name)

        # Key exists and will be deleted
        if self.state == 'absent':
            changed = True
    except KeyVaultErrorException:
        # Key doesn't exist
        if self.state == 'present':
            changed = True

    self.results['changed'] = changed
    self.results['state'] = results

    if not self.check_mode:
        # Create key
        if self.state == 'present' and changed:
            results['key_id'] = self.create_key(self.key_name, self.tags)
            self.results['state'] = results
            self.results['state']['status'] = 'Created'
        # Delete key
        elif self.state == 'absent' and changed:
            results['key_id'] = self.delete_key(self.key_name)
            self.results['state'] = results
            self.results['state']['status'] = 'Deleted'
    else:
        # Check mode: only report what would have happened.
        if self.state == 'present' and changed:
            self.results['state']['status'] = 'Created'
        elif self.state == 'absent' and changed:
            self.results['state']['status'] = 'Deleted'

    return self.results
class AzureRMKeyVaultKey(AzureRMModuleBase):
    ''' Module that creates or deletes keys in Azure KeyVault '''

    def __init__(self):
        self.module_arg_spec = dict(
            key_name=dict(type='str', required=True),
            keyvault_uri=dict(type='str', required=True),
            pem_file=dict(type='str'),
            pem_password=dict(type='str'),
            byok_file=dict(type='str'),
            state=dict(type='str', default='present', choices=['present', 'absent'])
        )
        self.results = dict(
            changed=False,
            state=dict()
        )
        self.key_name = None
        self.keyvault_uri = None
        self.pem_file = None
        self.pem_password = None
        # FIX: byok_file is in module_arg_spec but was never initialized here.
        self.byok_file = None
        self.state = None
        self.client = None
        self.tags = None
        required_if = [
            ('pem_password', 'present', ['pem_file'])
        ]
        super(AzureRMKeyVaultKey, self).__init__(self.module_arg_spec,
                                                 supports_check_mode=True,
                                                 required_if=required_if,
                                                 supports_tags=True)

    def exec_module(self, **kwargs):
        """Create or delete the key as requested; honors check mode."""
        for key in list(self.module_arg_spec.keys()) + ['tags']:
            setattr(self, key, kwargs[key])

        # Create KeyVaultClient
        self.client = KeyVaultClient(self.azure_credentials)

        results = dict()
        changed = False
        try:
            results['key_id'] = self.get_key(self.key_name)
            # Key exists and will be deleted
            if self.state == 'absent':
                changed = True
        except KeyVaultErrorException:
            # Key doesn't exist
            if self.state == 'present':
                changed = True

        self.results['changed'] = changed
        self.results['state'] = results

        if not self.check_mode:
            # Create key
            if self.state == 'present' and changed:
                results['key_id'] = self.create_key(self.key_name, self.tags)
                self.results['state'] = results
                self.results['state']['status'] = 'Created'
            # Delete key
            elif self.state == 'absent' and changed:
                results['key_id'] = self.delete_key(self.key_name)
                self.results['state'] = results
                self.results['state']['status'] = 'Deleted'
        else:
            # Check mode: report the would-be outcome only.
            if self.state == 'present' and changed:
                self.results['state']['status'] = 'Created'
            elif self.state == 'absent' and changed:
                self.results['state']['status'] = 'Deleted'

        return self.results

    def get_key(self, name, version=''):
        ''' Gets an existing key; returns its parsed id, or None if the
            service returned an empty bundle. '''
        key_bundle = self.client.get_key(self.keyvault_uri, name, version)
        if key_bundle:
            key_id = KeyVaultId.parse_key_id(key_bundle.key.kid)
            return key_id.id

    def create_key(self, name, tags, kty='RSA'):
        ''' Creates a key and returns its parsed id. '''
        key_bundle = self.client.create_key(self.keyvault_uri, name, kty, tags=tags)
        key_id = KeyVaultId.parse_key_id(key_bundle.key.kid)
        return key_id.id

    def delete_key(self, name):
        ''' Deletes a key and returns the deleted key's parsed id. '''
        deleted_key = self.client.delete_key(self.keyvault_uri, name)
        key_id = KeyVaultId.parse_key_id(deleted_key.key.kid)
        return key_id.id

    def import_key(self, key_name, destination=None, key_ops=None, disabled=False, expires=None,
                   not_before=None, tags=None, pem_file=None, pem_password=None, byok_file=None):
        """ Import a private key. Supports importing base64 encoded private keys from PEM files.
            Supports importing BYOK keys into HSM for premium KeyVaults. """

        def _to_bytes(hex_string):
            # zero pads and decodes a hex string
            if len(hex_string) % 2:
                # FIX: the pad character was missing ('{0}'.format left the
                # string odd-length, so hex_codec decoding always failed here).
                hex_string = '0{0}'.format(hex_string)
            return codecs.decode(hex_string, 'hex_codec')

        def _set_rsa_parameters(dest, src):
            # map OpenSSL parameter names to JsonWebKey property names
            conversion_dict = {
                'modulus': 'n',
                'publicExponent': 'e',
                'privateExponent': 'd',
                'prime1': 'p',
                'prime2': 'q',
                'exponent1': 'dp',
                'exponent2': 'dq',
                'coefficient': 'qi'
            }
            # regex: looks for matches that fit the following patterns:
            # integerPattern: 65537 (0x10001)
            # hexPattern:
            #     00:a0:91:4d:00:23:4a:c6:83:b2:1b:4c:15:d5:be:
            #     d8:87:bd:c9:59:c2:e5:7a:f5:4a:e7:34:e8:f0:07:
            # The desired match should always be the first component of the match
            regex = re.compile(r'([^:\s]*(:[^\:)]+\))|([^:\s]*(:\s*[0-9A-Fa-f]{2})+))')
            # regex2: extracts the hex string from a format like: 65537 (0x10001)
            regex2 = re.compile(r'(?<=\(0x{1})([0-9A-Fa-f]*)(?=\))')
            key_params = crypto.dump_privatekey(crypto.FILETYPE_TEXT, src).decode('utf-8')
            for match in regex.findall(key_params):
                comps = match[0].split(':', 1)
                name = conversion_dict.get(comps[0], None)
                if name:
                    value = comps[1].replace(' ', '').replace('\n', '').replace(':', '')
                    try:
                        value = _to_bytes(value)
                    except Exception:  # pylint:disable=broad-except
                        # if decoding fails it is because of an integer pattern. Extract the hex
                        # string and retry
                        value = _to_bytes(regex2.findall(value)[0])
                    setattr(dest, name, value)

        key_attrs = KeyAttributes(not disabled, not_before, expires)
        key_obj = JsonWebKey(key_ops=key_ops)
        if pem_file:
            key_obj.kty = 'RSA'
            with open(pem_file, 'r') as f:
                pem_data = f.read()
            # load private key and prompt for password if encrypted
            pkey = None
            try:
                pem_password = str(pem_password).encode() if pem_password else None
                # despite documentation saying password should be a string, it needs to actually
                # be UTF-8 encoded bytes
                pkey = crypto.load_privatekey(crypto.FILETYPE_PEM, pem_data, pem_password)
            except crypto.Error:
                pass  # wrong password
            except TypeError:
                pass  # no pass provided
            if pkey is None:
                # FIX: previously fell through with 'pkey' unbound, crashing with a
                # confusing NameError; raise a clear, actionable error instead.
                raise ValueError(
                    'Unable to load private key from {0}: wrong or missing password?'.format(pem_file))
            _set_rsa_parameters(key_obj, pkey)
        elif byok_file:
            with open(byok_file, 'rb') as f:
                byok_data = f.read()
            key_obj.kty = 'RSA-HSM'
            key_obj.t = byok_data
        return self.client.import_key(
            self.keyvault_uri, key_name, key_obj, destination == 'hsm', key_attrs, tags)
class MgmtHDInsightTest(AzureMgmtTestCase):

    def setUp(self):
        """Build the HDInsight/MSI/KeyVault clients and shared test settings."""
        super(MgmtHDInsightTest, self).setUp()

        # create client
        self.hdinsight_client = self.create_mgmt_client(HDInsightManagementClient)
        self.msi_client = self.create_mgmt_client(ManagedServiceIdentityClient)
        self.vault_mgmt_client = self.create_mgmt_client(KeyVaultManagementClient)
        if self.is_live:
            self.vault_client = self.create_basic_client(KeyVaultClient)
        else:
            # Playback mode: no real AAD endpoint is reachable, so
            # authenticate the data-plane client with a static fake token.
            def _auth_callback(server, resource, scope):
                return AccessToken('Bearer', 'fake-token')
            self.vault_client = KeyVaultClient(
                KeyVaultAuthentication(authorization_callback=_auth_callback))

        # sensitive test configs
        self.tenant_id = self.settings.TENANT_ID
        self.adls_account_name = self.settings.HDI_ADLS_ACCOUNT_NAME
        self.adls_client_id = self.settings.HDI_ADLS_CLIENT_ID

        # Non-sensitive test configs
        self.cluster_username = '******'
        self.cluster_password = '******'
        self.ssh_username = '******'
        self.ssh_password = '******'
        self.adls_home_mountpoint = '/clusters/hdi'
        self.cert_password = '******'
        # Base64-encoded PKCS#12 test certificate (recorded fixture).
        self.cert_content = 'MIIJ8gIBAzCCCa4GCSqGSIb3DQEHAaCCCZ8EggmbMIIJlzCCBgAGCSqGSIb3DQEHAaCCBfEEggXtMIIF6TCCBeUGCyqGSIb3DQEMCgECoIIE9jCCBPIwHAYKKoZIhvcNAQwBAzAOBAiTJstpWcGFZAICB9AEggTQZvg9qVE2ptb3hdH9hnDf5pwIeGghe9assBeEKj/W1JMUjsdEu7qzXH9/3Ro6C1HF6MvSqbav7MD8je9AMb0jl7T3ZmXPgGtrbUsSBTPruVv0hTXPRTxQmcfwae5vEkD03b/4W22sXMMYZB7wOTQMl1d5+0wt267qdF+G1XWtRI2jnxetK8/oyMQxn5Cwuz/VAmn1tXwRAN9gIiZDA8MwvpYha/iFVWKu/vnHg8HT47ry+27/wh8ifM9ea7JWhKh2tZoPIMou9/P/CgkkMv9KVHlmiMldA3Phxsrqjbh/wbd8RWBOtSM7rryMVnc1MYonZraDJMGOLGAhvEcXNBKLwRdmrDDYvpOYlDYKlmNvDXYDm410XCOia2aKP0WoP4qLsExtUwW8Qk2r2QRy5O4B5p2EbPZBlPlMMO4S1NkASjJCghuTCUgvk3uwe2L/mMf0IajAf+R0/VW/fXHbhJkFKobi5JlIqWaHsSC7hMidWj6771Yi8UEXOMshWERs2UHH05aIO3c50lnyypHyhA3BohKUXzNhHA0o7ImQRjmjjTJFBLMNiIZSW0aPtLN1+92pT0a6iS7P1PC0DqEnOjkcs/VOUI3qTt0X78b6wnDO+ATo39B13njGD0mtrVvtTeHclBouoFzpKHkS86GSGmDoHQH6EHhpGF/7wPVfAjAiSrNQb/QLjPHWo+BeiOml4Xrti0K6rWb0AXhY8AmtIlEUC9GscDSdT55v9j9tWONzOXECCgZBYDzNlagMLkCDIFHZwbEGPn3pOc7BTOmQf1GQjfvunLiLWWfe3of9pR+NCDyi1VJUNvjoE+/YnVoBBUMBBO6/4t2SL92iouEF4fyqkQFDb0FOPW4Kfh7H5W+sDZIN9NfqNzniK6HFcpS+jnGm9x9rx81DmMcwtiYZTfYDSivtNxOYrmRFXx574stBzvG0En11uc6E4QhWnkCSsBnnOMjRGDyv95BFVMZC0gIS0rWoKYxjdblpmo9w/yfDtAmQuCs3bdqcJ4mMYt0ueUUZImPRQRJOSrVyoq+brLw657EqM1SahtBmzTG7+HTl1Qi/xZ1xmz6paQDSFVPRcb5QSIp4v08j/Lmj0x4R9jQ4cAmZ3CfPKXBKuIRu2AI2EuqGOoAxvQQEpSjSKUs3fbQxjptUhK7o5FcXAfAxHLzdx2/9L1Iqbo/3HDkbmuix24NRXESG0e/kVr7VAGhoALI7L+eKAdn4AkgmBt55FXZ+uHY9bSKZUoz4Oed2bz2A+9sQBcXG06fLqQEwGVPhATEbYyRduuY6AdTRAmOKmadT5BTTD7+dnFlIt+u7ZpbXm6S6LcSqGqHVacig27SwDt0VznQsjMRDVCiHaWKg4W78xbP7YVvNTB/cBCHmhh5ZXfO/TucizXsQPJlwEMr2CbqByqldXi0i1GUrbg4aLUGZtxgUYex7dHlx6GUejOGRh7fLYCNBo43pjCFvbhFwb0/dWya0crJjpGiY3DNtl1YosJBmvso/Rli4QqVeN7tb45ZsGWTEUg1MDeoGRDqal7GDsvBnH574T5Sz3nSLAoNXR7k0rYaWhezJNobo/SDkuSXskVjNJpv+vjEyW2UGYNhaeK/UHKBP8IrmSAEIZejQj6OEzSPM6TNLW5qJb6LK9agxgdswEwYJKoZIhvcNAQkVMQYEBAEAAAAwXQYJKwYBBAGCNxEBMVAeTgBNAGkAYwByAG8AcwBvAGYAdAAgAFMAdAByAG8AbgBnACAAQwByAHkAcAB0AG8AZwByAGEAcABoAGkAYwAgAFAAcgBvAHYAaQBkAGUAcjBlBgkqhkiG9w0BCRQxWB5WAFAAdgBrAFQAbQBwADoAMAAyAGYAZQA0AGUAOAAzAC0AMgAzADEANgAtADQAMQA3AGMALQA5ADQANQBjAC0AZgA1ADUAMABhADUAZAAwAGIAMAAzAGEwggOPBgkqhkiG9w0BBwagggOAMIIDfAIBADCCA3UGCSqGSIb3DQEHATAcBgoqhkiG9w0BDAEDMA4ECAR1hzovrDkgAgIH0ICCA0gq6boOLRoE5PHFfVIXYtzqg1u2vPMm5mgBUvrh3u+VZ/1FMGTj8od9+Yu91cnumVSgfRiGq7lz+K6by5JsBqP68ksLA2d/PqtTdofCoZ7SgVIo+qqzA64HIQFkElNpo/BJMX/JGpc5OlFq7mdHe6xL2Pfx/3z/pNSV+D+WaAwaDnbLqI7MU6ED3j5l63mExk/8H/VVbiPdqMTzbhIp65oHTGanw86w7RlywqeNb3DkPVMB78Jhcg8vf2AxB8hKf6QiO2uJc/4WKkyLoLmNoD/zhaoUuAbC6hrNVAa9VRWNRfwKZrzwIMSLlKYwWmVcD/QgC8gwxuF+wV3UHwDNAdEe8TEsOhE99/ZiUiogxMdkopZwwtZMszzBB/x5mHCGySauDMVPwoYT6QXewJhGrUap0jwB/Vzy5FaWHi/m8964zWpwC6xfkT2hkDb+rfouWutpiAgMne5tD9YvqxTUmZFIlgwpLrVdPcKQS+b/uIXPTv8uW177XsCOmGGu728ld8H1Ifb2nPveK09Y0AA+ARFpOX0p0ZuxMQqk6NnlA+eESJVm5cLfKszorRcrNPINXaEOGl2okuijm8va30FH9GIYWRt+Be6s8qG672aTO/2eHaTHeR/qQ9aEt0ASDXGcugYS14Jnu2wbauyvotZ6eAvgl5tM2leMpgSLaQqYzPTV2uiD6zDUqxwjm2P8EZQihEQqLUV1jvQuQB4Ui7MryDQ+QiDBD2m5c+9SDPafcR7qgRe/cP4hj5BqzHTcNQAD5BLXze7Yx+TMdf+Qe/R1uBYm8bLjUv9hwUmtMeZP4RU6RPJrN2aRf7BUdgS0j/8YAhxViPucRENuhEfS4sotHf1CJZ7xJz0ZE9cpVY6JLl1tbmuf1Eh50cicZ1SHQhqSP0ggLHV6DNcJz+kyekEe9qggGDi6mreYz/kJnnumsDy5cToIHy9jJhtXzj+/ZNGkdpq9HWMiwAT/VR1WPpzjn06m7Z87PiLaiC3simQtjnl0gVF11Ev4rbIhYjFBL0nKfNpzaWlMaOVF+EumROk3EbZVpx1K6Yh0zWh/NocWSUrtSoHVklzwPCNRvnE1Ehyw5t9YbEBsTIDWRYyqbVYoFVfOUhq5p4TXrqEwOzAfMAcGBSsOAwIaBBSx7sJo66zYk4VOsgD9V/co1SikFAQUUvU/kE4wTRnPRnaWXnno+FCb56kCAgfQ'
        self.workspace_id = '1d364e89-bb71-4503-aa3d-a23535aea7bd'

    @ResourceGroupPreparer(name_prefix='hdipy-', location=LOCATION)
    @StorageAccountPreparer(name_prefix='hdipy', location=LOCATION)
    def test_create_humboldt_cluster(self, resource_group, location, storage_account, storage_account_key):
        """Create a plain Hadoop (Humboldt) cluster and validate it."""
        cluster_name = self.get_resource_name('hdisdk-humboldt')
        params = self.get_cluster_create_params(location, cluster_name, storage_account, storage_account_key)
        cluster = self.hdinsight_client.clusters.create(resource_group.name, cluster_name, params).result()
        self.validate_cluster(cluster_name, params, cluster)

    @ResourceGroupPreparer(name_prefix='hdipy-', location=LOCATION)
    @StorageAccountPreparer(name_prefix='hdipy', location=LOCATION)
    def test_create_humboldt_cluster_with_premium_tier(self, resource_group, location, storage_account, storage_account_key):
        """Create a cluster on the premium tier and validate it."""
        cluster_name = self.get_resource_name('hdisdk-premium')
        params = self.get_cluster_create_params(location, cluster_name, storage_account, storage_account_key)
        params.properties.tier = Tier.premium
        cluster = self.hdinsight_client.clusters.create(resource_group.name, cluster_name, params).result()
        self.validate_cluster(cluster_name, params, cluster)

    @ResourceGroupPreparer(name_prefix='hdipy-', location=LOCATION)
    @StorageAccountPreparer(name_prefix='hdipy', location=LOCATION)
    def test_create_with_empty_extended_parameters(self, resource_group, location, storage_account, storage_account_key):
        """Creating with an empty parameter object must fail."""
        cluster_name = self.get_resource_name('hdisdk-cluster')
        params = ClusterCreateParametersExtended()
        # try to create cluster and ensure it throws
        try:
            self.hdinsight_client.clusters.create(resource_group.name, cluster_name, params).result()
            self.assertTrue(False, 'should not have made it here')
        except Exception:
            pass

    @ResourceGroupPreparer(name_prefix='hdipy-', location=LOCATION)
    @StorageAccountPreparer(name_prefix='hdipy', location=LOCATION)
    def test_create_humboldt_cluster_with_custom_vm_sizes(self, resource_group, location, storage_account, storage_account_key):
        """Create a cluster with non-default head/zookeeper VM sizes."""
        cluster_name = self.get_resource_name('hdisdk-customvmsizes')
        params = self.get_cluster_create_params(location, cluster_name, storage_account, storage_account_key)
        roles = params.properties.compute_profile.roles
        head_role = next(r for r in roles if r.name == 'headnode')
        head_role.hardware_profile = HardwareProfile(vm_size="ExtraLarge")
        zk_role = next(r for r in roles if r.name == 'zookeepernode')
        zk_role.hardware_profile = HardwareProfile(vm_size="Medium")
        cluster = self.hdinsight_client.clusters.create(resource_group.name, cluster_name, params).result()
        self.validate_cluster(cluster_name, params, cluster)

    @ResourceGroupPreparer(name_prefix='hdipy-', location=LOCATION)
    @StorageAccountPreparer(name_prefix='hdipy', location=LOCATION)
    def test_create_linux_spark_cluster_with_component_version(self, resource_group, location, storage_account, storage_account_key):
        """Create a Spark cluster pinned to a specific component version."""
        cluster_name = self.get_resource_name('hdisdk-sparkcomponentversions')
        params = self.get_cluster_create_params(location, cluster_name, storage_account, storage_account_key)
        params.properties.cluster_definition.kind = 'Spark'
        params.properties.cluster_definition.Component_version = {'Spark' : '2.2'}
        cluster = self.hdinsight_client.clusters.create(resource_group.name, cluster_name, params).result()
        self.validate_cluster(cluster_name, params, cluster)

    @ResourceGroupPreparer(name_prefix='hdipy-', location=LOCATION)
    @StorageAccountPreparer(name_prefix='hdipy', location=LOCATION)
    def test_create_kafka_cluster_with_managed_disks(self, resource_group, location, storage_account, storage_account_key):
        """Create a Kafka cluster whose worker nodes use managed data disks."""
        cluster_name = self.get_resource_name('hdisdk-kafka')
        params = self.get_cluster_create_params(location, cluster_name, storage_account, storage_account_key)
        params.properties.cluster_definition.kind = 'Kafka'
        worker_role = next(r for r in params.properties.compute_profile.roles if r.name == 'workernode')
        worker_role.data_disks_groups = [
            DataDisksGroups(
                disks_per_node=8
            )
        ]
        cluster = self.hdinsight_client.clusters.create(resource_group.name, cluster_name, params).result()
        self.validate_cluster(cluster_name, params, cluster)
@ResourceGroupPreparer(name_prefix='hdipy-', location=LOCATION) @StorageAccountPreparer(name_prefix='hdipy', location=LOCATION) @KeyVaultPreparer(name_prefix='hdipy', location=LOCATION, enable_soft_delete=True) def test_create_kafka_cluster_with_disk_encryption(self, resource_group, location, storage_account, storage_account_key, vault): # create managed identities for Azure resources. msi_name = self.get_resource_name('hdipyuai') msi = self.msi_client.user_assigned_identities.create_or_update(resource_group.name, msi_name, location) # add managed identity to vault required_permissions = Permissions(keys=[KeyPermissions.get, KeyPermissions.wrap_key, KeyPermissions.unwrap_key], secrets=[SecretPermissions.get, SecretPermissions.set,SecretPermissions.delete]) vault.properties.access_policies.append( AccessPolicyEntry(tenant_id=self.tenant_id, object_id=msi.principal_id, permissions=required_permissions) ) update_params = VaultCreateOrUpdateParameters(location=location, properties=vault.properties) vault = self.vault_mgmt_client.vaults.create_or_update(resource_group.name, vault.name, update_params).result() self.assertIsNotNone(vault) # create key vault_uri = vault.properties.vault_uri key_name = self.get_resource_name('hdipykey1') created_bundle = self.vault_client.create_key(vault_uri, key_name, 'RSA') vault_key = KeyVaultId.parse_key_id(created_bundle.key.kid) # create HDInsight cluster with Kafka disk encryption rg_name = resource_group.name cluster_name = self.get_resource_name('hdisdk-kafka-byok') create_params = self.get_cluster_create_params(location, cluster_name, storage_account, storage_account_key) create_params.properties.cluster_definition.kind = 'Kafka' workernode = next(item for item in create_params.properties.compute_profile.roles if item.name == 'workernode') workernode.data_disks_groups = [ DataDisksGroups( disks_per_node=8 ) ] create_params.identity = ClusterIdentity( type=ResourceIdentityType.user_assigned, user_assigned_identities={msi.id: 
ClusterIdentityUserAssignedIdentitiesValue()} ) create_params.properties.disk_encryption_properties = DiskEncryptionProperties( vault_uri=vault_key.vault, key_name=vault_key.name, key_version=vault_key.version, msi_resource_id=msi.id ) cluster = self.hdinsight_client.clusters.create(resource_group.name, cluster_name, create_params).result() self.validate_cluster(cluster_name, create_params, cluster) # check disk encryption properties self.assertIsNotNone(cluster.properties.disk_encryption_properties) self.assertEqual(create_params.properties.disk_encryption_properties.vault_uri, cluster.properties.disk_encryption_properties.vault_uri) self.assertEqual(create_params.properties.disk_encryption_properties.key_name, cluster.properties.disk_encryption_properties.key_name) self.assertEqual(create_params.properties.disk_encryption_properties.msi_resource_id.lower(), cluster.properties.disk_encryption_properties.msi_resource_id.lower()) # create a new key new_key_name = self.get_resource_name('hdipykey2') created_bundle = self.vault_client.create_key(vault_uri, new_key_name, 'RSA') new_vault_key = KeyVaultId.parse_key_id(created_bundle.key.kid) rotate_params = ClusterDiskEncryptionParameters( vault_uri=new_vault_key.vault, key_name=new_vault_key.name, key_version=new_vault_key.version ) # rotate cluster key self.hdinsight_client.clusters.rotate_disk_encryption_key(rg_name, cluster_name, rotate_params).wait() cluster = self.hdinsight_client.clusters.get(rg_name, cluster_name) # check disk encryption properties self.assertIsNotNone(cluster.properties.disk_encryption_properties) self.assertEqual(rotate_params.vault_uri, cluster.properties.disk_encryption_properties.vault_uri) self.assertEqual(rotate_params.key_name, cluster.properties.disk_encryption_properties.key_name) self.assertEqual(msi.id.lower(), cluster.properties.disk_encryption_properties.msi_resource_id.lower()) @ResourceGroupPreparer(name_prefix='hdipy-', location=ADLS_LOCATION) 
@StorageAccountPreparer(name_prefix='hdipy', location=ADLS_LOCATION) def test_create_with_adls_gen1(self, resource_group, location, storage_account, storage_account_key): cluster_name = self.get_resource_name('hdisdk-adlsgen1') create_params = self.get_cluster_create_params_for_adls_gen1(location, cluster_name) # Add additional storage account create_params.properties.storage_profile.storageaccounts.append( StorageAccount( name=storage_account.name + STORAGE_BLOB_SERVICE_ENDPOINT_SUFFIX, key=storage_account_key, container=cluster_name.lower(), is_default=False ) ) cluster = self.hdinsight_client.clusters.create(resource_group.name, cluster_name, create_params).result() self.validate_cluster(cluster_name, create_params, cluster) @ResourceGroupPreparer(name_prefix='hdipy-', location=LOCATION) @StorageAccountPreparer(name_prefix='hdipy', location=LOCATION, kind=Kind.storage_v2) @StorageAccountPreparer(name_prefix='hdipy2', location=LOCATION, parameter_name='second_storage_account') def test_create_with_adls_gen2(self, resource_group, location, storage_account, storage_account_key, second_storage_account, second_storage_account_key): cluster_name = self.get_resource_name('hdisdk-adlgen2') create_params = self.get_cluster_create_params_for_adls_gen2(location, cluster_name, storage_account, storage_account_key) # Add additional storage account create_params.properties.storage_profile.storageaccounts.append( StorageAccount( name=second_storage_account.name + STORAGE_BLOB_SERVICE_ENDPOINT_SUFFIX, key=second_storage_account_key, container=cluster_name.lower(), is_default=False ) ) cluster = self.hdinsight_client.clusters.create(resource_group.name, cluster_name, create_params).result() self.validate_cluster(cluster_name, create_params, cluster) @ResourceGroupPreparer(name_prefix='hdipy-', location=LOCATION) @StorageAccountPreparer(name_prefix='hdipy', location=LOCATION) @StorageAccountPreparer(name_prefix='hdipy2', location=LOCATION, parameter_name='second_storage_account') 
def test_create_with_additional_storage(self, resource_group, location, storage_account, storage_account_key, second_storage_account, second_storage_account_key): cluster_name = self.get_resource_name('hdisdk-additional') create_params = self.get_cluster_create_params(location, cluster_name, storage_account, storage_account_key) # Add additional storage account create_params.properties.storage_profile.storageaccounts.append( StorageAccount( name=second_storage_account.name + STORAGE_BLOB_SERVICE_ENDPOINT_SUFFIX, key=second_storage_account_key, container=cluster_name.lower(), is_default=False ) ) # Add data lake storage gen1 access create_params = self.get_cluster_create_params_for_adls_gen1(location, cluster_name, create_params) cluster = self.hdinsight_client.clusters.create(resource_group.name, cluster_name, create_params).result() self.validate_cluster(cluster_name, create_params, cluster) @ResourceGroupPreparer(name_prefix='hdipy-', location=LOCATION) @StorageAccountPreparer(name_prefix='hdipy', location=LOCATION) def test_create_rserver_cluster(self, resource_group, location, storage_account, storage_account_key): cluster_name = self.get_resource_name('hdisdk-rserver') create_params = self.get_cluster_create_params(location, cluster_name, storage_account, storage_account_key) create_params.properties.cluster_definition.kind = 'RServer' create_params.properties.compute_profile.roles.append( Role( name="edgenode", target_instance_count=1, hardware_profile=HardwareProfile(vm_size="Standard_D4_v2"), os_profile=OsProfile( linux_operating_system_profile=LinuxOperatingSystemProfile( username=self.ssh_username, password=self.ssh_password ) ) ) ) create_poller = self.hdinsight_client.clusters.create(resource_group.name, cluster_name, create_params) cluster = create_poller.result() self.validate_cluster(cluster_name, create_params, cluster) @ResourceGroupPreparer(name_prefix='hdipy-', location=LOCATION) @StorageAccountPreparer(name_prefix='hdipy', location=LOCATION) def 
test_create_mlservices_cluster(self, resource_group, location, storage_account, storage_account_key): cluster_name = self.get_resource_name('hdisdk-mlservices') create_params = self.get_cluster_create_params(location, cluster_name, storage_account, storage_account_key) create_params.properties.cluster_version="3.6" create_params.properties.cluster_definition.kind = 'MLServices' create_params.properties.compute_profile.roles.append( Role( name="edgenode", target_instance_count=1, hardware_profile=HardwareProfile(vm_size="Standard_D4_v2"), os_profile=OsProfile( linux_operating_system_profile=LinuxOperatingSystemProfile( username=self.ssh_username, password=self.ssh_password ) ) ) ) create_poller = self.hdinsight_client.clusters.create(resource_group.name, cluster_name, create_params) cluster = create_poller.result() self.validate_cluster(cluster_name, create_params, cluster) @ResourceGroupPreparer(name_prefix='hdipy-', location=LOCATION) @StorageAccountPreparer(name_prefix='hdipy', location=LOCATION) def test_list_clusters_in_resource_group(self, resource_group, location, storage_account, storage_account_key): rg_name = resource_group.name cluster_name1 = self.get_resource_name("hdisdk-cluster-rg1") cluster_name2 = self.get_resource_name("hdisdk-cluster-rg2") cluster_list = list(self.hdinsight_client.clusters.list_by_resource_group(rg_name)) self.assertFalse(any(c.name == cluster_name1 for c in cluster_list)) self.assertFalse(any(c.name == cluster_name2 for c in cluster_list)) create_params1 = self.get_cluster_create_params(location, cluster_name1, storage_account, storage_account_key) create_params2 = self.get_cluster_create_params(location, cluster_name2, storage_account, storage_account_key) self.hdinsight_client.clusters.create(resource_group.name, cluster_name1, create_params1).wait() self.hdinsight_client.clusters.create(resource_group.name, cluster_name2, create_params2).wait() cluster_list = list(self.hdinsight_client.clusters.list_by_resource_group(rg_name)) 
self.assertTrue(any(c.name == cluster_name1 for c in cluster_list)) self.assertTrue(any(c.name == cluster_name2 for c in cluster_list)) @unittest.skip('This test case lists all clusters under a subscription. ' 'In order to avoid storing those cluster infos in session records, skip it for now.') @ResourceGroupPreparer(name_prefix='hdipy-', location=LOCATION) @StorageAccountPreparer(name_prefix='hdipy', location=LOCATION) def test_list_clusters_in_subscription(self, resource_group, location, storage_account, storage_account_key): rg_name = resource_group.name cluster_name1 = self.get_resource_name("hdisdk-cluster") cluster_name2 = self.get_resource_name("hdisdk-cluster") cluster_list = list(self.hdinsight_client.clusters.list()) self.assertFalse(any(c.name == cluster_name1 for c in cluster_list)) self.assertFalse(any(c.name == cluster_name2 for c in cluster_list)) create_params1 = self.get_cluster_create_params(location, cluster_name1, storage_account, storage_account_key) create_params2 = self.get_cluster_create_params(location, cluster_name2, storage_account, storage_account_key) self.hdinsight_client.clusters.create(resource_group.name, cluster_name1, create_params1).wait() self.hdinsight_client.clusters.create(resource_group.name, cluster_name2, create_params2).wait() cluster_list = list(self.hdinsight_client.clusters.list()) self.assertTrue(any(c.name == cluster_name1 for c in cluster_list)) self.assertTrue(any(c.name == cluster_name2 for c in cluster_list)) @ResourceGroupPreparer(name_prefix='hdipy-', location=LOCATION) @StorageAccountPreparer(name_prefix='hdipy', location=LOCATION) def test_hue_on_running_cluster(self, resource_group, location, storage_account, storage_account_key): cluster_name = self.get_resource_name('hdisdk-applications-hue') create_params = self.get_cluster_create_params(location, cluster_name, storage_account, storage_account_key) create_params.properties.cluster_version="3.6" create_poller = 
self.hdinsight_client.clusters.create(resource_group.name, cluster_name, create_params) cluster = create_poller.result() self.validate_cluster(cluster_name, create_params, cluster) application_name = "MyApplication" application = Application( properties=ApplicationProperties( install_script_actions=[ RuntimeScriptAction( name="InstallHue", uri="https://hdiconfigactions.blob.core.windows.net/linuxhueconfigactionv02/install-hue-uber-v02.sh", parameters="-version latest -port 20000", roles=["edgenode"] ) ], application_type="CustomApplication", compute_profile=ComputeProfile( roles=[ Role( name="edgenode", hardware_profile=HardwareProfile( vm_size="Large" ), target_instance_count = 1 ) ] ) ) ) self.hdinsight_client.applications.create(resource_group.name, cluster_name, application_name, application).wait() application_list = list(self.hdinsight_client.applications.list_by_cluster(resource_group.name, cluster_name)) self.assertGreater(len(application_list), 0) application_match = [item for item in application_list if item.name == application_name] self.assertIsNotNone(application_match) self.assertEqual(len(application_match), 1) self.hdinsight_client.applications.delete(resource_group.name, cluster_name, application_name).wait() application_list = list(self.hdinsight_client.applications.list_by_cluster(resource_group.name, cluster_name)) self.assertEqual(len(application_list), 0) @ResourceGroupPreparer(name_prefix='hdipy-', location=LOCATION) @StorageAccountPreparer(name_prefix='hdipy', location=LOCATION) def test_get_configurations(self, resource_group, location, storage_account, storage_account_key): rg_name = resource_group.name cluster_name = self.get_resource_name('hdisdk-configs') create_params = self.get_cluster_create_params(location, cluster_name, storage_account, storage_account_key) hive_site = 'hive-site' hive_config = { 'key1': 'value1', 'key2': 'value2' } mapred_site = 'mapred-site' mapred_config = { 'key5': 'value5', 'key6': 'value6' } yarn_site = 
'yarn-site' yarn_config = { 'key7': 'value7', 'key8': 'value8' } core_site = 'core-site' gateway = 'gateway' create_params.properties.cluster_definition.configurations[hive_site] = hive_config create_params.properties.cluster_definition.configurations[mapred_site] = mapred_config create_params.properties.cluster_definition.configurations[yarn_site] = yarn_config create_poller = self.hdinsight_client.clusters.create(resource_group.name, cluster_name, create_params) cluster = create_poller.result() self.validate_cluster(cluster_name, create_params, cluster) hive = self.hdinsight_client.configurations.get(rg_name, cluster_name, hive_site) self.assertEqual(hive, hive_config) mapred = self.hdinsight_client.configurations.get(rg_name, cluster_name, mapred_site) self.assertEqual(mapred, mapred_config) yarn = self.hdinsight_client.configurations.get(rg_name, cluster_name, yarn_site) self.assertEqual(yarn, yarn_config) gateway = self.hdinsight_client.configurations.get(rg_name, cluster_name, gateway) self.assertEqual(len(gateway), 3) core = self.hdinsight_client.configurations.get(rg_name, cluster_name, core_site) self.assertEqual(len(core), 2) self.assertTrue('fs.defaultFS' in core) storage_key_prefix = 'fs.azure.account.key.' 
        # (tail of a method whose start is above this view: asserts that at
        # least one core-site key carries the expected storage prefix)
        self.assertTrue(any(key.startswith(storage_key_prefix) for key in core))

    @ResourceGroupPreparer(name_prefix='hdipy-', location=LOCATION)
    @StorageAccountPreparer(name_prefix='hdipy', location=LOCATION)
    def test_http_extended(self, resource_group, location, storage_account, storage_account_key):
        """Create a cluster, then read and update its gateway (HTTP) credentials."""
        rg_name = resource_group.name
        cluster_name = self.get_resource_name('hdisdk-http')
        create_params = self.get_cluster_create_params(location, cluster_name, storage_account, storage_account_key)
        create_poller = self.hdinsight_client.clusters.create(resource_group.name, cluster_name, create_params)
        cluster = create_poller.result()
        self.validate_cluster(cluster_name, create_params, cluster)

        # Verify the initial gateway settings match what the cluster was created with.
        gateway = 'gateway'
        user_name = self.cluster_username
        user_password = self.cluster_password
        http_settings = self.hdinsight_client.configurations.get(rg_name, cluster_name, gateway)
        self.validate_http_settings(http_settings, user_name, user_password)

        # Rotate the gateway password and verify the change took effect.
        new_password = '******'
        update_params = {
            'restAuthCredential.isEnabled': 'true',
            'restAuthCredential.username': user_name,
            'restAuthCredential.password': new_password
        }
        self.hdinsight_client.configurations.update(rg_name, cluster_name, gateway, update_params).wait()
        http_settings = self.hdinsight_client.configurations.get(rg_name, cluster_name, gateway)
        self.validate_http_settings(http_settings, user_name, new_password)

    def test_get_usages(self):
        """List regional usages and check every usage record is fully populated."""
        usages = self.hdinsight_client.locations.list_usages(LOCATION)
        self.assertIsNotNone(usages)
        self.assertIsNotNone(usages.value)
        for usage in usages.value:
            self.assertIsNotNone(usage)
            self.assertIsNotNone(usage.current_value)
            self.assertIsNotNone(usage.limit)
            self.assertIsNotNone(usage.name)
            self.assertIsNotNone(usage.unit)

    @ResourceGroupPreparer(name_prefix='hdipy-', location=LOCATION)
    @StorageAccountPreparer(name_prefix='hdipy', location=LOCATION)
    def test_oms_on_running_cluster(self, resource_group, location, storage_account, storage_account_key):
        """Enable then disable OMS monitoring on a running Spark cluster."""
        rg_name = resource_group.name
        cluster_name = self.get_resource_name('hdisdk-oms')
        create_params = self.get_cluster_create_params(location, cluster_name, storage_account, storage_account_key)
        # OMS monitoring is exercised on a Spark cluster (default params create hadoop).
        create_params.properties.cluster_definition.kind = 'Spark'
        create_params.properties.cluster_version="3.6"
        create_poller = self.hdinsight_client.clusters.create(resource_group.name, cluster_name, create_params)
        cluster = create_poller.result()
        self.validate_cluster(cluster_name, create_params, cluster)

        # Enable monitoring and confirm status reports the linked workspace.
        self.hdinsight_client.extensions.enable_monitoring(rg_name, cluster_name, self.workspace_id).wait()
        monitoring_status = self.hdinsight_client.extensions.get_monitoring_status(rg_name, cluster_name)
        self.assertTrue(monitoring_status.cluster_monitoring_enabled)
        self.assertEqual(monitoring_status.workspace_id, self.workspace_id)

        # Disable monitoring and confirm the workspace link is cleared.
        self.hdinsight_client.extensions.disable_monitoring(rg_name, cluster_name).wait()
        monitoring_status = self.hdinsight_client.extensions.get_monitoring_status(rg_name, cluster_name)
        self.assertFalse(monitoring_status.cluster_monitoring_enabled)
        self.assertIsNone(monitoring_status.workspace_id)

    @ResourceGroupPreparer(name_prefix='hdipy-', location=LOCATION)
    @StorageAccountPreparer(name_prefix='hdipy', location=LOCATION)
    def test_resize_cluster(self, resource_group, location, storage_account, storage_account_key):
        """Resize the workernode role by one instance and verify the new count."""
        rg_name = resource_group.name
        cluster_name = self.get_resource_name('hdisdk-clusterresize')
        create_params = self.get_cluster_create_params(location, cluster_name, storage_account, storage_account_key)
        workernode_params = next(item for item in create_params.properties.compute_profile.roles
                                 if item.name == 'workernode')
        create_poller = self.hdinsight_client.clusters.create(resource_group.name, cluster_name, create_params)
        cluster = create_poller.result()
        self.validate_cluster(cluster_name, create_params, cluster)

        # Baseline: deployed workernode count equals the requested count.
        cluster = self.hdinsight_client.clusters.get(rg_name, cluster_name)
        workernode = next(item for item in cluster.properties.compute_profile.roles
                          if item.name == 'workernode')
        self.assertEqual(workernode_params.target_instance_count, workernode.target_instance_count)

        # Grow by one node and verify the cluster reflects the new size.
        self.hdinsight_client.clusters.resize(rg_name, cluster_name, workernode_params.target_instance_count + 1).wait()
        cluster = self.hdinsight_client.clusters.get(rg_name, cluster_name)
        workernode = next(item for item in cluster.properties.compute_profile.roles
                          if item.name == 'workernode')
        self.assertEqual(workernode_params.target_instance_count + 1, workernode.target_instance_count)

    @ResourceGroupPreparer(name_prefix='hdipy-', location=LOCATION)
    @StorageAccountPreparer(name_prefix='hdipy', location=LOCATION)
    def test_script_actions_on_running_cluster(self, resource_group, location, storage_account, storage_account_key):
        """Exercise the script-action lifecycle: persist, delete, history, promote."""
        rg_name = resource_group.name
        cluster_name = self.get_resource_name('hdisdk-scriptactions')
        create_params = self.get_cluster_create_params(location, cluster_name, storage_account, storage_account_key)
        create_poller = self.hdinsight_client.clusters.create(resource_group.name, cluster_name, create_params)
        cluster = create_poller.result()
        self.validate_cluster(cluster_name, create_params, cluster)

        install_giraph = "https://hdiconfigactions.blob.core.windows.net/linuxgiraphconfigactionv01/giraph-installer-v01.sh"
        script_name = "script1"

        # Execute script actions, and persist on success.
        script_action_params = self.get_execute_script_action_params(script_name, install_giraph)
        self.hdinsight_client.clusters.execute_script_actions(rg_name, cluster_name, True, script_action_params).wait()

        # List script actions and validate script is persisted.
        script_action_list = list(self.hdinsight_client.script_actions.list_by_cluster(rg_name, cluster_name))
        self.assertEqual(1, len(script_action_list))
        script_action = script_action_list[0]
        self.assertEqual(script_action_params[0].name, script_action.name)
        self.assertEqual(script_action_params[0].uri, script_action.uri)
        self.assertEqual(script_action_params[0].roles, script_action.roles)

        # Delete script action.
        self.hdinsight_client.script_actions.delete(rg_name, cluster_name, script_name)

        # List script actions and validate script is deleted.
        script_action_list = list(self.hdinsight_client.script_actions.list_by_cluster(rg_name, cluster_name))
        self.assertEqual(0, len(script_action_list))

        # List script action history and validate script appears there.
        list_history_response = list(self.hdinsight_client.script_execution_history.list_by_cluster(rg_name, cluster_name))
        self.assertEqual(1, len(list_history_response))
        script_action = list_history_response[0]
        self.assertEqual(1, len(script_action.execution_summary))
        self.assertEqual(script_action_params[0].name, script_action.name)
        self.assertEqual(script_action_params[0].uri, script_action.uri)
        self.assertEqual(script_action_params[0].roles, script_action.roles)
        self.assertEqual("Succeeded", script_action.status)

        # Get the script action by ID and validate it's the same action.
        script_action = self.hdinsight_client.script_actions.get_execution_detail(rg_name, cluster_name, str(list_history_response[0].script_execution_id))
        self.assertEqual(script_action_params[0].name, script_action.name)

        # Execute script actions, but don't persist on success.
        script_action_params = self.get_execute_script_action_params("script5baf", install_giraph)
        self.hdinsight_client.clusters.execute_script_actions(rg_name, cluster_name, False, script_action_params).wait()

        # List script action history and validate the new script also appears.
        list_history_response = list(self.hdinsight_client.script_execution_history.list_by_cluster(rg_name, cluster_name))
        self.assertEqual(2, len(list_history_response))
        script_action = next(a for a in list_history_response if a.name == script_action_params[0].name)
        self.assertIsNotNone(script_action)
        self.assertEqual(1, len(script_action.execution_summary))
        self.assertEqual(script_action_params[0].name, script_action.name)
        self.assertEqual(script_action_params[0].uri, script_action.uri)
        self.assertEqual(script_action_params[0].roles, script_action.roles)
        self.assertEqual("Succeeded", script_action.status)

        # Promote non-persisted script.
        self.hdinsight_client.script_execution_history.promote(rg_name, cluster_name, str(list_history_response[0].script_execution_id))

        # List script action list and validate the promoted script is the only one there.
        script_action_list = list(self.hdinsight_client.script_actions.list_by_cluster(rg_name, cluster_name))
        self.assertEqual(1, len(script_action_list))
        script_action = script_action_list[0]
        self.assertEqual(script_action_params[0].name, script_action.name)
        self.assertEqual(script_action_params[0].uri, script_action.uri)
        self.assertEqual(script_action_params[0].roles, script_action.roles)

        # List script action history and validate both executed scripts are still
        # there and succeeded (promotion does not add a new history entry).
        list_history_response = list(self.hdinsight_client.script_execution_history.list_by_cluster(rg_name, cluster_name))
        self.assertEqual(2, len(list_history_response))
        self.assertTrue(all(a.status == "Succeeded" for a in list_history_response))

    def get_execute_script_action_params(self, script_name, script_uri):
        """Build a one-element script-action list targeting head and worker nodes."""
        return [
            RuntimeScriptAction(
                name = script_name,
                uri = script_uri,
                roles = ['headnode', 'workernode']
            )
        ]

    def get_cluster_create_params_for_adls_gen1(self, location, cluster_name, create_params=None):
        """Augment create params with an ADLS Gen1 cluster identity.

        When no storage account is configured yet, also points the default
        filesystem (core-site) at the ADLS home mountpoint.
        """
        if create_params is None:
            create_params = self.get_cluster_create_params(location, cluster_name)
        cluster_identity = 'clusterIdentity'
        cluster_identity_config = {
            'clusterIdentity.applicationId': self.adls_client_id,
            'clusterIdentity.certificate': self.cert_content,
            'clusterIdentity.aadTenantId': 'https://login.windows.net/{}'.format(self.tenant_id),
            'clusterIdentity.resourceUri': 'https://datalake.azure.net/',
            'clusterIdentity.certificatePassword': self.cert_password
        }
        create_params.properties.cluster_definition.configurations[cluster_identity] = cluster_identity_config
        # ADLS becomes the default FS only if no storage account was added before.
        is_default = len(create_params.properties.storage_profile.storageaccounts) == 0
        if is_default:
            core_site = 'core-site'
            core_config = {
                'fs.defaultFS': 'adl://home',
                'dfs.adls.home.hostname': '{}.azuredatalakestore.net'.format(self.adls_account_name),
                'dfs.adls.home.mountpoint': self.adls_home_mountpoint
            }
            create_params.properties.cluster_definition.configurations[core_site] = core_config
        return create_params

    def get_cluster_create_params_for_adls_gen2(self, location, cluster_name, storage_account, storage_account_key, create_params=None):
        """Append an ADLS Gen2 (DFS endpoint) storage account to the create params."""
        if create_params is None:
            create_params = self.get_cluster_create_params(location, cluster_name)
        # The Gen2 account is default only when it is the first storage account.
        is_default = len(create_params.properties.storage_profile.storageaccounts) == 0
        create_params.properties.storage_profile.storageaccounts.append(
            StorageAccount(
                name=storage_account.name + STORAGE_ADLS_FILE_SYSTEM_ENDPOINT_SUFFIX,
                key=storage_account_key,
                file_system=cluster_name.lower(),
                is_default= is_default
            )
        )
        return create_params

    def get_cluster_create_params(self, location, cluster_name, storage_account=None, storage_account_key=None):
        """Build baseline HDInsight 3.6 Linux/hadoop create params.

        Roles: 2 head, 3 worker, 3 zookeeper. A blob storage account is attached
        as default storage only when storage arguments are provided.
        """
        storage_accounts = []
        if storage_account:
            # Specify storage account details only when storage arguments are provided
            storage_accounts.append(
                StorageAccount(
                    name=storage_account.name + STORAGE_BLOB_SERVICE_ENDPOINT_SUFFIX,
                    key=storage_account_key,
                    container=cluster_name.lower(),
                    is_default=True
                )
            )
        return ClusterCreateParametersExtended(
            location=location,
            tags={},
            properties=ClusterCreateProperties(
                cluster_version="3.6",
                os_type=OSType.linux,
                tier=Tier.standard,
                cluster_definition=ClusterDefinition(
                    kind="hadoop",
                    configurations={
                        "gateway": {
                            "restAuthCredential.isEnabled": "true",
                            "restAuthCredential.username": self.cluster_username,
                            "restAuthCredential.password": self.cluster_password
                        }
                    }
                ),
                compute_profile=ComputeProfile(
                    roles=[
                        Role(
                            name="headnode",
                            target_instance_count=2,
                            hardware_profile=HardwareProfile(vm_size="Large"),
                            os_profile=OsProfile(
                                linux_operating_system_profile=LinuxOperatingSystemProfile(
                                    username=self.ssh_username,
                                    password=self.ssh_password
                                )
                            )
                        ),
                        Role(
                            name="workernode",
                            target_instance_count=3,
                            hardware_profile=HardwareProfile(vm_size="Large"),
                            os_profile=OsProfile(
                                linux_operating_system_profile=LinuxOperatingSystemProfile(
                                    username=self.ssh_username,
                                    password=self.ssh_password
                                )
                            )
                        ),
                        Role(
                            name="zookeepernode",
                            target_instance_count=3,
                            hardware_profile=HardwareProfile(vm_size="Small"),
                            os_profile=OsProfile(
                                linux_operating_system_profile=LinuxOperatingSystemProfile(
                                    username=self.ssh_username,
                                    password=self.ssh_password
                                )
                            )
                        )
                    ]
                ),
                storage_profile=StorageProfile(
                    storageaccounts=storage_accounts
                )
            )
        )

    def validate_cluster(self, cluster_name, create_parameters, cluster_response):
        """Assert a created cluster matches the parameters it was created with."""
        self.assertEqual(cluster_name, cluster_response.name)
        self.assertEqual(create_parameters.properties.tier, cluster_response.properties.tier)
        self.assertIsNotNone(cluster_response.etag)
        self.assertTrue(cluster_response.id.endswith(cluster_name))
        self.assertEqual("Running", cluster_response.properties.cluster_state)
        self.assertEqual("Microsoft.HDInsight/clusters", cluster_response.type)
        self.assertEqual(create_parameters.location, cluster_response.location)
        self.assertEqual(create_parameters.tags, cluster_response.tags)
        # Exactly one HTTPS and one SSH connectivity endpoint are expected.
        self.assertEqual(1, len([endpoint for endpoint in cluster_response.properties.connectivity_endpoints
                                 if endpoint.name == "HTTPS"]))
        self.assertEqual(1, len([endpoint for endpoint in cluster_response.properties.connectivity_endpoints
                                 if endpoint.name == "SSH"]))
        self.assertEqual(create_parameters.properties.os_type, cluster_response.properties.os_type)
        self.assertIsNone(cluster_response.properties.errors)
        self.assertEqual(HDInsightClusterProvisioningState.succeeded, cluster_response.properties.provisioning_state)
        self.assertEqual(create_parameters.properties.cluster_definition.kind, cluster_response.properties.cluster_definition.kind)
        # Responses carry a full version (e.g. "3.6.x"); compare the major.minor prefix.
        self.assertEqual(create_parameters.properties.cluster_version, cluster_response.properties.cluster_version[0:3])
        # Configurations (incl. gateway credentials) are never echoed back.
        self.assertIsNone(cluster_response.properties.cluster_definition.configurations)

    def validate_http_settings(self, http_settings, expected_user_name, expected_user_password):
        """Assert gateway (HTTP) settings are enabled with the expected credentials."""
        self.assertIsNotNone(http_settings)
        self.assertEqual('true', http_settings['restAuthCredential.isEnabled'])
        self.assertEqual(expected_user_name, http_settings['restAuthCredential.username'])
        self.assertEqual(expected_user_password, http_settings['restAuthCredential.password'])

    def _setup_scrubber(self):
        """Register extra name pairs so ADLS identifiers are scrubbed from recordings."""
        super(MgmtHDInsightTest, self)._setup_scrubber()
        constants_to_scrub = ['HDI_ADLS_ACCOUNT_NAME', 'HDI_ADLS_CLIENT_ID']
        for key in constants_to_scrub:
            # Only scrub when both the real and fake settings define the constant.
            if hasattr(self.settings, key) and hasattr(self._fake_settings, key):
                self.scrubber.register_name_pair(getattr(self.settings, key),
                                                 getattr(self._fake_settings, key))
class AzureRMKeyVaultSecret(AzureRMModuleBase):
    ''' Module that creates or deletes secrets in Azure KeyVault '''

    def __init__(self):
        # Argument schema accepted from the playbook task.
        self.module_arg_spec = dict(
            secret_name=dict(type='str', required=True),
            secret_value=dict(type='str', aliases=['secret'], no_log=True),
            keyvault_uri=dict(type='str', required=True),
            state=dict(type='str', default='present', choices=['present', 'absent'])
        )

        # A secret value is mandatory whenever the secret should exist.
        presence_requirements = [
            ('state', 'present', ['secret_value'])
        ]

        self.results = dict(
            changed=False,
            state=dict()
        )

        # Populated from task arguments inside exec_module().
        self.secret_name = None
        self.secret_value = None
        self.keyvault_uri = None
        self.state = None
        self.data_creds = None
        self.client = None
        self.tags = None

        super(AzureRMKeyVaultSecret, self).__init__(self.module_arg_spec,
                                                    supports_check_mode=True,
                                                    required_if=presence_requirements,
                                                    supports_tags=True)

    def exec_module(self, **kwargs):
        """Reconcile the Key Vault secret to the requested state and return results."""
        # Mirror every declared argument (plus tags) onto the instance.
        for param in list(self.module_arg_spec.keys()) + ['tags']:
            setattr(self, param, kwargs[param])

        # Create KeyVault Client using KeyVault auth class and auth_callback
        self.client = KeyVaultClient(self.azure_credentials)

        state_info = dict()
        needs_change = False

        try:
            state_info['secret_id'] = self.get_secret(self.secret_name)
            # The secret exists, so only 'absent' would require a change.
            needs_change = self.state == 'absent'
        except KeyVaultErrorException:
            # The secret is missing, so only 'present' would require a change.
            needs_change = self.state == 'present'

        self.results['changed'] = needs_change
        self.results['state'] = state_info

        if self.check_mode:
            # Report the would-be outcome without touching the vault.
            if needs_change:
                if self.state == 'present':
                    self.results['state']['status'] = 'Created'
                elif self.state == 'absent':
                    self.results['state']['status'] = 'Deleted'
        elif needs_change:
            if self.state == 'present':
                # Create secret
                state_info['secret_id'] = self.create_secret(self.secret_name, self.secret_value, self.tags)
                self.results['state'] = state_info
                self.results['state']['status'] = 'Created'
            elif self.state == 'absent':
                # Delete secret
                state_info['secret_id'] = self.delete_secret(self.secret_name)
                self.results['state'] = state_info
                self.results['state']['status'] = 'Deleted'

        return self.results

    def get_secret(self, name, version=''):
        ''' Gets an existing secret '''
        bundle = self.client.get_secret(self.keyvault_uri, name, version)
        if not bundle:
            # No bundle returned; mirror the original's implicit None.
            return None
        return KeyVaultId.parse_secret_id(bundle.id).id

    def create_secret(self, name, secret, tags):
        ''' Creates a secret '''
        created = self.client.set_secret(self.keyvault_uri, name, secret, tags)
        return KeyVaultId.parse_secret_id(created.id).id

    def delete_secret(self, name):
        ''' Deletes a secret '''
        removed = self.client.delete_secret(self.keyvault_uri, name)
        return KeyVaultId.parse_secret_id(removed.id).id