def validate_ssh_key(namespace):
    """Resolve ``namespace.ssh_key_value`` into SSH public key *content*.

    The value may be raw key text or a path to a public key file
    (defaulting to ``~/.ssh/id_rsa.pub``).  When the value is neither an
    existing file nor valid key text, a new key pair is generated if
    ``--generate-ssh-keys`` was requested; otherwise a CLIError is raised.
    """
    default_pub_path = os.path.join(os.path.expanduser('~'), '.ssh', 'id_rsa.pub')
    key_source = namespace.ssh_key_value or default_pub_path

    if os.path.exists(key_source):
        # The value points at an existing file: load its contents.
        logger.info('Use existing SSH public key file: %s', key_source)
        with open(key_source, 'r') as key_file:
            namespace.ssh_key_value = key_file.read()
        return

    if keys.is_valid_ssh_rsa_public_key(key_source):
        # Raw public key text was supplied directly: keep it as-is.
        namespace.ssh_key_value = key_source
        return

    if not namespace.generate_ssh_keys:
        raise CLIError('An RSA key file or key value must be supplied to SSH Key Value. '
                       'You can use --generate-ssh-keys to let CLI generate one for you')

    # figure out appropriate file names:
    # 'base_name'(with private keys), and 'base_name.pub'(with public keys)
    public_key_filepath = key_source
    if public_key_filepath[-4:].lower() == '.pub':
        private_key_filepath = public_key_filepath[:-4]
    else:
        private_key_filepath = public_key_filepath + '.private'
    namespace.ssh_key_value = keys.generate_ssh_keys(private_key_filepath, public_key_filepath)
    logger.warning("SSH key files '%s' and '%s' have been generated under ~/.ssh to "
                   "allow SSH access to the VM. If using machines without "
                   "permanent storage, back up your keys to a safe location.",
                   private_key_filepath, public_key_filepath)
def validate_ssh_key(namespace):
    """Normalize ``namespace.ssh_key_value`` into SSH public key content.

    Accepts raw key text or a path to a public key file (defaulting to
    ``~/.ssh/id_rsa.pub``); generates a fresh key pair when
    ``--generate-ssh-keys`` was passed and no usable key was found.
    Does nothing when ``--no-ssh-key`` was requested.

    :raises CLIError: if no valid key is available and generation was not
        requested.
    """
    # Respect --no-ssh-key: nothing to validate or generate.
    if hasattr(namespace, 'no_ssh_key') and namespace.no_ssh_key:
        return
    string_or_file = (namespace.ssh_key_value or os.path.join(
        os.path.expanduser('~'), '.ssh', 'id_rsa.pub'))
    content = string_or_file
    if os.path.exists(string_or_file):
        # The value is a path to an existing key file: load its contents.
        logger.info('Use existing SSH public key file: %s', string_or_file)
        with open(string_or_file, 'r') as f:
            content = f.read()
    elif not keys.is_valid_ssh_rsa_public_key(content):
        if namespace.generate_ssh_keys:
            # figure out appropriate file names:
            # 'base_name'(with private keys), and 'base_name.pub'(with public keys)
            public_key_filepath = string_or_file
            if public_key_filepath[-4:].lower() == '.pub':
                private_key_filepath = public_key_filepath[:-4]
            else:
                private_key_filepath = public_key_filepath + '.private'
            content = keys.generate_ssh_keys(private_key_filepath, public_key_filepath)
            logger.warning(
                "SSH key files '%s' and '%s' have been generated under ~/.ssh to "
                "allow SSH access to the VM. If using machines without "
                "permanent storage like Azure Cloud Shell without an attached "
                "file share, back up your keys to a safe location",
                private_key_filepath, public_key_filepath)
        else:
            raise CLIError(
                'An RSA key file or key value must be supplied to SSH Key Value. '
                'You can use --generate-ssh-keys to let CLI generate one for you'
            )
    # Replace the path (or leave the raw key) with the actual key content.
    namespace.ssh_key_value = content
def test_generate_specfied_ssh_key_files(self): temp_dir_name = tempfile.mkdtemp(prefix="ssh_dir_") # cleanup temporary directory and its contents self.addCleanup(shutil.rmtree, path=temp_dir_name) # first create file paths for the keys to be generated fd, private_key_file = tempfile.mkstemp(dir=temp_dir_name) os.close(fd) public_key_file = private_key_file + '.pub' os.remove(private_key_file) args = mock.MagicMock() args.ssh_key_name = None args.ssh_key_value = [public_key_file] args.generate_ssh_keys = True # 1 verify we generate key files if not existing validate_ssh_key(args) generated_public_key_string = args.ssh_key_value[0] self.assertTrue(bool(args.ssh_key_value)) self.assertTrue( is_valid_ssh_rsa_public_key(generated_public_key_string)) self.assertTrue(os.path.isfile(private_key_file)) # 2 verify we load existing key files # for convinience we will reuse the generated file in the previous step args2 = mock.MagicMock() args2.ssh_key_name = None args2.ssh_key_value = [generated_public_key_string] args2.generate_ssh_keys = False validate_ssh_key(args2) # we didn't regenerate self.assertEqual(generated_public_key_string, args.ssh_key_value[0]) # 3 verify we do not generate unless told so fd, private_key_file2 = tempfile.mkstemp(dir=temp_dir_name) os.close(fd) public_key_file2 = private_key_file2 + '.pub' args3 = mock.MagicMock() args3.ssh_key_name = None args3.ssh_key_value = [public_key_file2] args3.generate_ssh_keys = False with self.assertRaises(CLIError): validate_ssh_key(args3) # 4 verify file naming if the pub file doesn't end with .pub fd, public_key_file4 = tempfile.mkstemp(dir=temp_dir_name) os.close(fd) public_key_file4 += '1' # make it nonexisting args4 = mock.MagicMock() args4.ssh_key_name = None args4.ssh_key_value = [public_key_file4] args4.generate_ssh_keys = True validate_ssh_key(args4) self.assertTrue(os.path.isfile(public_key_file4 + '.private')) self.assertTrue(os.path.isfile(public_key_file4))
def test_generate_specfied_ssh_key_files(self):
    """Exercise validate_ssh_key: generation, loading, error path, and
    private-key naming when the public path has no '.pub' suffix."""
    work_dir = tempfile.mkdtemp(prefix="ssh_dir_")
    # remove the temporary directory and everything in it when done
    self.addCleanup(shutil.rmtree, path=work_dir)

    # Reserve a key-pair path inside the temp dir, then delete the
    # placeholder so the validator sees a nonexistent public key file.
    handle, priv_path = tempfile.mkstemp(dir=work_dir)
    os.close(handle)
    pub_path = priv_path + '.pub'
    os.remove(priv_path)

    ns = mock.MagicMock()
    ns.ssh_key_value = pub_path
    ns.generate_ssh_keys = True

    # 1 verify we generate key files if not existing
    validate_ssh_key(ns)
    generated_key_text = ns.ssh_key_value
    self.assertTrue(bool(ns.ssh_key_value))
    self.assertTrue(is_valid_ssh_rsa_public_key(generated_key_text))
    self.assertTrue(os.path.isfile(priv_path))

    # 2 verify we load existing key files
    # reuse the key generated in the previous step for convenience
    ns_reload = mock.MagicMock()
    ns_reload.ssh_key_value = generated_key_text
    ns_reload.generate_ssh_keys = False
    validate_ssh_key(ns_reload)
    # no regeneration happened
    self.assertEqual(generated_key_text, ns.ssh_key_value)

    # 3 verify we do not generate unless told so
    handle, priv_path2 = tempfile.mkstemp(dir=work_dir)
    os.close(handle)
    ns_no_gen = mock.MagicMock()
    ns_no_gen.ssh_key_value = priv_path2 + '.pub'
    ns_no_gen.generate_ssh_keys = False
    with self.assertRaises(CLIError):
        validate_ssh_key(ns_no_gen)

    # 4 verify file naming if the pub file doesn't end with .pub
    handle, pub_path4 = tempfile.mkstemp(dir=work_dir)
    os.close(handle)
    pub_path4 += '1'  # make it nonexisting
    ns_odd_name = mock.MagicMock()
    ns_odd_name.ssh_key_value = pub_path4
    ns_odd_name.generate_ssh_keys = True
    validate_ssh_key(ns_odd_name)
    # a non-'.pub' public path yields a '<name>.private' private key file
    self.assertTrue(os.path.isfile(pub_path4 + '.private'))
    self.assertTrue(os.path.isfile(pub_path4))
def _update_user_account_settings(params, admin_user_name, ssh_key, password):
    """Update account settings of cluster or file server creation parameters

    :param models.ClusterCreateParameters or models.FileServerCreateParameters params: params to
     update
    :param str or None admin_user_name: name of admin user to create.
    :param str or None ssh_key: ssh public key value or path to the file containing the key.
    :param str or None password: password.
    :return models.ClusterCreateParameters: updated parameters.
    :raises CLIError: if the ssh key is invalid, or neither a user name nor a
        credential (password or key) can be determined.
    """
    result = copy.deepcopy(params)
    # Cluster params carry user_account_settings directly; file server params
    # nest them under ssh_configuration.
    if hasattr(result, 'user_account_settings'):
        parent = result
    else:
        if result.ssh_configuration is None:
            result.ssh_configuration = models.SshConfiguration(user_account_settings=None)
        parent = result.ssh_configuration
    if parent.user_account_settings is None:
        parent.user_account_settings = models.UserAccountSettings()
    # Get effective user name, password and key trying them in the following order: provided via
    # command line, provided in the config file, current user name and his default public ssh key.
    effective_user_name = admin_user_name or parent.user_account_settings.admin_user_name or getpass.getuser()
    effective_password = password or parent.user_account_settings.admin_user_password
    # Use default ssh public key only if no password is configured.
    effective_key = (ssh_key or parent.user_account_settings.admin_user_ssh_public_key or
                     (None if effective_password else _get_default_ssh_public_key_location()))
    if effective_key:
        # The key may be given as a path to a public key file.
        expanded_path = os.path.expanduser(effective_key)
        if os.path.exists(expanded_path):
            with open(expanded_path) as f:
                effective_key = f.read()
        # FIX: the previous blanket `try/except Exception` wrapped its own
        # `raise CLIError` and so caught the error it had just raised, while
        # also discarding the underlying cause of validator failures.  Catch
        # only the validator call and preserve the exception chain.
        try:
            key_is_valid = keys.is_valid_ssh_rsa_public_key(effective_key)
        except Exception as err:  # validator may raise on malformed key text
            raise CLIError('Incorrect ssh public key value.') from err
        if not key_is_valid:
            raise CLIError('Incorrect ssh public key value.')
    parent.user_account_settings.admin_user_name = effective_user_name
    parent.user_account_settings.admin_user_ssh_public_key = effective_key
    parent.user_account_settings.admin_user_password = effective_password
    if not parent.user_account_settings.admin_user_name:
        raise CLIError('Please provide admin user name.')
    if (not parent.user_account_settings.admin_user_ssh_public_key and
            not parent.user_account_settings.admin_user_password):
        raise CLIError('Please provide admin user password or ssh key.')
    return result
def _update_user_account_settings(params, admin_user_name, ssh_key, password):
    """Update account settings of cluster or file server creation parameters

    :param models.ClusterCreateParameters or models.FileServerCreateParameters params: params to
     update
    :param str or None admin_user_name: name of admin user to create.
    :param str or None ssh_key: ssh public key value or path to the file containing the key.
    :param str or None password: password.
    :return models.ClusterCreateParameters: updated parameters.
    :raises CLIError: if the ssh key is invalid, or neither a user name nor a
        credential (password or key) ends up configured.
    """
    result = copy.deepcopy(params)
    key = ssh_key
    if ssh_key:
        # ssh_key may be a path to a public key file rather than the key text.
        if os.path.exists(os.path.expanduser(ssh_key)):
            with open(os.path.expanduser(ssh_key)) as f:
                key = f.read()
        if not is_valid_ssh_rsa_public_key(key):
            raise CLIError('Incorrect ssh public key value.')
    # Cluster params carry user_account_settings directly; file server params
    # nest them under ssh_configuration.
    if hasattr(result, 'user_account_settings'):
        parent = result
    else:
        if result.ssh_configuration is None:
            result.ssh_configuration = models.SshConfiguration(None)
        parent = result.ssh_configuration
    if parent.user_account_settings is None:
        parent.user_account_settings = models.UserAccountSettings(
            admin_user_name=admin_user_name, admin_user_ssh_public_key=key)
    # Explicit command-line values override whatever was already configured.
    if admin_user_name:
        parent.user_account_settings.admin_user_name = admin_user_name
    if key:
        parent.user_account_settings.admin_user_ssh_public_key = key
    if password:
        parent.user_account_settings.admin_user_password = password
    if not parent.user_account_settings.admin_user_name:
        raise CLIError('Please provide admin user name.')
    if (not parent.user_account_settings.admin_user_ssh_public_key and
            not parent.user_account_settings.admin_user_password):
        raise CLIError('Please provide admin user password or ssh key.')
    return result
def test_generate_specfied_ssh_key_files(self):
    """Exercise validate_ssh_key: generation, loading, error path, and
    private-key naming when the public path has no '.pub' suffix.

    FIX: the previous version discarded the open fd returned by
    tempfile.mkstemp (descriptor leak) and never removed the temp files it
    created; fds are now closed and files registered for cleanup, matching
    the sibling variants of this test.
    """
    def remove_if_exists(path):
        # cleanup helper: each file may or may not have been created
        if os.path.exists(path):
            os.remove(path)

    fd, private_key_file = tempfile.mkstemp()
    os.close(fd)  # mkstemp returns an open fd; close it to avoid a leak
    public_key_file = private_key_file + '.pub'
    self.addCleanup(remove_if_exists, private_key_file)
    self.addCleanup(remove_if_exists, public_key_file)
    args = mock.MagicMock()
    args.ssh_key_value = public_key_file
    args.generate_ssh_keys = True

    # 1 verify we generate key files if not existing
    validate_ssh_key(args)
    generated_public_key_string = args.ssh_key_value
    self.assertTrue(bool(args.ssh_key_value))
    self.assertTrue(
        is_valid_ssh_rsa_public_key(generated_public_key_string))
    self.assertTrue(os.path.isfile(private_key_file))

    # 2 verify we load existing key files
    # for convenience we will reuse the generated file in the previous step
    args2 = mock.MagicMock()
    args2.ssh_key_value = generated_public_key_string
    args2.generate_ssh_keys = False
    validate_ssh_key(args2)
    # we didn't regenerate
    self.assertEqual(generated_public_key_string, args.ssh_key_value)

    # 3 verify we do not generate unless told so
    fd, private_key_file2 = tempfile.mkstemp()
    os.close(fd)
    public_key_file2 = private_key_file2 + '.pub'
    self.addCleanup(remove_if_exists, private_key_file2)
    self.addCleanup(remove_if_exists, public_key_file2)
    args3 = mock.MagicMock()
    args3.ssh_key_value = public_key_file2
    args3.generate_ssh_keys = False
    with self.assertRaises(CLIError):
        validate_ssh_key(args3)

    # 4 verify file naming if the pub file doesn't end with .pub
    fd, public_key_file4 = tempfile.mkstemp()
    os.close(fd)
    self.addCleanup(remove_if_exists, public_key_file4)
    public_key_file4 += '1'  # make it nonexisting
    self.addCleanup(remove_if_exists, public_key_file4)
    self.addCleanup(remove_if_exists, public_key_file4 + '.private')
    args4 = mock.MagicMock()
    args4.ssh_key_value = public_key_file4
    args4.generate_ssh_keys = True
    validate_ssh_key(args4)
    self.assertTrue(os.path.isfile(public_key_file4 + '.private'))
    self.assertTrue(os.path.isfile(public_key_file4))
def test_generate_specfied_ssh_key_files(self):
    """Exercise validate_ssh_key: generation, loading, error path, and
    private-key naming when the public path has no '.pub' suffix."""
    # NOTE(review): the fd returned by mkstemp is discarded here (descriptor
    # leak) and the created temp files are never removed — confirm whether
    # cleanup should be added as in the tempdir-based variant of this test.
    _, private_key_file = tempfile.mkstemp()
    public_key_file = private_key_file + '.pub'
    args = mock.MagicMock()
    args.ssh_key_value = public_key_file
    args.generate_ssh_keys = True
    # 1 verify we generate key files if not existing
    validate_ssh_key(args)
    generated_public_key_string = args.ssh_key_value
    self.assertTrue(bool(args.ssh_key_value))
    self.assertTrue(is_valid_ssh_rsa_public_key(generated_public_key_string))
    self.assertTrue(os.path.isfile(private_key_file))
    # 2 verify we load existing key files
    # for convenience we will reuse the generated file in the previous step
    args2 = mock.MagicMock()
    args2.ssh_key_value = generated_public_key_string
    args2.generate_ssh_keys = False
    validate_ssh_key(args2)
    # we didn't regenerate
    self.assertEqual(generated_public_key_string, args.ssh_key_value)
    # 3 verify we do not generate unless told so
    _, private_key_file2 = tempfile.mkstemp()
    public_key_file2 = private_key_file2 + '.pub'
    args3 = mock.MagicMock()
    args3.ssh_key_value = public_key_file2
    args3.generate_ssh_keys = False
    with self.assertRaises(CLIError):
        validate_ssh_key(args3)
    # 4 verify file naming if the pub file doesn't end with .pub
    _, public_key_file4 = tempfile.mkstemp()
    public_key_file4 += '1'  # make it nonexisting
    args4 = mock.MagicMock()
    args4.ssh_key_value = public_key_file4
    args4.generate_ssh_keys = True
    validate_ssh_key(args4)
    # a non-'.pub' public path yields a '<name>.private' private key file
    self.assertTrue(os.path.isfile(public_key_file4 + '.private'))
    self.assertTrue(os.path.isfile(public_key_file4))
def update_user_account_settings(params, admin_user_name, ssh_key, password):
    """Update account settings of cluster or file server creation parameters

    :param models.ClusterCreateParameters or models.FileServerCreateParameters params: params to
     update
    :param str or None admin_user_name: name of admin user to create.
    :param str or None ssh_key: ssh public key value or path to the file containing the key.
    :param str or None password: password.
    """
    public_key = ssh_key
    if ssh_key:
        # Treat the value as a file path first; fall back to raw key text.
        if os.path.exists(os.path.expanduser(ssh_key)):
            with open(os.path.expanduser(ssh_key)) as key_file:
                public_key = key_file.read()
        if not is_valid_ssh_rsa_public_key(public_key):
            raise CLIError('Incorrect ssh public key value.')

    # Cluster params carry user_account_settings directly; file server params
    # nest them under ssh_configuration.
    if hasattr(params, 'user_account_settings'):
        holder = params
    else:
        if params.ssh_configuration is None:
            params.ssh_configuration = models.SshConfiguration(None)
        holder = params.ssh_configuration

    settings = holder.user_account_settings
    if settings is None:
        settings = models.UserAccountSettings(
            admin_user_name=admin_user_name, admin_user_ssh_public_key=public_key)
        holder.user_account_settings = settings

    # Explicit command-line values override whatever was already configured.
    if admin_user_name:
        settings.admin_user_name = admin_user_name
    if public_key:
        settings.admin_user_ssh_public_key = public_key
    if password:
        settings.admin_user_password = password

    if not settings.admin_user_name:
        raise CLIError('Please provide admin user name.')
    if not (settings.admin_user_ssh_public_key or settings.admin_user_password):
        raise CLIError('Please provide admin user password or ssh key.')
def _handle_container_ssh_file(**kwargs):
    """Event handler for 'acs create': resolve args.ssh_key_value into key
    content, generating a key pair when requested."""
    if kwargs['command'] != 'acs create':
        return
    args = kwargs['args']
    key_ref = args.ssh_key_value

    if os.path.exists(key_ref):
        # The value points at an existing public key file: load its contents.
        logger.info('Use existing SSH public key file: %s', key_ref)
        with open(key_ref, 'r') as key_file:
            args.ssh_key_value = key_file.read()
        return

    if is_valid_ssh_rsa_public_key(key_ref) or not args.generate_ssh_keys:
        # Raw key text was supplied, or generation was not requested:
        # leave the value untouched.
        return

    # figure out appropriate file names:
    # 'base_name'(with private keys), and 'base_name.pub'(with public keys)
    public_key_filepath = key_ref
    if public_key_filepath[-4:].lower() == '.pub':
        private_key_filepath = public_key_filepath[:-4]
    else:
        private_key_filepath = public_key_filepath + '.private'
    args.ssh_key_value = generate_ssh_keys(private_key_filepath, public_key_filepath)
    logger.warning('Created SSH key files: %s,%s', private_key_filepath, public_key_filepath)
def aks_create(cmd, client, resource_group_name, name, ssh_key_value,  # pylint: disable=too-many-locals
               dns_name_prefix=None,
               location=None,
               # NOTE(review): default looks redacted ("******") — confirm the
               # intended default admin user name.
               admin_username="******",
               kubernetes_version='',
               node_vm_size="Standard_DS2_v2",
               node_osdisk_size=0,
               node_count=3,
               service_principal=None,
               client_secret=None,
               no_ssh_key=False,
               disable_rbac=None,
               enable_rbac=None,
               enable_vmss=None,
               skip_subnet_role_assignment=False,
               enable_cluster_autoscaler=False,
               network_plugin=None,
               pod_cidr=None,
               service_cidr=None,
               dns_service_ip=None,
               docker_bridge_address=None,
               enable_addons=None,
               workspace_resource_id=None,
               min_count=None,
               max_count=None,
               vnet_subnet_id=None,
               max_pods=0,
               aad_client_app_id=None,
               aad_server_app_id=None,
               aad_server_app_secret=None,
               aad_tenant_id=None,
               tags=None,
               generate_ssh_keys=False,  # pylint: disable=unused-argument
               no_wait=False):
    """Create a managed Kubernetes (AKS) cluster.

    Validates the SSH key (unless --no-ssh-key), fills in defaults for DNS
    prefix and location, builds the agent pool / linux / service principal /
    network / addon / AAD profiles, then submits the ManagedCluster to the
    service, retrying on service-principal replication delays.

    :raises CLIError: if the ssh key is invalid/non-existent, or both
        --disable-rbac and --enable-rbac were given.
    """
    # Validate the SSH key up front unless the caller opted out of SSH access.
    if not no_ssh_key:
        try:
            if not ssh_key_value or not is_valid_ssh_rsa_public_key(
                    ssh_key_value):
                raise ValueError()
        except (TypeError, ValueError):
            shortened_key = truncate_text(ssh_key_value)
            raise CLIError(
                'Provided ssh key ({}) is invalid or non-existent'.format(
                    shortened_key))

    subscription_id = _get_subscription_id(cmd.cli_ctx)
    if not dns_name_prefix:
        dns_name_prefix = _get_default_dns_prefix(name, resource_group_name,
                                                  subscription_id)

    # Default the cluster location to the resource group's location.
    rg_location = _get_rg_location(cmd.cli_ctx, resource_group_name)
    if location is None:
        location = rg_location

    agent_pool_profile = ManagedClusterAgentPoolProfile(
        name='nodepool1',  # Must be 12 chars or less before ACS RP adds to it
        count=int(node_count),
        vm_size=node_vm_size,
        os_type="Linux",
        vnet_subnet_id=vnet_subnet_id,
        max_pods=int(max_pods) if max_pods else None)
    if enable_vmss:
        agent_pool_profile.type = "VirtualMachineScaleSets"
    if node_osdisk_size:
        agent_pool_profile.os_disk_size_gb = int(node_osdisk_size)

    _check_cluster_autoscaler_flag(enable_cluster_autoscaler, min_count,
                                   max_count, node_count, agent_pool_profile)

    linux_profile = None
    # LinuxProfile is just used for SSH access to VMs, so omit it
    # if --no-ssh-key was specified.
    if not no_ssh_key:
        ssh_config = ContainerServiceSshConfiguration(
            public_keys=[ContainerServiceSshPublicKey(key_data=ssh_key_value)])
        linux_profile = ContainerServiceLinuxProfile(
            admin_username=admin_username, ssh=ssh_config)

    # Ensure a service principal exists (creating one if necessary) and build
    # the profile from it.
    principal_obj = _ensure_aks_service_principal(
        cmd.cli_ctx,
        service_principal=service_principal,
        client_secret=client_secret,
        subscription_id=subscription_id,
        dns_name_prefix=dns_name_prefix,
        location=location,
        name=name)
    service_principal_profile = ManagedClusterServicePrincipalProfile(
        client_id=principal_obj.get("service_principal"),
        secret=principal_obj.get("client_secret"))

    # Grant the SP access to a caller-supplied subnet, unless skipped or
    # already assigned.
    if (vnet_subnet_id and not skip_subnet_role_assignment and
            not subnet_role_assignment_exists(cmd.cli_ctx, vnet_subnet_id)):
        scope = vnet_subnet_id
        if not _add_role_assignment(
                cmd.cli_ctx,
                'Network Contributor',
                service_principal, scope=scope):
            logger.warning('Could not create a role assignment for subnet. '
                           'Are you an Owner on this subscription?')

    # Only build a network profile if any networking option was provided.
    network_profile = None
    if any([
            network_plugin, pod_cidr, service_cidr, dns_service_ip,
            docker_bridge_address
    ]):
        network_profile = ContainerServiceNetworkProfile(
            network_plugin=network_plugin,
            pod_cidr=pod_cidr,
            service_cidr=service_cidr,
            dns_service_ip=dns_service_ip,
            docker_bridge_cidr=docker_bridge_address)

    addon_profiles = _handle_addons_args(cmd, enable_addons, subscription_id,
                                         resource_group_name, {},
                                         workspace_resource_id)
    # The monitoring addon additionally needs a Log Analytics workspace.
    if 'omsagent' in addon_profiles:
        _ensure_container_insights_for_monitoring(cmd,
                                                  addon_profiles['omsagent'])

    aad_profile = None
    if any([
            aad_client_app_id, aad_server_app_id, aad_server_app_secret,
            aad_tenant_id
    ]):
        aad_profile = ManagedClusterAADProfile(
            client_app_id=aad_client_app_id,
            server_app_id=aad_server_app_id,
            server_app_secret=aad_server_app_secret,
            tenant_id=aad_tenant_id)

    # Check that both --disable-rbac and --enable-rbac weren't provided
    if all([disable_rbac, enable_rbac]):
        raise CLIError(
            'specify either "--disable-rbac" or "--enable-rbac", not both.')

    mc = ManagedCluster(location=location, tags=tags,
                        dns_prefix=dns_name_prefix,
                        kubernetes_version=kubernetes_version,
                        enable_rbac=False if disable_rbac else True,
                        agent_pool_profiles=[agent_pool_profile],
                        linux_profile=linux_profile,
                        service_principal_profile=service_principal_profile,
                        network_profile=network_profile,
                        addon_profiles=addon_profiles,
                        aad_profile=aad_profile)

    # Due to SPN replication latency, we do a few retries here
    max_retry = 30
    retry_exception = Exception(None)
    for _ in range(0, max_retry):
        try:
            return sdk_no_wait(no_wait,
                               client.managed_clusters.create_or_update,
                               resource_group_name=resource_group_name,
                               resource_name=name, parameters=mc)
        except CloudError as ex:
            retry_exception = ex
            # A not-yet-replicated SP shows up as a specific AD error; back
            # off and retry. Anything else is fatal.
            if 'not found in Active Directory tenant' in ex.message:
                time.sleep(3)
            else:
                raise ex
    raise retry_exception