def get_container_access_type(cli_ctx, name):
    """Translate a public-access level name into the matching SDK ``PublicAccess`` value.

    'off' maps to None; 'blob'/'container' map to the corresponding enum attribute;
    any other name raises KeyError.
    """
    if name == 'off':
        return None
    if name in ('blob', 'container'):
        public_access = get_sdk(cli_ctx, CUSTOM_DATA_STORAGE, 'PublicAccess', mod='blob.models')
        return getattr(public_access, name.capitalize())
    raise KeyError
def get_container_access_type(cli_ctx, name):
    """Return the SDK PublicAccess member for *name*: None for 'off', KeyError for unknown names."""
    if name == 'off':
        return None
    public_access_names = {'blob': 'Blob', 'container': 'Container'}
    if name not in public_access_names:
        raise KeyError
    enum_type = get_sdk(cli_ctx, ResourceType.DATA_STORAGE, 'PublicAccess', mod='blob.models')
    return getattr(enum_type, public_access_names[name])
def _create_network_rule_set(cmd, bypass=None, default_action=None):
    """Build a keyvault NetworkRuleSet, defaulting bypass to AzureServices and action to Allow."""
    t_rule_set = cmd.get_models('NetworkRuleSet', resource_type=ResourceType.MGMT_KEYVAULT)
    t_bypass = get_sdk(cmd.cli_ctx, ResourceType.MGMT_KEYVAULT,
                       'models.key_vault_management_client_enums#NetworkRuleBypassOptions')
    t_action = get_sdk(cmd.cli_ctx, ResourceType.MGMT_KEYVAULT,
                       'models.key_vault_management_client_enums#NetworkRuleAction')
    effective_bypass = bypass or t_bypass.azure_services.value
    effective_action = default_action or t_action.allow.value
    return t_rule_set(bypass=effective_bypass, default_action=effective_action)
def get_blob_service_by_type(cli_ctx, blob_type):
    """Return the blob service class for 'block', 'page' or 'append'; None for any other type."""
    service_names = {
        'block': 'BlockBlobService',
        'page': 'PageBlobService',
        'append': 'AppendBlobService'
    }
    if blob_type not in service_names:
        return None
    return get_sdk(cli_ctx, CUSTOM_DATA_STORAGE, service_names[blob_type], mod='blob')
def create_short_lived_share_sas(account_name, account_key, share):
    """Generate a read-only, HTTPS-only SAS token for *share* that expires in one day."""
    from datetime import datetime, timedelta
    if supported_api_version(ResourceType.DATA_STORAGE, min_api='2017-04-17'):
        t_sas = get_sdk(ResourceType.DATA_STORAGE, 'FileSharedAccessSignature',
                        mod='file.sharedaccesssignature')
    else:
        t_sas = get_sdk(ResourceType.DATA_STORAGE, 'SharedAccessSignature', mod='sharedaccesssignature')
    t_file_permissions = get_sdk(ResourceType.DATA_STORAGE, 'file.models#FilePermissions')
    expiry = (datetime.utcnow() + timedelta(days=1)).strftime('%Y-%m-%dT%H:%M:%SZ')
    generator = t_sas(account_name, account_key)
    return generator.generate_share(share, permission=t_file_permissions(read=True),
                                    expiry=expiry, protocol='https')
def _create_short_lived_blob_sas(account_name, account_key, container, blob):
    """Generate a read-only, HTTPS-only SAS token for a single blob, valid for one day."""
    if supported_api_version(ResourceType.DATA_STORAGE, min_api='2017-04-17'):
        t_sas = get_sdk(ResourceType.DATA_STORAGE, 'BlobSharedAccessSignature',
                        mod='blob.sharedaccesssignature')
    else:
        t_sas = get_sdk(ResourceType.DATA_STORAGE, 'SharedAccessSignature', mod='sharedaccesssignature')
    t_blob_permissions = get_sdk(ResourceType.DATA_STORAGE, 'blob.models#BlobPermissions')
    expiry = (datetime.utcnow() + timedelta(days=1)).strftime('%Y-%m-%dT%H:%M:%SZ')
    generator = t_sas(account_name, account_key)
    return generator.generate_blob(container, blob, permission=t_blob_permissions(read=True),
                                   expiry=expiry, protocol='https')
def _create_short_lived_file_sas(account_name, account_key, share, directory_name, file_name):
    """Generate a read-only, HTTPS-only SAS token for a single file, valid for one day.

    :param directory_name: directory of the file; an empty string is normalized to None
        (root of the share).
    """
    if supported_api_version(ResourceType.DATA_STORAGE, min_api='2017-04-17'):
        SharedAccessSignature = get_sdk(ResourceType.DATA_STORAGE, 'FileSharedAccessSignature',
                                        mod='file.sharedaccesssignature')
    else:
        SharedAccessSignature = get_sdk(ResourceType.DATA_STORAGE, 'SharedAccessSignature',
                                        mod='sharedaccesssignature')
    # Use the file-specific permissions model, matching create_short_lived_share_sas.
    # (The blob model only happened to work because read=True serializes to the same 'r'.)
    FilePermissions = get_sdk(ResourceType.DATA_STORAGE, 'file.models#FilePermissions')
    # if dir is empty string change it to None
    directory_name = directory_name if directory_name else None
    expiry = (datetime.utcnow() + timedelta(days=1)).strftime('%Y-%m-%dT%H:%M:%SZ')
    sas = SharedAccessSignature(account_name, account_key)
    return sas.generate_file(share, directory_name=directory_name, file_name=file_name,
                             permission=FilePermissions(read=True), expiry=expiry, protocol='https')
def _build_tenant_level_accounts(tenants):
    """Create placeholder Subscription objects, one per tenant id, for tenant-level accounts."""
    from azure.cli.core.profiles import get_sdk, ResourceType
    t_subscription = get_sdk(ResourceType.MGMT_RESOURCE_SUBSCRIPTIONS, 'Subscription', mod='models')
    t_state = get_sdk(ResourceType.MGMT_RESOURCE_SUBSCRIPTIONS, 'SubscriptionState', mod='models')

    def _to_account(tenant_id):
        # Synthesize a pseudo-subscription whose id mirrors the tenant id itself.
        account = t_subscription()
        account.id = '/subscriptions/' + tenant_id
        account.subscription = tenant_id
        account.tenant_id = tenant_id
        account.display_name = 'N/A(tenant level account)'
        account.state = t_state.enabled
        return account

    return [_to_account(t) for t in tenants]
def resource_type_type(string):
    """Validate that a resource-types string contains only characters from
    (s)ervice, (c)ontainer, (o)bject and convert it to a ResourceTypes instance.

    :raises ValueError: with an explanatory message when any other character is present.
    """
    # Validate before touching the SDK so bad input fails fast with a clear message
    # (the original raised a bare ValueError, which argparse surfaces with no detail).
    if set(string) - set("sco"):
        raise ValueError('valid values are (s)ervice, (c)ontainer, (o)bject or a combination thereof.')
    ResourceTypes = get_sdk(ResourceType.DATA_STORAGE, 'models#ResourceTypes')
    return ResourceTypes(_str=''.join(set(string)))
def services_type(string):
    """Validate that a services string contains only characters from
    (b)lob, (q)ueue, (t)able, (f)ile and convert it to a Services instance.

    :raises ValueError: with an explanatory message when any other character is present.
    """
    # Validate before touching the SDK so bad input fails fast with a clear message
    # (the original raised a bare ValueError, which argparse surfaces with no detail).
    if set(string) - set("bqtf"):
        raise ValueError('valid values are (b)lob, (q)ueue, (t)able, (f)ile or a combination thereof.')
    Services = get_sdk(ResourceType.DATA_STORAGE, 'models#Services')
    return Services(_str=''.join(set(string)))
def set_logging(self, read, write, delete, retention, timeout=None):
    """Update the service's logging settings; a retention of 0 disables the retention policy."""
    t_logging, t_retention_policy = get_sdk(self.cli_ctx, CUSTOM_DATA_STORAGE,
                                            'Logging', 'RetentionPolicy', mod='common.models')
    policy = t_retention_policy(enabled=retention != 0, days=retention)
    log_settings = t_logging(delete, read, write, policy)
    return self.set_service_properties()(logging=log_settings, timeout=timeout)
def _deploy_arm_template_core(resource_group_name,  # pylint: disable=too-many-arguments
                              template_file=None, template_uri=None, deployment_name=None,
                              parameters=None, mode='incremental', validate_only=False,
                              no_wait=False):
    """Validate or start an ARM deployment from a local template file or a template URI.

    Exactly one of *template_file* / *template_uri* is expected to be set.
    Missing parameters are prompted for interactively. When *validate_only*
    is True the deployment is validated but not executed.
    """
    DeploymentProperties, TemplateLink = get_sdk(ResourceType.MGMT_RESOURCE_RESOURCES,
                                                 'DeploymentProperties', 'TemplateLink', mod='models')
    parameters = parameters or {}
    template = None
    template_link = None
    template_obj = None
    if template_uri:
        # Remote template: deploy by link, but download a copy so we can
        # inspect it for missing parameters.
        template_link = TemplateLink(uri=template_uri)
        template_obj = shell_safe_json_parse(_urlretrieve(template_uri).decode('utf-8'))
    else:
        template = get_file_json(template_file)
        template_obj = template
    parameters = _get_missing_parameters(parameters, template_obj, _prompt_for_parameters)
    properties = DeploymentProperties(template=template, template_link=template_link,
                                      parameters=parameters, mode=mode)
    smc = get_mgmt_service_client(ResourceType.MGMT_RESOURCE_RESOURCES)
    if validate_only:
        return smc.deployments.validate(resource_group_name, deployment_name, properties, raw=no_wait)
    return smc.deployments.create_or_update(resource_group_name, deployment_name, properties, raw=no_wait)
def cloud_storage_account_service_factory(kwargs):
    """Create a CloudStorageAccount from popped credential kwargs.

    connection_string is drained from kwargs but not used by this client.
    """
    t_cloud_storage_account = get_sdk(ResourceType.DATA_STORAGE, '#CloudStorageAccount')
    name = kwargs.pop('account_name', None)
    key = kwargs.pop('account_key', None)
    sas = kwargs.pop('sas_token', None)
    kwargs.pop('connection_string', None)  # consumed so it is not forwarded downstream
    return t_cloud_storage_account(name, key, sas)
def get_asg_validator(dest):
    """Return an argparse validator that converts ASG names on ``namespace.<dest>``
    into ApplicationSecurityGroup references carrying full resource ids.
    """
    ApplicationSecurityGroup = get_sdk(ResourceType.MGMT_NETWORK, 'ApplicationSecurityGroup', mod='models')

    def _validate_asg_name_or_id(namespace):
        subscription_id = get_subscription_id()
        resource_group = namespace.resource_group_name
        names_or_ids = getattr(namespace, dest)
        ids = []
        # [""] appears to be the sentinel for "explicitly cleared" — leave it untouched.
        if names_or_ids == [""] or not names_or_ids:
            return
        for val in names_or_ids:
            if not is_valid_resource_id(val):
                # Bare name: expand to a full ARM resource id in the current group.
                val = resource_id(
                    subscription=subscription_id,
                    resource_group=resource_group,
                    namespace='Microsoft.Network', type='applicationSecurityGroups',
                    name=val
                )
            ids.append(ApplicationSecurityGroup(id=val))
        setattr(namespace, dest, ids)

    return _validate_asg_name_or_id
def enum_default(resource_type, enum_name, enum_val_name):
    """Look up the value of an enum member for a resource-type profile; None when absent."""
    enum_model = get_sdk(resource_type, enum_name, mod='models')
    try:
        member = getattr(enum_model, enum_val_name)
        return member.value
    except AttributeError:
        # Either the member or the model is missing in this profile — skip the default.
        logger.debug('Skipping param default %s.%s for %s.', enum_name, enum_val_name, resource_type)
        return None
def create_role_assignment(self, client, assignment_name, role_id, object_id, scope):
    """Create a role assignment at *scope*, picking the parameter model that matches the API profile."""
    model_name = 'RoleAssignmentProperties' if self.old_api else 'RoleAssignmentCreateParameters'
    t_create_params = get_sdk(self.cli_ctx, ResourceType.MGMT_AUTHORIZATION, model_name,
                              mod='models', operation_group='role_assignments')
    payload = t_create_params(role_definition_id=role_id, principal_id=object_id)
    return client.create(scope, assignment_name, payload)
def _new_account(self):
    """Return a blank Subscription object pre-set to the 'enabled' state."""
    from azure.cli.core.profiles import ResourceType, get_sdk
    t_subscription, t_state = get_sdk(self.cli_ctx, ResourceType.MGMT_RESOURCE_SUBSCRIPTIONS,
                                      'Subscription', 'SubscriptionState', mod='models')
    account = t_subscription()
    account.state = t_state.enabled
    return account
def register_common_storage_account_options(context):
    """Register CLI arguments shared by storage-account commands.

    Encryption-related arguments are only registered when the MGMT_STORAGE
    API profile is recent enough (2016-12-01 / 2017-06-01 gates below).
    """
    context.reg_arg('https_only', help='Allows https traffic only to storage service.', **three_state_flag())
    context.reg_arg('sku', help='The storage account SKU.', **model_choice_list(ResourceType.MGMT_STORAGE, 'SkuName'))
    context.reg_arg('access_tier', help='The access tier used for billing StandardBlob accounts. Cannot be set for '
                                        'StandardLRS, StandardGRS, StandardRAGRS, or PremiumLRS account types. It is '
                                        'required for StandardBlob accounts during creation',
                    **model_choice_list(ResourceType.MGMT_STORAGE, 'AccessTier'))

    # after API 2016-12-01
    if supported_api_version(resource_type=ResourceType.MGMT_STORAGE, min_api='2016-12-01'):
        encryption_services_model = get_sdk(ResourceType.MGMT_STORAGE, 'models#EncryptionServices')
        if encryption_services_model:
            # Derive the choice list from the model's writable attributes.
            encryption_choices = []
            for attribute in encryption_services_model._attribute_map.keys():  # pylint: disable=protected-access
                if not encryption_services_model._validation.get(attribute, {}).get('readonly'):  # pylint: disable=protected-access
                    # skip readonly attributes, which are not for input
                    encryption_choices.append(attribute)
            context.reg_arg('encryption_services', nargs='+', help='Specifies which service(s) to encrypt.',
                            validator=validate_encryption_services, **enum_choice_list(encryption_choices))

    # after API 2017-06-01
    if supported_api_version(resource_type=ResourceType.MGMT_STORAGE, min_api='2017-06-01'):
        context.reg_arg('assign_identity', action='store_true',
                        help='Generate and assign a new Storage Account Identity for this storage account for use with '
                             'key management services like Azure KeyVault.')
        # the options of encryption key sources are hardcoded since there isn't a enum represents them in the SDK.
        context.reg_arg('encryption_key_source',
                        help='The encryption keySource (provider). Default: Microsoft.Storage',
                        validator=validate_encryption_source,
                        **enum_choice_list(['Microsoft.Storage', 'Microsoft.Keyvault']))
def _dont_fail_not_exist(ex):
    """Swallow 'missing resource' storage errors; re-raise anything else."""
    t_missing_error = get_sdk(ResourceType.DATA_STORAGE, '_error#AzureMissingResourceHttpError')
    if not isinstance(ex, t_missing_error):
        raise ex
    return None
def multi_service_properties_factory(kwargs):
    """Create a ServiceProperties wrapper for each service letter in kwargs['services']."""
    from .services_wrapper import ServiceProperties
    t_base_blob, t_file, t_table, t_queue = \
        get_sdk(ResourceType.DATA_STORAGE, 'blob.baseblobservice#BaseBlobService',
                'file#FileService', 'table#TableService', 'queue#QueueService')
    account_name = kwargs.pop('account_name', None)
    account_key = kwargs.pop('account_key', None)
    connection_string = kwargs.pop('connection_string', None)
    sas_token = kwargs.pop('sas_token', None)
    requested = kwargs.pop('services', [])

    # One (display name, service class) pair per service letter.
    service_types = {'b': ('blob', t_base_blob), 'f': ('file', t_file),
                     'q': ('queue', t_queue), 't': ('table', t_table)}
    return [ServiceProperties(name, service_type, account_name, account_key,
                              connection_string, sas_token)
            for name, service_type in (service_types[s] for s in requested)]
def get_vnet_validator(dest):
    """Return an argparse validator converting vnet names on ``namespace.<dest>`` into SubResource refs."""
    SubResource = get_sdk(ResourceType.MGMT_NETWORK, 'SubResource', mod='models')
    # NOTE: subscription id is captured when the validator is built, not when it runs.
    subscription_id = get_subscription_id()

    def _validate_vnet_name_or_id(namespace):
        group = namespace.resource_group_name
        values = getattr(namespace, dest)
        resolved = []
        # [""] means "explicitly cleared"; nothing to convert.
        if values == [""] or not values:
            return
        for value in values:
            if is_valid_resource_id(value):
                resolved.append(SubResource(id=value))
            else:
                full_id = resource_id(subscription=subscription_id, resource_group=group,
                                      namespace='Microsoft.Network', type='virtualNetworks',
                                      name=value)
                resolved.append(SubResource(id=full_id))
        setattr(namespace, dest, resolved)

    return _validate_vnet_name_or_id
def cloud_storage_account_service_factory(cli_ctx, kwargs):
    """Build a CloudStorageAccount client from popped credential kwargs."""
    t_cloud_storage_account = get_sdk(cli_ctx, CUSTOM_DATA_STORAGE, 'common#CloudStorageAccount')
    credentials = {key: kwargs.pop(key, None)
                   for key in ('account_name', 'account_key', 'sas_token')}
    kwargs.pop('connection_string', None)  # drained but unused by this client
    return t_cloud_storage_account(credentials['account_name'], credentials['account_key'],
                                   credentials['sas_token'])
def test_create_for_rbac_with_new_kv_cert(self, resource_group, key_vault):
    """Scenario test: create-for-rbac with a new KeyVault certificate, then verify that
    'ad sp credential reset' generates a different certificate. Cleans up the app at the end.
    """
    KeyVaultErrorException = get_sdk(self.cli_ctx, ResourceType.DATA_KEYVAULT,
                                     'models.key_vault_error#KeyVaultErrorException')
    subscription_id = self.get_subscription_id()

    self.kwargs.update({
        'sp': 'http://{}'.format(resource_group),
        'sub': subscription_id,
        'scope': '/subscriptions/{}'.format(subscription_id),
        'cert': 'cert1',
        'kv': key_vault
    })
    time.sleep(5)  # to avoid 504(too many requests) on a newly created vault

    try:
        with mock.patch('azure.cli.command_modules.role.custom._gen_guid', side_effect=self.create_guid):
            try:
                self.cmd('ad sp create-for-rbac --scopes {scope}/resourceGroups/{rg} --create-cert --keyvault {kv} --cert {cert} -n {sp}')
            except KeyVaultErrorException:
                if not self.is_live and not self.in_recording:
                    pass  # temporary workaround for keyvault challenge handling was ignored under playback
                else:
                    raise
            cer1 = self.cmd('keyvault certificate show --vault-name {kv} -n {cert}').get_output_in_json()['cer']
            self.cmd('ad sp credential reset -n {sp} --create-cert --keyvault {kv} --cert {cert}')
            cer2 = self.cmd('keyvault certificate show --vault-name {kv} -n {cert}').get_output_in_json()['cer']
            # the reset must have produced a new certificate
            self.assertTrue(cer1 != cer2)
    finally:
        self.cmd('ad app delete --id {sp}')
def validate_encryption_source(namespace):
    """Validate --encryption-key-source and its related key vault parameters.

    Pops encryption_key_name/encryption_key_version/encryption_key_vault off the
    namespace and, when the source is Microsoft.Keyvault and the profile exposes
    KeyVaultProperties, folds them into ``namespace.encryption_key_vault_properties``.

    :raises ValueError: on an unknown source, on missing key vault parameters when
        the source is Microsoft.Keyvault, or on key vault parameters supplied for
        a different source.
    """
    ns = vars(namespace)
    if namespace.encryption_key_source:
        allowed_options = ['Microsoft.Storage', 'Microsoft.Keyvault']
        if namespace.encryption_key_source not in allowed_options:
            # Fixed ungrammatical message ('allows to values').
            raise ValueError('--encryption-key-source allowed values are: {}'.format(', '.join(allowed_options)))

    key_name = ns.pop('encryption_key_name', None)
    key_version = ns.pop('encryption_key_version', None)
    key_vault_uri = ns.pop('encryption_key_vault', None)

    if namespace.encryption_key_source == 'Microsoft.Keyvault' and not (key_name and key_version and key_vault_uri):
        raise ValueError('--encryption-key-name, --encryption-key-vault, and --encryption-key-version are required '
                         'when --encryption-key-source=Microsoft.Keyvault is specified.')

    if key_name or key_version or key_vault_uri:
        if namespace.encryption_key_source != 'Microsoft.Keyvault':
            raise ValueError('--encryption-key-name, --encryption-key-vault, and --encryption-key-version are not '
                             'applicable when --encryption-key-source=Microsoft.Keyvault is not specified.')
        KeyVaultProperties = get_sdk(ResourceType.MGMT_STORAGE, 'KeyVaultProperties', mod='models')
        if not KeyVaultProperties:
            # Older profiles have no KeyVaultProperties model; nothing to attach.
            return

        kv_prop = KeyVaultProperties(key_name, key_version, key_vault_uri)
        namespace.encryption_key_vault_properties = kv_prop
def get_data_service_client(cli_ctx, service_type, account_name, account_key, connection_string=None,
                            sas_token=None, socket_timeout=None, token_credential=None,
                            endpoint_suffix=None):
    """Instantiate *service_type* with the given storage credentials.

    Optional kwargs (socket_timeout, token_credential, endpoint_suffix) are only
    forwarded when truthy. A ValueError signalling missing storage info is
    re-raised as ValueError; any other construction failure becomes a CLIError.
    """
    logger.debug('Getting data service client service_type=%s', service_type.__name__)
    try:
        client_kwargs = {'account_name': account_name,
                         'account_key': account_key,
                         'connection_string': connection_string,
                         'sas_token': sas_token}
        if socket_timeout:
            client_kwargs['socket_timeout'] = socket_timeout
        if token_credential:
            client_kwargs['token_credential'] = token_credential
        if endpoint_suffix:
            client_kwargs['endpoint_suffix'] = endpoint_suffix
        client = service_type(**client_kwargs)
    except ValueError as exc:
        _ERROR_STORAGE_MISSING_INFO = get_sdk(cli_ctx, ResourceType.DATA_STORAGE,
                                              'common._error#_ERROR_STORAGE_MISSING_INFO')
        if _ERROR_STORAGE_MISSING_INFO in str(exc):
            raise ValueError(exc)
        else:
            raise CLIError('Unable to obtain data client. Check your connection parameters.')
    # TODO: enable Fiddler
    client.request_callback = _get_add_headers_callback(cli_ctx)
    return client
def deploy_arm_template(cli_ctx, resource_group_name,  # pylint: disable=too-many-arguments
                        template_file=None, deployment_name=None, parameters=None, mode=None):
    """Start an ARM template deployment from a local file and return the raw poller.

    The template/parameters are round-tripped through JSON to strip any
    non-serializable objects before being handed to the SDK.
    """
    DeploymentProperties, _ = get_sdk(cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES,
                                      'DeploymentProperties', 'TemplateLink', mod='models')
    template = {}
    # TODO: get_file_json() can return None if specified, otherwise it can throw an error.
    template = get_file_json(template_file, preserve_order=True)
    template_obj = template
    # So template should always be a dict, otherwise this next line will fail.
    template_obj['resources'] = template_obj.get('resources', [])
    # template_obj is not used after this point, can remove it.
    parameters = BotTemplateDeployer.__process_parameters(parameters) or {}
    # Turn the template into JSON string, then load it back to a dict, list, etc.
    template = json.loads(json.dumps(template))
    parameters = json.loads(json.dumps(parameters))
    properties = DeploymentProperties(template=template, template_link=None,
                                      parameters=parameters, mode=mode)
    smc = get_mgmt_service_client(cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES)
    return smc.deployments.create_or_update(resource_group_name, deployment_name, properties, raw=False)
def test_batch_byos_account_cmd(self, resource_group):
    """Scenario test: create a KeyVault, grant key/secret permissions to the Batch
    service principal, create a BYOS Batch account against it, then delete the account.
    """
    SecretPermissions = get_sdk(self.cli_ctx, ResourceType.MGMT_KEYVAULT,
                                'models.key_vault_management_client_enums#SecretPermissions')
    KeyPermissions = get_sdk(self.cli_ctx, ResourceType.MGMT_KEYVAULT,
                             'models.key_vault_management_client_enums#KeyPermissions')
    ALL_SECRET_PERMISSIONS = ' '.join(
        [perm.value for perm in SecretPermissions])
    ALL_KEY_PERMISSIONS = ' '.join([perm.value for perm in KeyPermissions])
    self.kwargs.update({
        'rg': resource_group,
        'str_n': 'clibatchteststorage1',
        'byos_n': 'clibatchtestuser1',
        'byos_l': 'southindia',
        'kv': 'clibatchtestkeyvault1',
        'obj_id': 'f520d84c-3fd3-4cc8-88d4-2ed25b00d27a',
        'perm_k': ALL_KEY_PERMISSIONS,
        'perm_s': ALL_SECRET_PERMISSIONS
    })
    # test create keyvault for use with BYOS account
    self.cmd(
        'keyvault create -g {rg} -n {kv} -l {byos_l} --enabled-for-deployment true --enabled-for'
        '-disk-encryption true --enabled-for-template-deployment true').assert_with_checks([
            self.check('name', '{kv}'),
            self.check('location', '{byos_l}'),
            self.check('resourceGroup', '{rg}'),
            self.check('type(properties.accessPolicies)', 'array'),
            self.check('length(properties.accessPolicies)', 1),
            self.check('properties.sku.name', 'standard')])
    self.cmd('keyvault set-policy -g {rg} -n {kv} --object-id {obj_id} '
             '--key-permissions {perm_k} --secret-permissions {perm_s}')
    # wait before creating the account — presumably for the policy change to propagate; confirm
    time.sleep(100)
    # test create account with BYOS
    self.cmd(
        'batch account create -g {rg} -n {byos_n} -l {byos_l} --keyvault {kv}').assert_with_checks([
            self.check('name', '{byos_n}'),
            self.check('location', '{byos_l}'),
            self.check('resourceGroup', '{rg}')])
    # test batch account delete
    self.cmd('batch account delete -g {rg} -n {byos_n} --yes')
    self.cmd('batch account list -g {rg}').assert_with_checks(self.is_empty())
def validator(namespace):
    """Assemble blob/file content settings on the namespace.

    NOTE(review): closure — relies on *update* and *settings_class* from the
    enclosing scope. When *update* is set, the existing content settings are
    fetched from the service and used to backfill any option the user did not
    supply, simulating a PATCH on an API that only supports full replacement.
    """
    BaseBlobService, FileService, BlobContentSettings, FileContentSettings = get_sdk(
        ResourceType.DATA_STORAGE,
        'blob.baseblobservice#BaseBlobService',
        'file#FileService',
        'blob.models#ContentSettings',
        'file.models#ContentSettings')

    # must run certain validators first for an update
    if update:
        validate_client_parameters(namespace)
    if update and _class_name(settings_class) == _class_name(FileContentSettings):
        get_file_path_validator()(namespace)
    ns = vars(namespace)

    # retrieve the existing object properties for an update
    if update:
        account = ns.get('account_name')
        key = ns.get('account_key')
        cs = ns.get('connection_string')
        sas = ns.get('sas_token')
        if _class_name(settings_class) == _class_name(BlobContentSettings):
            client = get_storage_data_service_client(BaseBlobService, account, key, cs, sas)
            container = ns.get('container_name')
            blob = ns.get('blob_name')
            lease_id = ns.get('lease_id')
            props = client.get_blob_properties(container, blob, lease_id=lease_id).properties.content_settings
        elif _class_name(settings_class) == _class_name(FileContentSettings):
            client = get_storage_data_service_client(FileService, account, key, cs, sas)
            share = ns.get('share_name')
            directory = ns.get('directory_name')
            filename = ns.get('file_name')
            props = client.get_file_properties(share, directory, filename).properties.content_settings

    # create new properties
    new_props = settings_class(
        content_type=ns.pop('content_type', None),
        content_disposition=ns.pop('content_disposition', None),
        content_encoding=ns.pop('content_encoding', None),
        content_language=ns.pop('content_language', None),
        content_md5=ns.pop('content_md5', None),
        cache_control=ns.pop('content_cache_control', None)
    )

    # if update, fill in any None values with existing
    if update:
        new_props.content_type = new_props.content_type or props.content_type
        new_props.content_disposition = new_props.content_disposition or props.content_disposition
        new_props.content_encoding = new_props.content_encoding or props.content_encoding
        new_props.content_language = new_props.content_language or props.content_language
        new_props.content_md5 = new_props.content_md5 or props.content_md5
        new_props.cache_control = new_props.cache_control or props.cache_control

    ns['content_settings'] = new_props
    namespace = argparse.Namespace(**ns)
def validate_included_datasets(namespace):
    """Convert an --include string of (c/m/s) flags into the SDK Include object."""
    if not namespace.include:
        return
    include = namespace.include
    if set(include) - set('cms'):
        help_string = '(c)opy-info (m)etadata (s)napshots'
        raise ValueError('valid values are {} or a combination thereof.'.format(help_string))
    Include = get_sdk(ResourceType.DATA_STORAGE, 'blob#Include')
    namespace.include = Include('s' in include, 'm' in include, False, 'c' in include)
def table_permission_validator(namespace):
    """A special case for table because the SDK associates the QUERY permission with 'r'."""
    TablePermissions = get_sdk(ResourceType.DATA_STORAGE, 'table#TablePermissions')
    if not namespace.permission:
        return
    if set(namespace.permission) - set('raud'):
        raise ValueError('valid values are (r)ead/query (a)dd (u)pdate (d)elete or a combination thereof.')
    namespace.permission = TablePermissions(_str=namespace.permission)
def test_create_for_rbac_with_new_kv_cert(self, resource_group, key_vault):
    """Scenario test (display-name variant): create-for-rbac with a new KeyVault
    certificate, then verify 'ad sp credential reset' issues a different certificate.
    """
    KeyVaultErrorException = get_sdk(
        self.cli_ctx, ResourceType.DATA_KEYVAULT,
        'models.key_vault_error#KeyVaultErrorException')
    subscription_id = self.get_subscription_id()

    self.kwargs.update({
        'sp': 'http://{}'.format(resource_group),
        'display_name': resource_group,
        'sub': subscription_id,
        'scope': '/subscriptions/{}'.format(subscription_id),
        'cert': 'cert1',
        'kv': key_vault
    })
    time.sleep(
        5)  # to avoid 504(too many requests) on a newly created vault

    try:
        with mock.patch('azure.cli.command_modules.role.custom._gen_guid', side_effect=self.create_guid):
            try:
                self.cmd(
                    'ad sp create-for-rbac --scopes {scope}/resourceGroups/{rg} --create-cert --keyvault {kv} --cert {cert} -n {display_name}'
                )
            except KeyVaultErrorException:
                if not self.is_live and not self.in_recording:
                    pass  # temporary workaround for keyvault challenge handling was ignored under playback
                else:
                    raise
            cer1 = self.cmd(
                'keyvault certificate show --vault-name {kv} -n {cert}'
            ).get_output_in_json()['cer']
            self.cmd(
                'ad sp credential reset -n {sp} --create-cert --keyvault {kv} --cert {cert}'
            )
            cer2 = self.cmd(
                'keyvault certificate show --vault-name {kv} -n {cert}'
            ).get_output_in_json()['cer']
            # the reset must have produced a new certificate
            self.assertTrue(cer1 != cer2)
    finally:
        self.cmd('ad app delete --id {sp}')
def transformer(result):
    """Tag each listed entry as 'file' or 'dir' (stripping file content) and warn on pagination."""
    if getattr(result, 'next_marker', None):
        logger.warning('Next Marker:')
        logger.warning(result.next_marker)
    t_file, t_dir = get_sdk(cli_ctx, ResourceType.DATA_STORAGE, 'File', 'Directory', mod='file.models')
    tagged = []
    for item in result:
        if isinstance(item, t_file):
            # Content is not useful in a listing and may be large — drop it.
            delattr(item, 'content')
            item.type = 'file'
        elif isinstance(item, t_dir):
            item.type = 'dir'
        tagged.append(item)
    return tagged
def generic_data_service_factory(cli_ctx, service, name=None, key=None, connection_string=None,
                                 sas_token=None, socket_timeout=None):
    """Instantiate a data service client, translating the SDK's missing-credentials
    ValueError into a friendlier CLIError."""
    try:
        return get_storage_data_service_client(cli_ctx, service, name, key, connection_string,
                                               sas_token, socket_timeout)
    except ValueError as val_exception:
        from knack.util import CLIError
        _ERROR_STORAGE_MISSING_INFO = get_sdk(
            cli_ctx, ResourceType.DATA_STORAGE, 'common._error#_ERROR_STORAGE_MISSING_INFO')
        error_text = str(val_exception)
        if error_text == _ERROR_STORAGE_MISSING_INFO:
            error_text = NO_CREDENTIALS_ERROR_MESSAGE
        raise CLIError(error_text)
def validate_public_access(namespace):
    """Translate --public-access into its SDK value and, for commands carrying
    signed_identifiers, preload the container's existing ACL so the call behaves
    like a patch rather than a full replacement.
    """
    BaseBlobService = get_sdk(ResourceType.DATA_STORAGE, 'blob.baseblobservice#BaseBlobService')
    from ._params import public_access_types

    if namespace.public_access:
        namespace.public_access = public_access_types[namespace.public_access.lower()]

    if hasattr(namespace, 'signed_identifiers'):
        # must retrieve the existing ACL to simulate a patch operation because these calls
        # are needlessly conflated
        ns = vars(namespace)
        validate_client_parameters(namespace)
        account = ns.get('account_name')
        key = ns.get('account_key')
        cs = ns.get('connection_string')
        sas = ns.get('sas_token')
        client = get_storage_data_service_client(BaseBlobService, account, key, cs, sas)
        container = ns.get('container_name')
        lease_id = ns.get('lease_id')
        ns['signed_identifiers'] = client.get_container_acl(container, lease_id=lease_id)
def _query_account_key(cli_ctx, account_name):
    """Query the storage account key. This is used when the customer doesn't offer account key but name.

    Looks the account up by name across the subscription, then calls list_keys
    using whichever keys model the current API profile exposes.
    Raises ValueError when no account with that name exists.
    """
    scf = get_mgmt_service_client(cli_ctx, ResourceType.MGMT_STORAGE)
    acc = next((x for x in scf.storage_accounts.list() if x.name == account_name), None)
    if acc:
        from msrestazure.tools import parse_resource_id
        rg = parse_resource_id(acc.id)['resource_group']

        # Older profiles return StorageAccountKeys; newer ones return a
        # StorageAccountListKeysResult — only one of these will be non-None.
        t_storage_account_keys, t_storage_account_list_keys_results = get_sdk(
            cli_ctx,
            ResourceType.MGMT_STORAGE,
            'models.storage_account_keys#StorageAccountKeys',
            'models.storage_account_list_keys_result#StorageAccountListKeysResult')

        if t_storage_account_keys:
            return scf.storage_accounts.list_keys(rg, account_name).key1
        elif t_storage_account_list_keys_results:
            return scf.storage_accounts.list_keys(rg, account_name).keys[0].value  # pylint: disable=no-member
    else:
        raise ValueError("Storage account '{}' not found.".format(account_name))
def cf_share_service(cli_ctx, kwargs):
    """Build a ShareServiceClient from a connection string, or from account url + credential;
    None when neither is available."""
    t_share_service = get_sdk(cli_ctx, ResourceType.DATA_STORAGE_FILESHARE,
                              '_share_service_client#ShareServiceClient')
    opts = {key: kwargs.pop(key, None)
            for key in ('connection_string', 'account_key', 'token_credential',
                        'sas_token', 'account_name')}
    if opts['connection_string']:
        return t_share_service.from_connection_string(conn_str=opts['connection_string'])

    account_url = get_account_url(cli_ctx, account_name=opts['account_name'], service='file')
    credential = opts['account_key'] or opts['sas_token'] or opts['token_credential']
    if account_url and credential:
        return t_share_service(account_url=account_url, credential=credential)
    return None
def cf_adls_service(cli_ctx, kwargs):
    """Build a DataLakeServiceClient from a connection string, or from account url + credential.

    Returns None when no usable credentials are supplied.
    """
    t_adls_service = get_sdk(cli_ctx, CUSTOM_DATA_STORAGE_FILEDATALAKE,
                             '_data_lake_service_client#DataLakeServiceClient')
    connection_string = kwargs.pop('connection_string', None)
    account_name = kwargs.pop('account_name', None)
    account_key = kwargs.pop('account_key', None)
    token_credential = kwargs.pop('token_credential', None)
    sas_token = kwargs.pop('sas_token', None)

    # Enable NetworkTraceLoggingPolicy which logs all headers (except Authorization) without being redacted
    client_kwargs = {'logging_enable': True}

    if connection_string:
        return t_adls_service.from_connection_string(conn_str=connection_string, **client_kwargs)

    account_url = get_account_url(cli_ctx, account_name=account_name, service='dfs')
    credential = account_key or sas_token or token_credential
    if account_url and credential:
        return t_adls_service(account_url=account_url, credential=credential, **client_kwargs)
    return None
def _create_role_assignment(cli_ctx, role, assignee_object_id, scope):
    """Resolve *role* to its definition id and assign it to the principal at *scope*."""
    from azure.cli.core.profiles import ResourceType, get_sdk
    factory = _auth_client_factory(cli_ctx, scope)
    role_definition_id = _resolve_role_id(role, scope, factory.role_definitions)
    t_create_params = get_sdk(cli_ctx, ResourceType.MGMT_AUTHORIZATION,
                              'RoleAssignmentCreateParameters', mod='models',
                              operation_group='role_assignments')
    payload = t_create_params(role_definition_id=role_definition_id,
                              principal_id=assignee_object_id)
    return factory.role_assignments.create(scope=scope, role_assignment_name=_gen_guid(),
                                           parameters=payload)
def handler(ex):
    """Error handler: for storage 403s, log guidance about missing data-plane RBAC
    roles and swallow the exception; re-raise everything else.
    NOTE(review): closure — relies on *self* (command loader owner) from the enclosing scope.
    """
    from azure.cli.core.profiles import get_sdk
    from knack.log import get_logger
    logger = get_logger(__name__)
    t_error = get_sdk(self.command_loader.cli_ctx, CUSTOM_DATA_STORAGE, 'common._error#AzureHttpError')
    if isinstance(ex, t_error) and ex.status_code == 403:
        # User-facing guidance; kept verbatim.
        message = """ You do not have the required permissions needed to perform this operation. Depending on your operation, you may need to be assigned one of the following roles: "Storage Blob Data Contributor (Preview)" "Storage Blob Data Reader (Preview)" "Storage Queue Data Contributor (Preview)" "Storage Queue Data Reader (Preview)" If you want to use the old authentication method and allow querying for the right account key, please use the "--auth-mode" parameter and "key" value. """
        logger.error(message)
        return
    raise ex
def cf_adls_service(cli_ctx, kwargs):
    """Build a DataLakeServiceClient from a connection string, or from account url + credential.

    Returns None when neither a connection string nor (account url and credential)
    is available.
    """
    t_adls_service = get_sdk(
        cli_ctx, ResourceType.DATA_STORAGE_FILEDATALAKE,
        '_data_lake_service_client#DataLakeServiceClient')
    connection_string = kwargs.pop('connection_string', None)
    account_key = kwargs.pop('account_key', None)
    token_credential = kwargs.pop('token_credential', None)
    sas_token = kwargs.pop('sas_token', None)
    if connection_string:
        # The track2 SDK factory takes the connection string as ``conn_str``;
        # the previous ``connection_string=`` keyword does not match its signature
        # (see cf_share_service / cf_blob_service which already use conn_str).
        return t_adls_service.from_connection_string(conn_str=connection_string)

    account_url = get_account_url(cli_ctx, account_name=kwargs.pop('account_name', None), service='dfs')
    credential = account_key or sas_token or token_credential

    if account_url and credential:
        return t_adls_service(account_url=account_url, credential=credential)
    return None
def generic_data_service_factory(cli_ctx, service, name=None, key=None, connection_string=None,
                                 sas_token=None, socket_timeout=None, token_credential=None):
    """Create a data service client; raise CLIError with a friendly message when
    the SDK reports missing credentials."""
    try:
        return get_storage_data_service_client(cli_ctx, service, name, key, connection_string,
                                               sas_token, socket_timeout, token_credential)
    except ValueError as val_exception:
        _ERROR_STORAGE_MISSING_INFO = get_sdk(
            cli_ctx, CUSTOM_DATA_STORAGE, 'common._error#_ERROR_STORAGE_MISSING_INFO')
        raw_message = str(val_exception)
        friendly = NO_CREDENTIALS_ERROR_MESSAGE if raw_message == _ERROR_STORAGE_MISSING_INFO \
            else raw_message
        raise CLIError(friendly)
def assign_contributor_to_vnet(cli_ctx, vnet, object_id):
    """Grant the Network Contributor role on *vnet* to *object_id*,
    skipping creation if an equivalent assignment already exists.
    """
    auth_client = get_mgmt_service_client(cli_ctx, ResourceType.MGMT_AUTHORIZATION)

    RoleAssignmentCreateParameters = get_sdk(cli_ctx, ResourceType.MGMT_AUTHORIZATION,
                                             'RoleAssignmentCreateParameters', mod='models',
                                             operation_group='role_assignments')

    role_definition_id = resource_id(
        subscription=get_subscription_id(cli_ctx),
        namespace='Microsoft.Authorization',
        type='roleDefinitions',
        name=NETWORK_CONTRIBUTOR,
    )

    # Idempotency: nothing to do when the assignment is already in place.
    if has_assignment(auth_client.role_assignments.list_for_scope(vnet),
                      role_definition_id, object_id):
        return

    _create_role_assignment(auth_client, vnet, RoleAssignmentCreateParameters(
        role_definition_id=role_definition_id,
        principal_id=object_id,
        principal_type='ServicePrincipal',
    ))
def cf_blob_service(cli_ctx, kwargs):
    """Build a track2 BlobServiceClient from a connection string or account credentials."""
    from knack.util import CLIError

    service_cls = get_sdk(cli_ctx, CUSTOM_DATA_STORAGE_BLOB, '_blob_service_client#BlobServiceClient')
    conn_str = kwargs.pop('connection_string', None)
    account = kwargs.pop('account_name', None)
    loc_mode = kwargs.pop('location_mode', None)

    extra = {}
    if loc_mode:
        extra['_location_mode'] = loc_mode
    # Track2 defaults (user agent etc.) are layered on top afterwards.
    extra.update(_prepare_client_kwargs_track2(cli_ctx))

    if conn_str:
        return service_cls.from_connection_string(conn_str=conn_str, **extra)

    account_url = get_account_url(cli_ctx, account_name=account, service='blob')
    cred = get_credential(kwargs)
    if account_url and cred:
        return service_cls(account_url=account_url, credential=cred, **extra)

    raise CLIError("Please provide valid connection string, or account name with account key, "
                   "sas token or login auth mode.")
def deploy_arm_template(cli_ctx, resource_group_name,  # pylint: disable=too-many-arguments
                        template_file=None, deployment_name=None, parameters=None, mode=None):
    """Deploy an ARM template file to a resource group and wait for completion.

    :param template_file: path to the JSON template file (parsed with get_file_json).
    :param deployment_name: name to give the deployment.
    :param parameters: raw deployment parameters, normalized by __process_parameters.
    :param mode: deployment mode (e.g. 'Incremental').
    :return: the result of the long-running deployment operation.
    """
    DeploymentProperties, _ = get_sdk(cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES,
                                      'DeploymentProperties', 'TemplateLink', mod='models')

    # get_file_json raises on a malformed file; a valid template is a dict.
    template = get_file_json(template_file, preserve_order=True)
    template['resources'] = template.get('resources', [])

    parameters = BotTemplateDeployer.__process_parameters(parameters) or {}

    # Round-trip through JSON to normalize to plain dict/list/scalar types.
    template = json.loads(json.dumps(template))
    parameters = json.loads(json.dumps(parameters))

    properties = DeploymentProperties(template=template, template_link=None,
                                      parameters=parameters, mode=mode)
    smc = get_mgmt_service_client(cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES)
    # Fixed typo in the progress message ('Tempalte' -> 'Template').
    return LongRunningOperation(cli_ctx, 'Deploying ARM Template')(
        smc.deployments.create_or_update(resource_group_name, deployment_name, properties, raw=False))
def validate_encryption_source(cmd, namespace):
    """Validate KeyVault encryption arguments and attach KeyVaultProperties when applicable.

    Raises ValueError when the key name/version/vault triple is inconsistent
    with the chosen --encryption-key-source.
    """
    attrs = vars(namespace)
    key_name = attrs.pop('encryption_key_name', None)
    key_version = attrs.pop('encryption_key_version', None)
    key_vault_uri = attrs.pop('encryption_key_vault', None)

    uses_keyvault = namespace.encryption_key_source == 'Microsoft.Keyvault'
    have_all = key_name and key_version and key_vault_uri
    have_any = key_name or key_version or key_vault_uri

    if uses_keyvault and not have_all:
        raise ValueError('--encryption-key-name, --encryption-key-vault, and --encryption-key-version are required '
                         'when --encryption-key-source=Microsoft.Keyvault is specified.')

    if have_any:
        if not uses_keyvault:
            raise ValueError('--encryption-key-name, --encryption-key-vault, and --encryption-key-version are not '
                             'applicable when --encryption-key-source=Microsoft.Keyvault is not specified.')
        KeyVaultProperties = get_sdk(cmd.cli_ctx, CUSTOM_MGMT_PREVIEW_STORAGE, 'KeyVaultProperties', mod='models')
        if not KeyVaultProperties:
            return
        namespace.encryption_key_vault_properties = KeyVaultProperties(
            key_name=key_name, key_version=key_version, key_vault_uri=key_vault_uri)
def get_data_service_client(cli_ctx, service_type, account_name, account_key, connection_string=None,
                            sas_token=None, socket_timeout=None, token_credential=None,
                            endpoint_suffix=None, location_mode=None):
    """Instantiate *service_type* with the supplied credentials.

    A ValueError matching the SDK's missing-info message is re-raised as
    ValueError so callers can special-case it; any other ValueError becomes
    a generic CLIError.
    """
    logger.debug('Getting data service client service_type=%s', service_type.__name__)
    kwargs = {'account_name': account_name,
              'account_key': account_key,
              'connection_string': connection_string,
              'sas_token': sas_token}
    # Optional settings are forwarded only when truthy.
    for opt_name, opt_value in (('socket_timeout', socket_timeout),
                                ('token_credential', token_credential),
                                ('endpoint_suffix', endpoint_suffix)):
        if opt_value:
            kwargs[opt_name] = opt_value
    try:
        client = service_type(**kwargs)
        if location_mode:
            client.location_mode = location_mode
    except ValueError as exc:
        missing_info = get_sdk(cli_ctx, ResourceType.DATA_STORAGE,
                               'common._error#_ERROR_STORAGE_MISSING_INFO')
        if missing_info in str(exc):
            raise ValueError(exc)
        raise CLIError('Unable to obtain data client. Check your connection parameters.')
    # TODO: enable Fiddler
    client.request_callback = _get_add_headers_callback(cli_ctx)
    return client
def cf_adls_service(cli_ctx, kwargs):
    """Build a track2 DataLakeServiceClient, honoring location mode and credentials."""
    client_kwargs = _prepare_client_kwargs_track2(cli_ctx)
    client_kwargs = _config_location_mode(kwargs, client_kwargs)
    service_cls = get_sdk(cli_ctx, ResourceType.DATA_STORAGE_FILEDATALAKE,
                          '_data_lake_service_client#DataLakeServiceClient')

    conn_str = kwargs.pop('connection_string', None)
    account = kwargs.pop('account_name', None)
    key = kwargs.pop('account_key', None)
    token = kwargs.pop('token_credential', None)
    sas = kwargs.pop('sas_token', None)
    loc_mode = kwargs.pop('location_mode', None)
    if loc_mode:
        client_kwargs['_location_mode'] = loc_mode

    if conn_str:
        return service_cls.from_connection_string(conn_str=conn_str, **client_kwargs)

    url = get_account_url(cli_ctx, account_name=account, service='dfs')
    return service_cls(account_url=url, credential=key or sas or token, **client_kwargs)
def assign_identity(cli_ctx, getter, setter, identity_role=None, identity_scope=None):
    """Fetch a resource, apply the identity via *setter*, then optionally create
    a role assignment for the new identity at *identity_scope*.

    :param getter: zero-arg callable returning the resource.
    :param setter: callable that applies the identity and returns the updated resource.
    :param identity_role: role name/id to assign (resolved against identity_scope).
    :param identity_scope: scope for the role assignment; skipped when falsy.
    :return: the updated resource.
    """
    import time
    from msrestazure.azure_exceptions import CloudError
    # get
    resource = getter()
    resource = setter(resource)
    # create role assignment:
    if identity_scope:
        principal_id = resource.identity.principal_id
        identity_role_id = resolve_role_id(cli_ctx, identity_role, identity_scope)
        assignments_client = get_mgmt_service_client(cli_ctx, ResourceType.MGMT_AUTHORIZATION).role_assignments
        RoleAssignmentCreateParameters = get_sdk(cli_ctx, ResourceType.MGMT_AUTHORIZATION,
                                                 'RoleAssignmentCreateParameters', mod='models',
                                                 operation_group='role_assignments')
        parameters = RoleAssignmentCreateParameters(role_definition_id=identity_role_id,
                                                    principal_id=principal_id)
        logger.info("Creating an assignment with a role '%s' on the scope of '%s'",
                    identity_role_id, identity_scope)
        # Retry the creation: a freshly-created principal may not be visible in
        # the directory immediately (eventual consistency), so wait and retry.
        retry_times = 36
        assignment_name = _gen_guid()
        for l in range(0, retry_times):
            try:
                assignments_client.create(scope=identity_scope,
                                          role_assignment_name=assignment_name,
                                          parameters=parameters)
                break
            except CloudError as ex:
                if 'role assignment already exists' in ex.message:
                    # Idempotent: a pre-existing assignment is success.
                    logger.info('Role assignment already exists')
                    break
                elif l < retry_times and ' does not exist in the directory ' in ex.message:
                    # Principal not yet propagated; back off and retry.
                    # NOTE(review): l < retry_times is always true inside this
                    # loop, so a final failed attempt exits without raising —
                    # confirm this best-effort behavior is intended.
                    time.sleep(5)
                    logger.warning('Retrying role assignment creation: %s/%s', l + 1, retry_times)
                    continue
                else:
                    raise
    return resource
def _create_role_assignment(cli_ctx, role, assignee, resource_group_name=None, scope=None, resolve_assignee=True):
    """Create a role assignment for *assignee* at the resolved scope.

    *assignee* is a principal name resolved to an object id unless
    resolve_assignee is False (e.g. an MSI object id passed directly).
    """
    from azure.cli.core.profiles import ResourceType, get_sdk

    factory = get_auth_management_client(cli_ctx, scope)
    assignments = factory.role_assignments
    definitions = factory.role_definitions
    if assignments.config is None:
        raise CLIError("Assignments client config is undefined.")

    scope = build_role_scope(resource_group_name, scope, assignments.config.subscription_id)

    # XXX: if role is a uuid, this function's output cannot be used as a role
    # assignment definition id. ref: https://github.com/Azure/azure-cli/issues/2458
    role_id = resolve_role_id(role, scope, definitions)

    # Resolve a service principal client id to its object id when requested;
    # otherwise trust the caller-provided object id (MSI case).
    object_id = resolve_object_id(cli_ctx, assignee) if resolve_assignee else assignee

    RoleAssignmentCreateParameters = get_sdk(cli_ctx, ResourceType.MGMT_AUTHORIZATION,
                                             'RoleAssignmentCreateParameters', mod='models',
                                             operation_group='role_assignments')
    params = RoleAssignmentCreateParameters(role_definition_id=role_id, principal_id=object_id)
    return assignments.create(scope, uuid.uuid4(), params, custom_headers=None)
def create_role_definition(self, client, role_name, role_id, role_definition_input):
    """Create or update a custom role definition from the parsed input dict."""
    model_name = 'RoleDefinitionProperties' if self.old_api else 'RoleDefinition'
    RoleDefinitionBase = get_sdk(self.cli_ctx, ResourceType.MGMT_AUTHORIZATION,
                                 model_name, mod='models', operation_group='role_definitions')
    definition = RoleDefinitionBase(
        role_name=role_name,
        description=role_definition_input.get('description', None),
        type='CustomRole',
        assignable_scopes=role_definition_input['assignableScopes'],
        permissions=self._init_permissions(role_definition_input))
    scope = role_definition_input['assignableScopes'][0]
    # Older API versions pass the payload as 'properties'; newer ones as 'role_definition'.
    if self.old_api:
        return client.create_or_update(role_definition_id=role_id, scope=scope,
                                       properties=definition)
    return client.create_or_update(role_definition_id=role_id, scope=scope,
                                   role_definition=definition)
def assign_contributor_to_routetable(cli_ctx, master_subnet, worker_subnet, object_id):
    """Ensure *object_id* holds the contributor role on every route table attached
    to the master and worker subnets (idempotent per route table)."""
    auth_client = get_mgmt_service_client(cli_ctx, ResourceType.MGMT_AUTHORIZATION)
    network_client = get_mgmt_service_client(cli_ctx, ResourceType.MGMT_NETWORK)

    RoleAssignmentCreateParameters = get_sdk(cli_ctx, ResourceType.MGMT_AUTHORIZATION,
                                             'RoleAssignmentCreateParameters', mod='models',
                                             operation_group='role_assignments')

    role_definition_id = resource_id(
        subscription=get_subscription_id(cli_ctx),
        namespace='Microsoft.Authorization',
        type='roleDefinitions',
        name=DEVELOPMENT_CONTRIBUTOR if rp_mode_development() else CONTRIBUTOR,
    )

    # Collect the distinct route tables referenced by either subnet.
    route_tables = set()
    for subnet_ref in (master_subnet, worker_subnet):
        parts = parse_resource_id(subnet_ref)
        subnet = network_client.subnets.get(resource_group_name=parts['resource_group'],
                                            virtual_network_name=parts['name'],
                                            subnet_name=parts['resource_name'])
        if subnet.route_table is not None:
            route_tables.add(subnet.route_table.id)

    for table_id in route_tables:
        # Skip tables that already carry an equivalent assignment.
        if has_assignment(auth_client.role_assignments.list_for_scope(table_id),
                          role_definition_id, object_id):
            continue
        auth_client.role_assignments.create(table_id, _gen_uuid(), RoleAssignmentCreateParameters(
            role_definition_id=role_definition_id,
            principal_id=object_id,
            principal_type='ServicePrincipal',
        ))
def create_storage_account(resource_group_name, account_name, sku, assign_identity=False, location=None,
                           kind=None, tags=None, custom_domain=None, encryption=None, access_tier=None,
                           https_only=None):
    """Create a storage account, mapping optional CLI arguments onto SDK model fields."""
    (StorageAccountCreateParameters, Kind, Sku,
     CustomDomain, AccessTier, Identity) = get_sdk(ResourceType.MGMT_STORAGE,
                                                   'StorageAccountCreateParameters', 'Kind', 'Sku',
                                                   'CustomDomain', 'AccessTier', 'Identity', mod='models')
    client = storage_client_factory()
    create_params = StorageAccountCreateParameters(sku=Sku(sku), kind=Kind(kind),
                                                   location=location, tags=tags)
    # Optional features are attached only when the corresponding argument was supplied.
    if custom_domain:
        create_params.custom_domain = CustomDomain(custom_domain, None)
    if encryption:
        create_params.encryption = encryption
    if access_tier:
        create_params.access_tier = AccessTier(access_tier)
    if assign_identity:
        create_params.identity = Identity()
    if https_only:
        create_params.enable_https_traffic_only = https_only
    return client.storage_accounts.create(resource_group_name, account_name, create_params)
def remove_network_rule(client, resource_group_name, storage_account_name, ip_address=None, subnet=None,
                        vnet_name=None):  # pylint: disable=unused-argument
    """Remove matching virtual-network and/or IP rules from a storage account's network ACLs."""
    account = client.get_properties(resource_group_name, storage_account_name)
    acls = account.network_acls
    if subnet:
        # Drop rules whose resource id ends with the given subnet path.
        acls.virtual_network_rules = [rule for rule in acls.virtual_network_rules
                                      if not rule.virtual_network_resource_id.endswith(subnet)]
    if ip_address:
        acls.ip_rules = [rule for rule in acls.ip_rules
                         if rule.ip_address_or_range != ip_address]
    StorageAccountUpdateParameters = get_sdk(ResourceType.MGMT_STORAGE,
                                             'StorageAccountUpdateParameters', mod='models')
    return client.update(resource_group_name, storage_account_name,
                         StorageAccountUpdateParameters(network_acls=acls))
def _check_table_and_content(self, storage_account_name, key, table_name, filter_string, timeout_in_minutes):
    """Poll *table_name* until *filter_string* matches rows or the timeout elapses.

    Returns True on a match, False once timeout_in_minutes has passed without one.
    """
    sleep_period = 15  # seconds between polls
    TableService = get_sdk(self._cmd.cli_ctx, ResourceType.DATA_COSMOS_TABLE, 'table#TableService')
    table_client = get_data_service_client(
        self._cmd.cli_ctx, TableService, storage_account_name, key,
        endpoint_suffix=self._cmd.cli_ctx.cloud.suffixes.storage_endpoint)

    deadline = 60 * timeout_in_minutes
    elapsed = 0
    while elapsed < deadline:
        if table_client.query_entities(table_name, filter_string).items:
            return True
        logger.warning("\t\t\tWait %s seconds for table '%s' has date propagated ...",
                       sleep_period, table_name)
        time.sleep(sleep_period)
        elapsed += sleep_period
    return False
def cf_blob_service(cli_ctx, kwargs):
    """Build a BlobServiceClient from a connection string or account credentials."""
    from knack.util import CLIError

    service_cls = get_sdk(cli_ctx, ResourceType.DATA_STORAGE_BLOB,
                          '_blob_service_client#BlobServiceClient')
    conn_str = kwargs.pop('connection_string', None)
    account = kwargs.pop('account_name', None)
    key = kwargs.pop('account_key', None)
    token = kwargs.pop('token_credential', None)
    sas = kwargs.pop('sas_token', None)

    if conn_str:
        return service_cls.from_connection_string(conn_str=conn_str)

    url = get_account_url(cli_ctx, account_name=account, service='blob')
    cred = key or sas or token
    if url and cred:
        return service_cls(account_url=url, credential=cred)

    raise CLIError(
        "Please provide valid connection string, or account name with account key, "
        "sas token or login auth mode.")
def cf_blob_client(cli_ctx, kwargs):  # track2 partial migration
    """Build a BlobClient, preferring a full blob URL when one is supplied."""
    if kwargs.get('blob_url'):
        blob_client_cls = get_sdk(cli_ctx, ResourceType.DATA_STORAGE_BLOB, '_blob_client#BlobClient')
        credential = get_credential(kwargs)
        # Drop kwargs that from_blob_url does not accept.
        for unused in ('connection_string', 'account_name', 'account_url',
                       'container_name', 'blob_name'):
            kwargs.pop(unused)
        return blob_client_cls.from_blob_url(blob_url=kwargs.pop('blob_url'),
                                             credential=credential,
                                             snapshot=kwargs.pop('snapshot', None))

    # A present-but-empty blob_url is simply discarded.
    kwargs.pop('blob_url', None)
    return cf_blob_service(cli_ctx, kwargs).get_blob_client(
        container=kwargs.pop('container_name'),
        blob=kwargs.pop('blob_name'),
        snapshot=kwargs.pop('snapshot', None))
def cf_share_service(cli_ctx, kwargs):
    """Build a ShareServiceClient, honoring location mode and deriving the
    account URL when one was not passed explicitly."""
    client_kwargs = prepare_client_kwargs_track2(cli_ctx)
    client_kwargs = _config_location_mode(kwargs, client_kwargs)
    service_cls = get_sdk(cli_ctx, ResourceType.DATA_STORAGE_FILESHARE,
                          '_share_service_client#ShareServiceClient')

    conn_str = kwargs.pop('connection_string', None)
    key = kwargs.pop('account_key', None)
    token = kwargs.pop('token_credential', None)
    sas = kwargs.pop('sas_token', None)
    account = kwargs.pop('account_name', None)
    url = kwargs.pop('account_url', None)

    if conn_str:
        return service_cls.from_connection_string(conn_str=conn_str, **client_kwargs)

    if not url:
        url = get_account_url(cli_ctx, account_name=account, service='file')
    return service_cls(account_url=url, credential=key or sas or token, **client_kwargs)
def transform_file_directory_result(result):
    """Normalize a file/directory listing for display.

    File objects lose their raw 'content' attribute and gain type='file';
    Directory objects gain type='dir'. Items keep their original order.
    """
    File, Directory = get_sdk(ResourceType.DATA_STORAGE, 'File', 'Directory', mod='file.models')
    normalized = []
    for item in result:
        if isinstance(item, File):
            delattr(item, 'content')
            item.type = 'file'
        elif isinstance(item, Directory):
            item.type = 'dir'
        normalized.append(item)
    return normalized
def _query_account_key(account_name):
    """Find *account_name* in the subscription and return its primary access key.

    Raises ValueError when no such storage account exists.
    """
    scf = get_mgmt_service_client(ResourceType.MGMT_STORAGE)
    account = next((a for a in scf.storage_accounts.list() if a.name == account_name), None)
    if not account:
        raise ValueError("Storage account '{}' not found.".format(account_name))

    from azure.cli.core.commands.arm import parse_resource_id
    rg = parse_resource_id(account.id)['resource_group']
    StorageAccountKeys, StorageAccountListKeysResult = get_sdk(
        ResourceType.MGMT_STORAGE,
        'models.storage_account_keys#StorageAccountKeys',
        'models.storage_account_list_keys_result#StorageAccountListKeysResult')
    # Older SDKs expose key1 directly; newer ones return a keys collection.
    if StorageAccountKeys:
        return scf.storage_accounts.list_keys(rg, account_name).key1
    if StorageAccountListKeysResult:
        return scf.storage_accounts.list_keys(rg, account_name).keys[0].value  # pylint: disable=no-member
def _upload_package_blob(ctx, package_file, url):
    """Upload the local package file to the autostorage-provided blob SAS URL."""
    BlockBlobService = get_sdk(ctx, ResourceType.DATA_STORAGE, 'blob#BlockBlobService')

    parts = urlsplit(url)
    # The URI path begins with '/', so the container name is the second segment
    # and everything after it is the blob name.
    _, container_name, blob_name = parts.path.split('/', 2)

    # The storage account may live outside public Azure, so derive both the
    # account name and the endpoint suffix (everything after the 'blob' label)
    # from the host name.
    account_name, _, endpoint_suffix = parts.netloc.split('.', 2)

    sas_service = BlockBlobService(account_name=account_name,
                                   sas_token=parts.query,
                                   endpoint_suffix=endpoint_suffix)
    sas_service.create_blob_from_path(
        container_name=container_name,
        blob_name=blob_name,
        file_path=package_file,
    )