Example #1
    def validator(cmd, namespace):
        t_base_blob_service, t_file_service, t_blob_content_settings, t_file_content_settings = cmd.get_models(
            'blob.baseblobservice#BaseBlobService',
            'file#FileService',
            'blob.models#ContentSettings',
            'file.models#ContentSettings')

        # must run certain validators first for an update
        if update:
            validate_client_parameters(cmd, namespace)
        if update and _class_name(settings_class) == _class_name(t_file_content_settings):
            get_file_path_validator()(namespace)
        ns = vars(namespace)

        # retrieve the existing object properties for an update
        if update:
            account = ns.get('account_name')
            key = ns.get('account_key')
            cs = ns.get('connection_string')
            sas = ns.get('sas_token')
            if _class_name(settings_class) == _class_name(t_blob_content_settings):
                client = get_storage_data_service_client(cmd.cli_ctx,
                                                         t_base_blob_service,
                                                         account,
                                                         key,
                                                         cs,
                                                         sas)
                container = ns.get('container_name')
                blob = ns.get('blob_name')
                lease_id = ns.get('lease_id')
                props = client.get_blob_properties(container, blob, lease_id=lease_id).properties.content_settings
            elif _class_name(settings_class) == _class_name(t_file_content_settings):
                client = get_storage_data_service_client(cmd.cli_ctx, t_file_service, account, key, cs, sas)
                share = ns.get('share_name')
                directory = ns.get('directory_name')
                filename = ns.get('file_name')
                props = client.get_file_properties(share, directory, filename).properties.content_settings

        # create new properties
        new_props = settings_class(
            content_type=ns.pop('content_type', None),
            content_disposition=ns.pop('content_disposition', None),
            content_encoding=ns.pop('content_encoding', None),
            content_language=ns.pop('content_language', None),
            content_md5=ns.pop('content_md5', None),
            cache_control=ns.pop('content_cache_control', None)
        )

        # if update, fill in any None values with existing
        if update:
            for attr in ['content_type', 'content_disposition', 'content_encoding', 'content_language', 'content_md5',
                         'cache_control']:
                if getattr(new_props, attr) is None:
                    setattr(new_props, attr, getattr(props, attr))
        else:
            if guess_from_file:
                new_props = guess_content_type(ns[guess_from_file], new_props, settings_class)

        ns['content_settings'] = new_props
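
Note: the snippet above is the inner closure of a validator factory; `update`, `settings_class`, and `guess_from_file` are free variables bound by the enclosing function. A minimal sketch of that enclosing shape (the factory name and exact signature are assumptions, not shown above):

def get_content_setting_validator(settings_class, update, guess_from_file=None):
    # Hypothetical factory shape: binds settings_class, update and guess_from_file,
    # then returns the closure shown in Example #1 for use as an argument validator.
    def validator(cmd, namespace):
        ...  # body as in Example #1
    return validator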
Example #2
def get_storage_client(cli_ctx, service_type, namespace):
    from azure.cli.command_modules.storage._client_factory import get_storage_data_service_client

    az_config = cli_ctx.config

    name = getattr(namespace, 'account_name', az_config.get('storage', 'account', None))
    key = getattr(namespace, 'account_key', az_config.get('storage', 'key', None))
    connection_string = getattr(namespace, 'connection_string', az_config.get('storage', 'connection_string', None))
    sas_token = getattr(namespace, 'sas_token', az_config.get('storage', 'sas_token', None))

    return get_storage_data_service_client(cli_ctx, service_type, name, key, connection_string, sas_token)
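
A minimal, hypothetical sketch of how this helper might be called from a validator; `cmd` and the parsed `namespace` are supplied by the Azure CLI framework, and the model path 'file#FileService' matches the one used in the other examples:

def example_file_service_validator(cmd, namespace):
    # Hypothetical validator (name and wiring are assumptions): resolve the FileService
    # model and attach a data-plane client to the namespace via the helper above.
    t_file_service = cmd.get_models('file#FileService')
    namespace.client = get_storage_client(cmd.cli_ctx, t_file_service, namespace)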
Example #3
def validate_container_public_access(cmd, namespace):
    from .sdkutil import get_container_access_type
    t_base_blob_svc = cmd.get_models('blob.baseblobservice#BaseBlobService')

    if namespace.public_access:
        namespace.public_access = get_container_access_type(cmd.cli_ctx, namespace.public_access.lower())

        if hasattr(namespace, 'signed_identifiers'):
            # must retrieve the existing ACL to simulate a patch operation because these calls
            # are needlessly conflated
            ns = vars(namespace)
            validate_client_parameters(cmd, namespace)
            account = ns.get('account_name')
            key = ns.get('account_key')
            cs = ns.get('connection_string')
            sas = ns.get('sas_token')
            client = get_storage_data_service_client(cmd.cli_ctx, t_base_blob_svc, account, key, cs, sas)
            container = ns.get('container_name')
            lease_id = ns.get('lease_id')
            ns['signed_identifiers'] = client.get_container_acl(container, lease_id=lease_id)
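
The comment about simulating a patch suggests the command later writes the preserved ACL back together with the new public-access level. A minimal sketch of that write-back, assuming the legacy BaseBlobService.set_container_acl call (the helper name below is hypothetical):

def _apply_public_access(client, container_name, public_access, signed_identifiers, lease_id=None):
    # Hypothetical command-side counterpart: re-submit the ACL preserved by the validator so
    # existing stored access policies are not dropped when the public-access level changes.
    return client.set_container_acl(
        container_name,
        signed_identifiers=signed_identifiers,
        public_access=public_access,
        lease_id=lease_id)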
Example #4
def get_source_file_or_blob_service_client(cmd, namespace):
    """
    Create the second file or blob service client for the batch copy command, which is used to
    list the source files or blobs. If both the source account and the source URI are omitted,
    the user wants to copy files or blobs within the same storage account; in that case the
    source client is set to None and the command falls back to the destination client.
    """
    t_file_svc, t_block_blob_svc = cmd.get_models(
        'file#FileService', 'blob.blockblobservice#BlockBlobService')
    usage_string = 'invalid usage: supply only one of the following argument sets:' + \
                   '\n\t   --source-uri' + \
                   '\n\tOR --source-container' + \
                   '\n\tOR --source-container --source-account-name --source-account-key' + \
                   '\n\tOR --source-container --source-account-name --source-sas' + \
                   '\n\tOR --source-share --source-account-name --source-account-key' + \
                   '\n\tOR --source-share --source-account-name --source-account-sas'

    ns = vars(namespace)
    source_account = ns.pop('source_account_name', None)
    source_key = ns.pop('source_account_key', None)
    source_uri = ns.pop('source_uri', None)
    source_sas = ns.get('source_sas', None)
    source_container = ns.get('source_container', None)
    source_share = ns.get('source_share', None)

    if source_uri and source_account:
        raise ValueError(usage_string)

    elif (not source_account) and (not source_uri):
        # Set source_client to None if neither source_account nor source_uri is given. This
        # tells the command that the source file share or blob container is in the same storage
        # account as the destination file share.
        #
        # The command itself should create the source service client since the validator can't
        # access the destination client through the namespace.
        #
        # A few argument checks are made as well to avoid ambiguity.

        if source_key:
            raise ValueError(
                'invalid usage: --source-account-key is set but --source-account-name'
                ' is missing.')

        if source_container and source_share:
            raise ValueError(usage_string)

        if not source_container and not source_share:
            raise ValueError(usage_string)

        ns['source_client'] = None

    elif source_account:
        if source_container and source_share:
            raise ValueError(usage_string)

        if not (source_key or source_sas):
            # when neither the storage account key nor a SAS token is given, try to fetch the key
            # from the current subscription
            source_key = _query_account_key(cmd.cli_ctx, source_account)

        if source_container:
            ns['source_client'] = get_storage_data_service_client(
                cmd.cli_ctx,
                t_block_blob_svc,
                name=source_account,
                key=source_key,
                sas_token=source_sas)
        elif source_share:
            ns['source_client'] = get_storage_data_service_client(
                cmd.cli_ctx,
                t_file_svc,
                name=source_account,
                key=source_key,
                sas_token=source_sas)
        else:
            raise ValueError(usage_string)

    elif source_uri:
        if source_sas or source_key or source_container or source_share:
            raise ValueError(usage_string)

        from .storage_url_helpers import StorageResourceIdentifier
        identifier = StorageResourceIdentifier(cmd.cli_ctx.cloud, source_uri)
        nor_container_or_share = not identifier.container and not identifier.share
        if not identifier.is_url():
            raise ValueError('incorrect usage: --source-uri expects a URI')
        elif identifier.blob or identifier.directory or \
                identifier.filename or nor_container_or_share:
            raise ValueError(
                'incorrect usage: --source-uri has to be blob container or file share'
            )
        elif identifier.container:
            ns['source_container'] = identifier.container
            if identifier.account_name != ns.get('account_name'):
                ns['source_client'] = get_storage_data_service_client(
                    cmd.cli_ctx,
                    t_block_blob_svc,
                    name=identifier.account_name,
                    sas_token=identifier.sas_token)
        elif identifier.share:
            ns['source_share'] = identifier.share
            if identifier.account_name != ns.get('account_name'):
                ns['source_client'] = get_storage_data_service_client(
                    cmd.cli_ctx,
                    t_file_svc,
                    name=identifier.account_name,
                    sas_token=identifier.sas_token)
Example #5
def get_source_file_or_blob_service_client(cmd, namespace):
    """
    Create the second file or blob service client for the batch copy command, which is used to
    list the source files or blobs. If both the source account and the source URI are omitted,
    the user wants to copy files or blobs within the same storage account; in that case the
    source client is set to None and the command falls back to the destination client.
    """
    t_file_svc, t_block_blob_svc = cmd.get_models('file#FileService', 'blob.blockblobservice#BlockBlobService')
    usage_string = 'invalid usage: supply only one of the following argument sets:' + \
                   '\n\t   --source-uri  [--source-sas]' + \
                   '\n\tOR --source-container' + \
                   '\n\tOR --source-container --source-account-name --source-account-key' + \
                   '\n\tOR --source-container --source-account-name --source-sas' + \
                   '\n\tOR --source-share --source-account-name --source-account-key' + \
                   '\n\tOR --source-share --source-account-name --source-account-sas'

    ns = vars(namespace)
    source_account = ns.pop('source_account_name', None)
    source_key = ns.pop('source_account_key', None)
    source_uri = ns.pop('source_uri', None)
    source_sas = ns.get('source_sas', None)
    source_container = ns.get('source_container', None)
    source_share = ns.get('source_share', None)

    if source_uri and source_account:
        raise ValueError(usage_string)
    if not source_uri and bool(source_container) == bool(source_share):  # must be container or share
        raise ValueError(usage_string)

    if (not source_account) and (not source_uri):
        # Set source_client to None if neither source_account nor source_uri is given. This
        # tells the command that the source file share or blob container is in the same storage
        # account as the destination file share or blob container.
        #
        # The command itself should create the source service client since the validator can't
        # access the destination client through the namespace.
        #
        # A few argument checks are made as well to avoid ambiguity.
        if source_key or source_sas:
            raise ValueError('invalid usage: --source-account-name is missing; the source account is assumed to be the'
                             ' same as the destination account. Do not provide --source-sas or --source-account-key')
        ns['source_client'] = None

        if 'token_credential' not in ns:  # not using oauth
            return
        # OAuth is only possible through the destination account; source credentials must still be obtained
        source_account, source_key, source_sas = ns['account_name'], ns['account_key'], ns['sas_token']

    if source_account:
        if not (source_key or source_sas):
            # when neither the storage account key nor a SAS token is given, try to fetch the key
            # from the current subscription
            source_key = _query_account_key(cmd.cli_ctx, source_account)

        if source_container:
            ns['source_client'] = get_storage_data_service_client(
                cmd.cli_ctx, t_block_blob_svc, name=source_account, key=source_key, sas_token=source_sas)
        elif source_share:
            ns['source_client'] = get_storage_data_service_client(
                cmd.cli_ctx, t_file_svc, name=source_account, key=source_key, sas_token=source_sas)
    elif source_uri:
        if source_key or source_container or source_share:
            raise ValueError(usage_string)

        from .storage_url_helpers import StorageResourceIdentifier
        if source_sas:
            source_uri = '{}{}{}'.format(source_uri, '?', source_sas.lstrip('?'))
        identifier = StorageResourceIdentifier(cmd.cli_ctx.cloud, source_uri)
        nor_container_or_share = not identifier.container and not identifier.share
        if not identifier.is_url():
            raise ValueError('incorrect usage: --source-uri expects a URI')
        elif identifier.blob or identifier.directory or \
                identifier.filename or nor_container_or_share:
            raise ValueError('incorrect usage: --source-uri has to be blob container or file share')

        if identifier.sas_token:
            ns['source_sas'] = identifier.sas_token
        else:
            source_key = _query_account_key(cmd.cli_ctx, identifier.account_name)

        if identifier.container:
            ns['source_container'] = identifier.container
            if identifier.account_name != ns.get('account_name'):
                ns['source_client'] = get_storage_data_service_client(
                    cmd.cli_ctx, t_block_blob_svc, name=identifier.account_name, key=source_key,
                    sas_token=identifier.sas_token)
        elif identifier.share:
            ns['source_share'] = identifier.share
            if identifier.account_name != ns.get('account_name'):
                ns['source_client'] = get_storage_data_service_client(
                    cmd.cli_ctx, t_file_svc, name=identifier.account_name, key=source_key,
                    sas_token=identifier.sas_token)
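
A hypothetical illustration of the "same storage account" path described in the docstring: with only a source container supplied, the validator leaves source_client as None and the command falls back to the destination client. The values below are made up, and cmd comes from the Azure CLI framework, so the call itself is shown commented out:

import argparse

ns = argparse.Namespace(
    source_account_name=None, source_account_key=None, source_uri=None,
    source_sas=None, source_container='src-container', source_share=None,
    account_name='destaccount', account_key=None, sas_token=None)
# get_source_file_or_blob_service_client(cmd, ns)  # cmd is supplied by the framework
# assert ns.source_client is None                  # expected result on this path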