Example #1
def process_blob_upload_batch_parameters(cmd, namespace):
    """Process the source and destination of storage blob upload command"""
    import os

    # 1. quick check
    if not os.path.exists(namespace.source):
        raise ValueError('incorrect usage: source {} does not exist'.format(namespace.source))

    if not os.path.isdir(namespace.source):
        raise ValueError('incorrect usage: source must be a directory')

    # 2. try to extract account name and container name from destination string
    from .storage_url_helpers import StorageResourceIdentifier
    identifier = StorageResourceIdentifier(cmd.cli_ctx.cloud, namespace.destination)

    if not identifier.is_url():
        namespace.destination_container_name = namespace.destination
    elif identifier.blob is not None:
        raise ValueError('incorrect usage: destination cannot be a blob url')
    else:
        namespace.destination_container_name = identifier.container

        if namespace.account_name:
            if namespace.account_name != identifier.account_name:
                raise ValueError(
                    'The given storage account name is not consistent with the account name in the destination URI')
        else:
            namespace.account_name = identifier.account_name

        if not (namespace.account_key or namespace.sas_token or namespace.connection_string):
            validate_client_parameters(cmd, namespace)

        # the account name may have been overwritten by the connection string
        if namespace.account_name != identifier.account_name:
            raise ValueError(
                'The given storage account name is not consistent with the account name in the destination URI')

        if not (namespace.account_key or namespace.sas_token or namespace.connection_string):
            raise ValueError('Missing storage account credential information.')

    # 3. collect the files to be uploaded
    namespace.source = os.path.realpath(namespace.source)
    namespace.source_files = list(glob_files_locally(namespace.source, namespace.pattern))

    # 4. determine blob type
    if namespace.blob_type is None:
        vhd_files = [f for f in namespace.source_files if f[0].endswith('.vhd')]
        if vhd_files and len(vhd_files) == len(namespace.source_files):
            # when all of the listed files are vhd files, use page blobs
            namespace.blob_type = 'page'
        elif vhd_files:
            # the source files include vhd files, but not exclusively
            from knack.util import CLIError
            raise CLIError("""Failed to guess the required blob type. The types of the files to be
            uploaded are not consistent. The default blob type for .vhd files is "page", while for
            others it is "block". You can solve this problem by either explicitly setting the blob
            type or ensuring the pattern matches a correct set of files.""")
        else:
            namespace.blob_type = 'block'
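
Step 4's blob-type inference is easy to read in isolation. A minimal sketch with assumed inputs; the (source, destination) tuple shape mirrors what glob_files_locally yields:

# Standalone illustration of step 4 (assumed inputs, not Azure CLI code)
source_files = [('disk0.vhd', 'disk0.vhd'), ('disk1.vhd', 'disk1.vhd')]
vhd_files = [f for f in source_files if f[0].endswith('.vhd')]
if vhd_files and len(vhd_files) == len(source_files):
    blob_type = 'page'   # every matched file is a VHD
elif vhd_files:
    blob_type = None     # mixed set: the real validator raises CLIError here
else:
    blob_type = 'block'
print(blob_type)         # page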
Example #2
def storage_file_upload_batch(cmd, client, destination, source, destination_path=None, pattern=None, dryrun=False,
                              validate_content=False, content_settings=None, max_connections=1, metadata=None,
                              progress_callback=None):
    """ Upload local files to Azure Storage File Share in batch """

    from azure.cli.command_modules.storage.util import glob_files_locally, normalize_blob_file_path

    source_files = [c for c in glob_files_locally(source, pattern)]
    logger = get_logger(__name__)
    settings_class = cmd.get_models('file.models#ContentSettings')

    if dryrun:
        logger.info('upload files to file share')
        logger.info('    account %s', client.account_name)
        logger.info('      share %s', destination)
        logger.info('      total %d', len(source_files))
        return [{'File': client.make_file_url(destination, os.path.dirname(dst) or None, os.path.basename(dst)),
                 'Type': guess_content_type(src, content_settings, settings_class).content_type} for src, dst in
                source_files]

    # TODO: Performance improvement
    # 1. Upload files in parallel
    def _upload_action(src, dst):
        dst = normalize_blob_file_path(destination_path, dst)
        dir_name = os.path.dirname(dst)
        file_name = os.path.basename(dst)

        _make_directory_in_files_share(client, destination, dir_name)
        create_file_args = {'share_name': destination, 'directory_name': dir_name, 'file_name': file_name,
                            'local_file_path': src, 'progress_callback': progress_callback,
                            'content_settings': guess_content_type(src, content_settings, settings_class),
                            'metadata': metadata, 'max_connections': max_connections}

        if cmd.supported_api_version(min_api='2016-05-31'):
            create_file_args['validate_content'] = validate_content

        logger.warning('uploading %s', src)
        client.create_file_from_path(**create_file_args)

        return client.make_file_url(destination, dir_name, file_name)

    return list(_upload_action(src, dst) for src, dst in source_files)
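
The per-file path handling in _upload_action splits each normalized destination into the directory that must exist on the share and the file name to create there. A minimal stand-in for normalize_blob_file_path (its exact behavior is an assumption here) makes the split visible:

# Illustrative only: _normalized approximates normalize_blob_file_path,
# assuming it prefixes destination_path and normalizes separators
import os

def _normalized(destination_path, dst):
    joined = '/'.join(p for p in (destination_path, dst) if p)
    return joined.replace('\\', '/').strip('/')

dst = _normalized('backups/2023', 'logs\\app.log')
dir_name, file_name = os.path.dirname(dst), os.path.basename(dst)
print(dir_name, file_name)  # backups/2023/logs app.log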
Example #3
def process_blob_upload_batch_parameters(cmd, namespace):
    """Process the source and destination of storage blob upload command"""
    import os

    # 1. quick check
    if not os.path.exists(namespace.source) or not os.path.isdir(namespace.source):
        raise ValueError('incorrect usage: source must be an existing directory')

    # 2. try to extract account name and container name from destination string
    _process_blob_batch_container_parameters(cmd, namespace, source=False)

    # 3. collect the files to be uploaded
    namespace.source = os.path.realpath(namespace.source)
    namespace.source_files = list(glob_files_locally(namespace.source, namespace.pattern))

    # 4. determine blob type
    if namespace.blob_type is None:
        vhd_files = [f for f in namespace.source_files if f[0].endswith('.vhd')]
        if vhd_files and len(vhd_files) == len(namespace.source_files):
            # when all of the listed files are vhd files, use page blobs
            namespace.blob_type = 'page'
        elif vhd_files:
            # the source files include vhd files, but not exclusively
            from knack.util import CLIError
            raise CLIError("""Failed to guess the required blob type. The types of the files to be
            uploaded are not consistent. The default blob type for .vhd files is "page", while for
            others it is "block". You can solve this problem by either explicitly setting the blob
            type or ensuring the pattern matches a correct set of files.""")
        else:
            namespace.blob_type = 'block'

    # 5. call other validators
    validate_metadata(namespace)
    t_blob_content_settings = cmd.loader.get_sdk('blob.models#ContentSettings')
    get_content_setting_validator(t_blob_content_settings, update=False)(cmd, namespace)
    add_progress_callback(cmd, namespace)
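
Step 5 relies on a validator-factory pattern: get_content_setting_validator is called to build a validator, and the returned callable is invoked immediately with (cmd, namespace). A sketch with a hypothetical validator (get_required_attr_validator is not part of the Azure CLI):

# Hypothetical factory mirroring the get_content_setting_validator(...)(cmd, namespace) shape
from argparse import Namespace

def get_required_attr_validator(attr):
    def validator(cmd, namespace):
        if getattr(namespace, attr, None) is None:
            raise ValueError('{} is required'.format(attr))
    return validator

ns = Namespace(account_name='mystorage')
get_required_attr_validator('account_name')(None, ns)  # builds, then runs the validator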