Example #1
0
 def _create_return_result(blob_name, blob_content_settings, upload_result=None):
     """Build the per-blob summary dict reported after an upload."""
     full_blob_name = normalize_blob_file_path(destination_path, blob_name)
     last_modified = upload_result.last_modified if upload_result else None
     etag = upload_result.etag if upload_result else None
     return {
         'Blob': client.make_blob_url(destination_container_name, full_blob_name),
         'Type': blob_content_settings.content_type,
         'Last Modified': last_modified,
         'eTag': etag}
Example #2
0
def _copy_file_to_blob_container(blob_service, source_file_service,
                                 destination_container, destination_path,
                                 source_share, source_sas, source_file_dir,
                                 source_file_name):
    """Server-side copy of one file-share file into a blob container."""
    from azure.common import AzureException
    file_url, source_file_dir, source_file_name = \
        make_encoded_file_url_and_params(source_file_service, source_share, source_file_dir,
                                         source_file_name, source_sas)

    if source_file_dir:
        relative_source = os.path.join(source_file_dir, source_file_name)
    else:
        relative_source = source_file_name
    blob_name = normalize_blob_file_path(destination_path, relative_source)

    try:
        blob_service.copy_blob(destination_container, blob_name, file_url)
        return blob_service.make_blob_url(destination_container, blob_name)
    except AzureException as ex:
        from knack.util import CLIError
        error_template = 'Failed to copy file {} to container {}. {}'
        raise CLIError(error_template.format(source_file_name, destination_container, ex))
Example #3
0
def _create_file_and_directory_from_blob(file_service,
                                         blob_service,
                                         share,
                                         container,
                                         sas,
                                         blob_name,
                                         destination_dir=None,
                                         metadata=None,
                                         timeout=None,
                                         existing_dirs=None):
    """
    Copy a blob to file share and create the directory if needed.
    """
    from azure.common import AzureException
    from azure.cli.command_modules.storage.util import normalize_blob_file_path

    source_url = blob_service.make_blob_url(container,
                                            encode_for_url(blob_name),
                                            sas_token=sas)
    target_path = normalize_blob_file_path(destination_dir, blob_name)
    target_dir, target_file = os.path.split(target_path)
    _make_directory_in_files_share(file_service, share, target_dir,
                                   existing_dirs)

    try:
        file_service.copy_file(share, target_dir, target_file, source_url,
                               metadata, timeout)
        return file_service.make_file_url(share, target_dir, target_file)
    except AzureException:
        from knack.util import CLIError
        error_template = 'Failed to copy blob {} to file share {}. Please check if you have permission to read ' \
                         'source or set a correct sas token.'
        raise CLIError(error_template.format(blob_name, share))
Example #4
0
def _create_file_and_directory_from_file(file_service, source_file_service, share, source_share, sas, source_file_dir,
                                         source_file_name, destination_dir=None, metadata=None, timeout=None,
                                         existing_dirs=None):
    """
    Copy a file from one file share to another
    """
    from azure.common import AzureException
    from azure.cli.command_modules.storage.util import normalize_blob_file_path

    file_url, source_file_dir, source_file_name = make_encoded_file_url_and_params(
        source_file_service, source_share, source_file_dir, source_file_name, sas_token=sas)

    target_path = normalize_blob_file_path(destination_dir, os.path.join(source_file_dir, source_file_name))
    target_dir, target_file = os.path.split(target_path)
    _make_directory_in_files_share(file_service, share, target_dir, existing_dirs)

    try:
        file_service.copy_file(share, target_dir, target_file, file_url, metadata, timeout)
        return file_service.make_file_url(share, target_dir or None, target_file)
    except AzureException:
        from knack.util import CLIError
        error_template = 'Failed to copy file {} from share {} to file share {}. Please check if ' \
                         'you have right permission to read source or set a correct sas token.'
        raise CLIError(error_template.format(target_file, source_share, share))
Example #5
0
    def _upload_action(src, dst):
        """Upload local file *src* to the share at the path derived from *dst*."""
        target = normalize_blob_file_path(destination_path, dst)
        target_dir, target_file = os.path.split(target)

        _make_directory_in_files_share(client, destination, target_dir)
        create_file_args = {
            'share_name': destination,
            'directory_name': target_dir,
            'file_name': target_file,
            'local_file_path': src,
            'progress_callback': progress_callback,
            'content_settings': guess_content_type(src, content_settings, settings_class),
            'metadata': metadata,
            'max_connections': max_connections,
        }

        # validate_content is only understood by service API 2016-05-31+.
        if cmd.supported_api_version(min_api='2016-05-31'):
            create_file_args['validate_content'] = validate_content

        logger.warning('uploading %s', src)
        client.create_file_from_path(**create_file_args)

        return client.make_file_url(destination, target_dir, target_file)
Example #6
0
def _create_file_and_directory_from_file(file_service, source_file_service, share, source_share, sas, source_file_dir,
                                         source_file_name, destination_dir=None, metadata=None, timeout=None,
                                         existing_dirs=None):
    """Copy a single file between two file shares, creating the target directory when needed."""
    from azure.common import AzureException
    from azure.cli.command_modules.storage.util import normalize_blob_file_path

    file_url, source_file_dir, source_file_name = make_encoded_file_url_and_params(
        source_file_service, source_share, source_file_dir, source_file_name, sas_token=sas)

    source_relative = os.path.join(source_file_dir, source_file_name)
    normalized = normalize_blob_file_path(destination_dir, source_relative)
    dir_name, file_name = os.path.split(normalized)
    _make_directory_in_files_share(file_service, share, dir_name, existing_dirs)

    try:
        file_service.copy_file(share, dir_name, file_name, file_url, metadata, timeout)
        return file_service.make_file_url(share, dir_name or None, file_name)
    except AzureException:
        from knack.util import CLIError
        error_template = 'Failed to copy file {} from share {} to file share {}. Please check if ' \
                         'you have right permission to read source or set a correct sas token.'
        raise CLIError(error_template.format(file_name, source_share, share))
Example #7
0
 def _create_return_result(blob_name, blob_content_settings, upload_result=None):
     """Assemble the result dict for one uploaded blob (upload_result is None on dry runs)."""
     resolved_name = normalize_blob_file_path(destination_path, blob_name)
     modified = upload_result.last_modified if upload_result else None
     etag_value = upload_result.etag if upload_result else None
     return {
         'Blob': client.make_blob_url(destination_container_name, resolved_name),
         'Type': blob_content_settings.content_type,
         'Last Modified': modified,
         'eTag': etag_value}
Example #8
0
File: blob.py  Project: zzn2/azure-cli
def storage_blob_download_batch(client, source, destination, source_container_name, pattern=None, dryrun=False,
                                progress_callback=None, max_connections=2):
    """Download every blob in a container matching *pattern* into *destination*."""

    def _fetch_one(blob_service, container, dest_root, normalized_name, original_name):
        # TODO: try catch IO exception
        target = os.path.join(dest_root, normalized_name)
        parent = os.path.dirname(target)
        if not os.path.exists(parent):
            mkdir_p(parent)

        downloaded = blob_service.get_blob_to_path(container, original_name, target,
                                                   max_connections=max_connections,
                                                   progress_callback=progress_callback)
        return downloaded.name

    source_blobs = collect_blobs(client, source_container_name, pattern)
    download_map = {}
    for name in source_blobs:
        # remove starting path seperator and normalize
        normalized = normalize_blob_file_path(None, name)
        if normalized in download_map:
            raise CLIError('Multiple blobs with download path: `{}`. As a solution, use the `--pattern` parameter '
                           'to select for a subset of blobs to download OR utilize the `storage blob download` '
                           'command instead to download individual blobs.'.format(normalized))
        download_map[normalized] = name

    if dryrun:
        logger = get_logger(__name__)
        logger.warning('download action: from %s to %s', source, destination)
        logger.warning('    pattern %s', pattern)
        logger.warning('  container %s', source_container_name)
        logger.warning('      total %d', len(source_blobs))
        logger.warning(' operations')
        for name in source_blobs:
            logger.warning('  - %s', name)
        return []

    # Tell progress reporter to reuse the same hook
    if progress_callback:
        progress_callback.reuse = True

    results = []
    for index, normalized in enumerate(download_map):
        # add blob name and number to progress message
        if progress_callback:
            progress_callback.message = '{}/{}: "{}"'.format(
                index + 1, len(download_map), download_map[normalized])
        results.append(_fetch_one(client, source_container_name, destination,
                                  normalized, download_map[normalized]))

    # end progress hook
    if progress_callback:
        progress_callback.hook.end()

    return results
Example #9
0
def storage_blob_download_batch(client,
                                source,
                                destination,
                                source_container_name,
                                pattern=None,
                                dryrun=False,
                                progress_callback=None,
                                max_connections=2):
    """Download all matching blobs from a container to a local folder."""

    def _fetch(blob_service, container, dest_root, normalized_name, blob_name):
        # TODO: try catch IO exception
        target_path = os.path.join(dest_root, normalized_name)
        parent_dir = os.path.dirname(target_path)
        if not os.path.exists(parent_dir):
            mkdir_p(parent_dir)

        downloaded = blob_service.get_blob_to_path(
            container, blob_name, target_path,
            max_connections=max_connections,
            progress_callback=progress_callback)
        return downloaded.name

    source_blobs = collect_blobs(client, source_container_name, pattern)
    download_map = {}
    for name in source_blobs:
        # remove starting path seperator and normalize
        normalized = normalize_blob_file_path(None, name)
        if normalized in download_map:
            from knack.util import CLIError
            raise CLIError(
                'Multiple blobs with download path: `{}`. As a solution, use the `--pattern` parameter '
                'to select for a subset of blobs to download OR utilize the `storage blob download` '
                'command instead to download individual blobs.'.format(
                    normalized))
        download_map[normalized] = name

    if dryrun:
        logger = get_logger(__name__)
        logger.warning('download action: from %s to %s', source, destination)
        logger.warning('    pattern %s', pattern)
        logger.warning('  container %s', source_container_name)
        logger.warning('      total %d', len(source_blobs))
        logger.warning(' operations')
        for name in source_blobs:
            logger.warning('  - %s', name)
        return []

    return [_fetch(client, source_container_name, destination, normalized,
                   download_map[normalized])
            for normalized in download_map]
Example #10
0
def _copy_blob_to_blob_container(blob_service, source_blob_service, destination_container, destination_path,
                                 source_container, source_sas, source_blob_name):
    """Server-side copy of one blob into another container.

    Returns the destination blob URL; raises CLIError when the copy fails.
    """
    from azure.common import AzureException
    source_blob_url = source_blob_service.make_blob_url(source_container, encode_for_url(source_blob_name),
                                                        sas_token=source_sas)
    destination_blob_name = normalize_blob_file_path(destination_path, source_blob_name)
    try:
        blob_service.copy_blob(destination_container, destination_blob_name, source_blob_url)
        return blob_service.make_blob_url(destination_container, destination_blob_name)
    except AzureException:
        # Import locally, matching the sibling copy helpers, so this block does
        # not depend on a module-level CLIError import.
        from knack.util import CLIError
        error_template = 'Failed to copy blob {} to container {}.'
        raise CLIError(error_template.format(source_blob_name, destination_container))
Example #11
0
def storage_blob_upload_batch(cmd, client, source, destination, pattern=None,  # pylint: disable=too-many-locals
                              source_files=None, destination_path=None,
                              destination_container_name=None, blob_type=None,
                              content_settings=None, metadata=None, validate_content=False,
                              maxsize_condition=None, max_connections=2, lease_id=None, progress_callback=None,
                              if_modified_since=None, if_unmodified_since=None, if_match=None,
                              if_none_match=None, timeout=None, dryrun=False):
    """Upload a batch of local files to a blob container.

    ``source_files`` is an iterable of (local_path, blob_name) pairs. Each file
    is uploaded via ``upload_blob``; uploads skipped by a failed precondition
    (If-Match etc.) are counted and reported. Returns a list of per-blob result
    dicts. With ``dryrun`` set, nothing is uploaded.
    """
    def _create_return_result(blob_name, blob_content_settings, upload_result=None):
        # upload_result is None on dry runs, so Last Modified/eTag stay None.
        blob_name = normalize_blob_file_path(destination_path, blob_name)
        return {
            'Blob': client.make_blob_url(destination_container_name, blob_name),
            'Type': blob_content_settings.content_type,
            'Last Modified': upload_result.last_modified if upload_result else None,
            'eTag': upload_result.etag if upload_result else None}

    logger = get_logger(__name__)
    t_content_settings = cmd.get_models('blob.models#ContentSettings')

    # Normalize once: the len(source_files) calls below raised TypeError when
    # the caller left source_files at its default None (the loops guarded with
    # `or []`, but len(None) did not).
    source_files = source_files or []

    results = []
    if dryrun:
        logger.info('upload action: from %s to %s', source, destination)
        logger.info('    pattern %s', pattern)
        logger.info('  container %s', destination_container_name)
        logger.info('       type %s', blob_type)
        logger.info('      total %d', len(source_files))
        results = []
        for src, dst in source_files:
            results.append(_create_return_result(dst, guess_content_type(src, content_settings, t_content_settings)))
    else:
        # Wrap upload_blob so precondition failures surface as (False, None)
        # instead of raising.
        @check_precondition_success
        def _upload_blob(*args, **kwargs):
            return upload_blob(*args, **kwargs)

        for src, dst in source_files:
            logger.warning('uploading %s', src)
            guessed_content_settings = guess_content_type(src, content_settings, t_content_settings)

            include, result = _upload_blob(cmd, client, destination_container_name,
                                           normalize_blob_file_path(destination_path, dst), src,
                                           blob_type=blob_type, content_settings=guessed_content_settings,
                                           metadata=metadata, validate_content=validate_content,
                                           maxsize_condition=maxsize_condition, max_connections=max_connections,
                                           lease_id=lease_id, progress_callback=progress_callback,
                                           if_modified_since=if_modified_since,
                                           if_unmodified_since=if_unmodified_since, if_match=if_match,
                                           if_none_match=if_none_match, timeout=timeout)
            if include:
                results.append(_create_return_result(dst, guessed_content_settings, result))

        num_failures = len(source_files) - len(results)
        if num_failures:
            logger.warning('%s of %s files not uploaded due to "Failed Precondition"', num_failures, len(source_files))
    return results
Example #12
0
def _copy_blob_to_blob_container(blob_service, source_blob_service, destination_container, destination_path,
                                 source_container, source_sas, source_blob_name):
    """Server-side copy of one blob into another container.

    Returns the destination blob URL; raises CLIError when the copy fails.
    """
    from azure.common import AzureException
    source_blob_url = source_blob_service.make_blob_url(source_container, encode_for_url(source_blob_name),
                                                        sas_token=source_sas)
    destination_blob_name = normalize_blob_file_path(destination_path, source_blob_name)
    try:
        blob_service.copy_blob(destination_container, destination_blob_name, source_blob_url)
        return blob_service.make_blob_url(destination_container, destination_blob_name)
    except AzureException:
        # Import locally, matching the sibling copy helpers, so this block does
        # not depend on a module-level CLIError import.
        from knack.util import CLIError
        error_template = 'Failed to copy blob {} to container {}.'
        raise CLIError(error_template.format(source_blob_name, destination_container))
Example #13
0
def _copy_file_to_blob_container(blob_service, source_file_service, destination_container, destination_path,
                                 source_share, source_sas, source_file_dir, source_file_name):
    """Server-side copy of a file-share file into a blob container.

    Returns the destination blob URL; raises CLIError when the copy fails.
    """
    from azure.common import AzureException
    file_url, source_file_dir, source_file_name = \
        make_encoded_file_url_and_params(source_file_service, source_share, source_file_dir,
                                         source_file_name, source_sas)

    source_path = os.path.join(source_file_dir, source_file_name) if source_file_dir else source_file_name
    destination_blob_name = normalize_blob_file_path(destination_path, source_path)

    try:
        blob_service.copy_blob(destination_container, destination_blob_name, file_url)
        return blob_service.make_blob_url(destination_container, destination_blob_name)
    except AzureException as ex:
        # Import locally for consistency with the sibling copy helpers, so the
        # block does not rely on a module-level CLIError import.
        from knack.util import CLIError
        error_template = 'Failed to copy file {} to container {}. {}'
        raise CLIError(error_template.format(source_file_name, destination_container, ex))
Example #14
0
    def _upload_action(src, dst):
        """Upload local file *src* to the file share at the path derived from *dst*."""
        normalized = normalize_blob_file_path(destination_path, dst)
        dir_name, file_name = os.path.split(normalized)

        _make_directory_in_files_share(client, destination, dir_name)
        create_file_args = {
            'share_name': destination,
            'directory_name': dir_name,
            'file_name': file_name,
            'local_file_path': src,
            'progress_callback': progress_callback,
            'content_settings': guess_content_type(src, content_settings, settings_class),
            'metadata': metadata,
            'max_connections': max_connections,
        }

        # validate_content requires service API version 2016-05-31 or later.
        if cmd.supported_api_version(min_api='2016-05-31'):
            create_file_args['validate_content'] = validate_content

        logger.warning('uploading %s', src)
        client.create_file_from_path(**create_file_args)

        return client.make_file_url(destination, dir_name, file_name)
Example #15
0
def _create_file_and_directory_from_blob(file_service, blob_service, share, container, sas, blob_name,
                                         destination_dir=None, metadata=None, timeout=None, existing_dirs=None):
    """Copy a blob into a file share, creating the destination directory when needed."""
    from azure.common import AzureException
    from azure.cli.command_modules.storage.util import normalize_blob_file_path

    source_url = blob_service.make_blob_url(container, encode_for_url(blob_name), sas_token=sas)
    normalized = normalize_blob_file_path(destination_dir, blob_name)
    dir_name, file_name = os.path.split(normalized)
    _make_directory_in_files_share(file_service, share, dir_name, existing_dirs)

    try:
        file_service.copy_file(share, dir_name, file_name, source_url, metadata, timeout)
        return file_service.make_file_url(share, dir_name, file_name)
    except AzureException:
        from knack.util import CLIError
        error_template = 'Failed to copy blob {} to file share {}. Please check if you have permission to read ' \
                         'source or set a correct sas token.'
        raise CLIError(error_template.format(blob_name, share))
Example #16
0
def storage_blob_download_batch(client, source, destination, source_container_name, pattern=None, dryrun=False,
                                progress_callback=None, max_connections=2):
    """Download all blobs matching *pattern* from a container into a local folder."""

    def _fetch(blob_service, container, dest_root, normalized_name, blob_name):
        # TODO: try catch IO exception
        target = os.path.join(dest_root, normalized_name)
        parent = os.path.dirname(target)
        if not os.path.exists(parent):
            mkdir_p(parent)

        downloaded = blob_service.get_blob_to_path(container, blob_name, target,
                                                   max_connections=max_connections,
                                                   progress_callback=progress_callback)
        return downloaded.name

    source_blobs = collect_blobs(client, source_container_name, pattern)
    download_map = {}
    for name in source_blobs:
        # remove starting path seperator and normalize
        normalized = normalize_blob_file_path(None, name)
        if normalized in download_map:
            from knack.util import CLIError
            raise CLIError('Multiple blobs with download path: `{}`. As a solution, use the `--pattern` parameter '
                           'to select for a subset of blobs to download OR utilize the `storage blob download` '
                           'command instead to download individual blobs.'.format(normalized))
        download_map[normalized] = name

    if dryrun:
        logger = get_logger(__name__)
        logger.warning('download action: from %s to %s', source, destination)
        logger.warning('    pattern %s', pattern)
        logger.warning('  container %s', source_container_name)
        logger.warning('      total %d', len(source_blobs))
        logger.warning(' operations')
        for name in source_blobs:
            logger.warning('  - %s', name)
        return []

    return [_fetch(client, source_container_name, destination, normalized, download_map[normalized])
            for normalized in download_map]
Example #17
0
def storage_blob_upload_batch(
        cmd,
        client,
        source,
        destination,
        pattern=None,  # pylint: disable=too-many-locals
        source_files=None,
        destination_path=None,
        destination_container_name=None,
        blob_type=None,
        content_settings=None,
        metadata=None,
        validate_content=False,
        maxsize_condition=None,
        max_connections=2,
        lease_id=None,
        progress_callback=None,
        if_modified_since=None,
        if_unmodified_since=None,
        if_match=None,
        if_none_match=None,
        timeout=None,
        dryrun=False,
        socket_timeout=None,
        **kwargs):
    """Upload a batch of local files as blobs (track-2 SDK variant).

    ``source_files`` is an iterable of (local_path, blob_name) pairs. Each pair
    is uploaded through a per-blob client obtained from ``client`` via
    ``upload_blob``; uploads rejected by a failed precondition are skipped and
    counted. Returns a list of per-blob result dicts. With ``dryrun`` set,
    nothing is uploaded and results carry only the guessed content settings.
    """
    def _create_return_result(blob_content_settings, upload_result=None):
        # upload_result is the dict returned by upload_blob (track-2 style,
        # keyed access); it is None on dry runs.
        return {
            'Blob': client.url,
            'Type': blob_content_settings.content_type,
            'Last Modified':
            upload_result['last_modified'] if upload_result else None,
            'eTag': upload_result['etag'] if upload_result else None
        }

    # Normalize so the len()/enumerate calls below work when None is passed.
    source_files = source_files or []
    # Resolve the track-2 ContentSettings model for the active resource type.
    t_content_settings = cmd.get_models(
        '_models#ContentSettings',
        resource_type=cmd.command_kwargs['resource_type'])

    results = []
    if dryrun:
        # NOTE(review): `logger` is not defined inside this function —
        # presumably a module-level logger; confirm against the full file.
        logger.info('upload action: from %s to %s', source, destination)
        logger.info('    pattern %s', pattern)
        logger.info('  container %s', destination_container_name)
        logger.info('       type %s', blob_type)
        logger.info('      total %d', len(source_files))
        results = []
        for src, dst in source_files:
            results.append(
                _create_return_result(blob_content_settings=guess_content_type(
                    src, content_settings, t_content_settings)))
    else:

        # Wrap upload_blob so precondition failures surface as (False, None)
        # instead of raising.
        @check_precondition_success
        def _upload_blob(*args, **kwargs):
            return upload_blob(*args, **kwargs)

        # Tell progress reporter to reuse the same hook
        if progress_callback:
            progress_callback.reuse = True

        for index, source_file in enumerate(source_files):
            src, dst = source_file
            # logger.warning('uploading %s', src)
            guessed_content_settings = guess_content_type(
                src, content_settings, t_content_settings)

            # add blob name and number to progress message
            if progress_callback:
                progress_callback.message = '{}/{}: "{}"'.format(
                    index + 1, len(source_files),
                    normalize_blob_file_path(destination_path, dst))
            # Per-blob client: the destination name is the normalized join of
            # destination_path and the pair's blob name.
            blob_client = client.get_blob_client(
                container=destination_container_name,
                blob=normalize_blob_file_path(destination_path, dst))
            include, result = _upload_blob(
                cmd,
                blob_client,
                file_path=src,
                blob_type=blob_type,
                content_settings=guessed_content_settings,
                metadata=metadata,
                validate_content=validate_content,
                maxsize_condition=maxsize_condition,
                max_connections=max_connections,
                lease_id=lease_id,
                progress_callback=progress_callback,
                if_modified_since=if_modified_since,
                if_unmodified_since=if_unmodified_since,
                if_match=if_match,
                if_none_match=if_none_match,
                timeout=timeout,
                **kwargs)
            if include:
                results.append(
                    _create_return_result(
                        blob_content_settings=guessed_content_settings,
                        upload_result=result))
        # end progress hook
        if progress_callback:
            progress_callback.hook.end()
        num_failures = len(source_files) - len(results)
        if num_failures:
            logger.warning(
                '%s of %s files not uploaded due to "Failed Precondition"',
                num_failures, len(source_files))
    return results