Code example #1
def storage_blob_upload_batch(cmd, client, source, destination, pattern=None,  # pylint: disable=too-many-locals
                              source_files=None, destination_path=None,
                              destination_container_name=None, blob_type=None,
                              content_settings=None, metadata=None, validate_content=False,
                              maxsize_condition=None, max_connections=2, lease_id=None, progress_callback=None,
                              if_modified_since=None, if_unmodified_since=None, if_match=None,
                              if_none_match=None, timeout=None, dryrun=False):
    def _create_return_result(blob_name, blob_content_settings, upload_result=None):
        blob_name = normalize_blob_file_path(destination_path, blob_name)
        return {
            'Blob': client.make_blob_url(destination_container_name, blob_name),
            'Type': blob_content_settings.content_type,
            'Last Modified': upload_result.last_modified if upload_result else None,
            'eTag': upload_result.etag if upload_result else None}

    logger = get_logger(__name__)
    t_content_settings = cmd.get_models('blob.models#ContentSettings')

    results = []
    if dryrun:
        logger.info('upload action: from %s to %s', source, destination)
        logger.info('    pattern %s', pattern)
        logger.info('  container %s', destination_container_name)
        logger.info('       type %s', blob_type)
        logger.info('      total %d', len(source_files))
        results = []
        for src, dst in source_files or []:
            results.append(_create_return_result(dst, guess_content_type(src, content_settings, t_content_settings)))
    else:
        @check_precondition_success
        def _upload_blob(*args, **kwargs):
            return upload_blob(*args, **kwargs)

        for src, dst in source_files or []:
            logger.warning('uploading %s', src)
            guessed_content_settings = guess_content_type(src, content_settings, t_content_settings)

            include, result = _upload_blob(cmd, client, destination_container_name,
                                           normalize_blob_file_path(destination_path, dst), src,
                                           blob_type=blob_type, content_settings=guessed_content_settings,
                                           metadata=metadata, validate_content=validate_content,
                                           maxsize_condition=maxsize_condition, max_connections=max_connections,
                                           lease_id=lease_id, progress_callback=progress_callback,
                                           if_modified_since=if_modified_since,
                                           if_unmodified_since=if_unmodified_since, if_match=if_match,
                                           if_none_match=if_none_match, timeout=timeout)
            if include:
                results.append(_create_return_result(dst, guessed_content_settings, result))

        num_failures = len(source_files) - len(results)
        if num_failures:
            logger.warning('%s of %s files not uploaded due to "Failed Precondition"', num_failures, len(source_files))
    return results
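
Note: check_precondition_success is used above but not defined in these examples. A minimal sketch of what such a decorator could look like, based only on how its two-value return is unpacked here (the name matches the call site; the body and exact error handling are assumptions, not the CLI's actual implementation):

from functools import wraps

def check_precondition_success(func):
    # Hypothetical sketch: wrap an upload call so that a "precondition failed"
    # response (HTTP 412) is reported as (False, None) instead of raising,
    # letting the batch loop count and skip those files.
    @wraps(func)
    def wrapper(*args, **kwargs):
        from azure.common import AzureHttpError
        try:
            return True, func(*args, **kwargs)
        except AzureHttpError as ex:
            if ex.status_code != 412:  # only swallow precondition failures
                raise
            return False, None
    return wrapper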
Code example #2
    def _upload_action(src, dst):
        dst = normalize_blob_file_path(destination_path, dst)
        dir_name = os.path.dirname(dst)
        file_name = os.path.basename(dst)

        _make_directory_in_files_share(client, destination, dir_name)
        create_file_args = {
            'share_name': destination,
            'directory_name': dir_name,
            'file_name': file_name,
            'local_file_path': src,
            'progress_callback': progress_callback,
            'content_settings': guess_content_type(src, content_settings, settings_class),
            'metadata': metadata,
            'max_connections': max_connections
        }

        if cmd.supported_api_version(min_api='2016-05-31'):
            create_file_args['validate_content'] = validate_content

        logger.warning('uploading %s', src)
        client.create_file_from_path(**create_file_args)

        return client.make_file_url(destination, dir_name, file_name)
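
_make_directory_in_files_share is called above but not shown. A plausible sketch, assuming it creates each segment of the destination directory inside the share so that nested paths exist before create_file_from_path runs (the helper name matches the call site; the body is an assumption):

def _make_directory_in_files_share(client, file_share, directory_path):
    # Create each parent directory in turn; fail_on_exist=False makes the
    # call a no-op for directories that already exist.
    if not directory_path:
        return
    current = ''
    for segment in directory_path.split('/'):
        current = segment if not current else current + '/' + segment
        client.create_directory(share_name=file_share, directory_name=current,
                                fail_on_exist=False)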
Code example #3
    def validator(cmd, namespace):
        t_blob_content_settings = cmd.get_models(
            '_models#ContentSettings', resource_type=CUSTOM_DATA_STORAGE_BLOB)

        # must run certain validators first for an update
        if update:
            validate_client_parameters(cmd, namespace)

        ns = vars(namespace)
        clear_content_settings = ns.pop('clear_content_settings', False)

        # retrieve the existing object properties for an update
        if update and not clear_content_settings:
            account = ns.get('account_name')
            key = ns.get('account_key')
            cs = ns.get('connection_string')
            sas = ns.get('sas_token')
            token_credential = ns.get('token_credential')
            if _class_name(settings_class) == _class_name(
                    t_blob_content_settings):
                container = ns.get('container_name')
                blob = ns.get('blob_name')
                lease_id = ns.get('lease_id')
                client = cf_blob_client(cmd.cli_ctx,
                                        connection_string=cs,
                                        account_name=account,
                                        account_key=key,
                                        token_credential=token_credential,
                                        sas_token=sas,
                                        container=container,
                                        blob=blob)

                props = client.get_blob_properties(
                    lease=lease_id).content_settings

        # create new properties
        new_props = settings_class(
            content_type=ns.pop('content_type', None),
            content_disposition=ns.pop('content_disposition', None),
            content_encoding=ns.pop('content_encoding', None),
            content_language=ns.pop('content_language', None),
            content_md5=ns.pop('content_md5', None),
            cache_control=ns.pop('content_cache_control', None))

        # if update, fill in any None values with existing
        if update:
            if not clear_content_settings:
                for attr in [
                        'content_type', 'content_disposition',
                        'content_encoding', 'content_language', 'content_md5',
                        'cache_control'
                ]:
                    if getattr(new_props, attr) is None:
                        setattr(new_props, attr, getattr(props, attr))
        else:
            if guess_from_file:
                new_props = guess_content_type(ns[guess_from_file], new_props,
                                               settings_class)

        ns['content_settings'] = new_props
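
_class_name is not shown in these examples. A minimal sketch, assuming it compares classes by fully qualified name, which is useful because the same ContentSettings model can be loaded from different SDK modules:

def _class_name(class_type):
    # Hypothetical helper: identify a model class by module-qualified name so
    # that blob and file ContentSettings classes can be told apart reliably.
    return '{}.{}'.format(class_type.__module__, class_type.__name__)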
Code example #4
    def _upload_action(src, dst):
        dir_name = os.path.dirname(dst)
        file_name = os.path.basename(dst)

        _make_directory_in_files_share(client, destination, dir_name)
        create_file_args = {
            'share_name': destination,
            'directory_name': dir_name,
            'file_name': file_name,
            'local_file_path': src,
            'content_settings': guess_content_type(src, content_settings, settings_class),
            'metadata': metadata,
            'max_connections': max_connections,
        }

        if supported_api_version(ResourceType.DATA_STORAGE,
                                 min_api='2016-05-31'):
            create_file_args['validate_content'] = validate_content

        logger.warning('uploading %s', src)
        client.create_file_from_path(**create_file_args)

        return client.make_file_url(destination, dir_name, file_name)
Code example #5
File: _validators.py Project: mevtorres/Azure-CLI
    def validator(cmd, namespace):
        t_base_blob_service, t_file_service, t_blob_content_settings, t_file_content_settings = cmd.get_models(
            'blob.baseblobservice#BaseBlobService',
            'file#FileService',
            'blob.models#ContentSettings',
            'file.models#ContentSettings')

        # must run certain validators first for an update
        if update:
            validate_client_parameters(cmd, namespace)
        if update and _class_name(settings_class) == _class_name(t_file_content_settings):
            get_file_path_validator()(namespace)
        ns = vars(namespace)

        # retrieve the existing object properties for an update
        if update:
            account = ns.get('account_name')
            key = ns.get('account_key')
            cs = ns.get('connection_string')
            sas = ns.get('sas_token')
            if _class_name(settings_class) == _class_name(t_blob_content_settings):
                client = get_storage_data_service_client(cmd.cli_ctx,
                                                         t_base_blob_service,
                                                         account,
                                                         key,
                                                         cs,
                                                         sas)
                container = ns.get('container_name')
                blob = ns.get('blob_name')
                lease_id = ns.get('lease_id')
                props = client.get_blob_properties(container, blob, lease_id=lease_id).properties.content_settings
            elif _class_name(settings_class) == _class_name(t_file_content_settings):
                client = get_storage_data_service_client(cmd.cli_ctx, t_file_service, account, key, cs, sas)
                share = ns.get('share_name')
                directory = ns.get('directory_name')
                filename = ns.get('file_name')
                props = client.get_file_properties(share, directory, filename).properties.content_settings

        # create new properties
        new_props = settings_class(
            content_type=ns.pop('content_type', None),
            content_disposition=ns.pop('content_disposition', None),
            content_encoding=ns.pop('content_encoding', None),
            content_language=ns.pop('content_language', None),
            content_md5=ns.pop('content_md5', None),
            cache_control=ns.pop('content_cache_control', None)
        )

        # if update, fill in any None values with existing
        if update:
            for attr in ['content_type', 'content_disposition', 'content_encoding', 'content_language', 'content_md5',
                         'cache_control']:
                if getattr(new_props, attr) is None:
                    setattr(new_props, attr, getattr(props, attr))
        else:
            if guess_from_file:
                new_props = guess_content_type(ns[guess_from_file], new_props, settings_class)

        ns['content_settings'] = new_props
Code example #6
File: blob.py Project: yolocs/azure-cli
def storage_blob_upload_batch(cmd, client, source, destination, pattern=None,  # pylint: disable=too-many-locals
                              source_files=None,
                              destination_container_name=None, blob_type=None,
                              content_settings=None, metadata=None, validate_content=False,
                              maxsize_condition=None, max_connections=2, lease_id=None,
                              if_modified_since=None, if_unmodified_since=None, if_match=None,
                              if_none_match=None, timeout=None, dryrun=False):
    def _create_return_result(blob_name, blob_content_settings, upload_result=None):
        return {
            'Blob': client.make_blob_url(destination_container_name, blob_name),
            'Type': blob_content_settings.content_type,
            'Last Modified': upload_result.last_modified if upload_result else None,
            'eTag': upload_result.etag if upload_result else None}

    logger = get_logger(__name__)
    t_content_settings = cmd.get_models('blob.models#ContentSettings')

    results = []
    if dryrun:
        logger.info('upload action: from %s to %s', source, destination)
        logger.info('    pattern %s', pattern)
        logger.info('  container %s', destination_container_name)
        logger.info('       type %s', blob_type)
        logger.info('      total %d', len(source_files))
        results = []
        for src, dst in source_files or []:
            results.append(_create_return_result(dst, guess_content_type(src, content_settings, t_content_settings)))
    else:
        for src, dst in source_files or []:
            logger.warning('uploading %s', src)
            guessed_content_settings = guess_content_type(src, content_settings, t_content_settings)
            result = upload_blob(cmd, client, destination_container_name, dst, src,
                                 blob_type=blob_type, content_settings=guessed_content_settings, metadata=metadata,
                                 validate_content=validate_content, maxsize_condition=maxsize_condition,
                                 max_connections=max_connections, lease_id=lease_id,
                                 if_modified_since=if_modified_since, if_unmodified_since=if_unmodified_since,
                                 if_match=if_match, if_none_match=if_none_match, timeout=timeout)
            results.append(_create_return_result(dst, guessed_content_settings, result))
    return results
Code example #7
File: file.py Project: sptramer/azure-cli
    def _upload_action(src, dst):
        dst = normalize_blob_file_path(destination_path, dst)
        dir_name = os.path.dirname(dst)
        file_name = os.path.basename(dst)

        _make_directory_in_files_share(client, destination, dir_name)
        create_file_args = {'share_name': destination, 'directory_name': dir_name, 'file_name': file_name,
                            'local_file_path': src, 'progress_callback': progress_callback,
                            'content_settings': guess_content_type(src, content_settings, settings_class),
                            'metadata': metadata, 'max_connections': max_connections}

        if cmd.supported_api_version(min_api='2016-05-31'):
            create_file_args['validate_content'] = validate_content

        logger.warning('uploading %s', src)
        client.create_file_from_path(**create_file_args)

        return client.make_file_url(destination, dir_name, file_name)
Code example #8
File: file.py Project: LukaszStem/azure-cli
def storage_file_upload_batch(client, destination, source, pattern=None, dryrun=False, validate_content=False,
                              content_settings=None, max_connections=1, metadata=None):
    """ Upload local files to Azure Storage File Share in batch """

    from .util import glob_files_locally
    source_files = [c for c in glob_files_locally(source, pattern)]
    logger = get_az_logger(__name__)
    settings_class = get_sdk(ResourceType.DATA_STORAGE, 'file.models#ContentSettings')

    if dryrun:
        logger.info('upload files to file share')
        logger.info('    account %s', client.account_name)
        logger.info('      share %s', destination)
        logger.info('      total %d', len(source_files or []))
        return [{'File': client.make_file_url(destination, os.path.dirname(dst), os.path.basename(dst)),
                 'Type': guess_content_type(src, content_settings, settings_class).content_type}
                for src, dst in source_files]

    # TODO: Performance improvement
    # 1. Upload files in parallel
    def _upload_action(src, dst):
        dir_name = os.path.dirname(dst)
        file_name = os.path.basename(dst)

        _make_directory_in_files_share(client, destination, dir_name)
        create_file_args = {
            'share_name': destination,
            'directory_name': dir_name,
            'file_name': file_name,
            'local_file_path': src,
            'content_settings': guess_content_type(src, content_settings, settings_class),
            'metadata': metadata,
            'max_connections': max_connections,
        }

        if supported_api_version(ResourceType.DATA_STORAGE, min_api='2016-05-31'):
            create_file_args['validate_content'] = validate_content

        logger.warning('uploading %s', src)
        client.create_file_from_path(**create_file_args)

        return client.make_file_url(destination, dir_name, file_name)

    return list(_upload_action(src, dst) for src, dst in source_files)
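
glob_files_locally is imported from .util but not shown. A minimal sketch, assuming it walks the source directory and yields (local_path, relative_destination) pairs, filtering names against the optional glob pattern:

import fnmatch
import os

def glob_files_locally(folder_path, pattern):
    # Yield (full local path, path relative to folder_path) for every file
    # under folder_path whose relative path matches the glob pattern.
    prefix_len = len(folder_path) + 1  # strip the folder and the separator
    for root, _, files in os.walk(folder_path):
        for name in files:
            full_path = os.path.join(root, name)
            relative = full_path[prefix_len:]
            if not pattern or fnmatch.fnmatch(relative, pattern):
                yield full_path, relative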
Code example #9
def storage_file_upload_batch(client, destination, source, pattern=None, dryrun=False, validate_content=False,
                              content_settings=None, max_connections=1, metadata=None):
    """ Upload local files to Azure Storage File Share in batch """

    from .util import glob_files_locally
    source_files = [c for c in glob_files_locally(source, pattern)]
    logger = get_az_logger(__name__)
    settings_class = get_sdk(ResourceType.DATA_STORAGE, 'file.models#ContentSettings')

    if dryrun:
        logger.info('upload files to file share')
        logger.info('    account %s', client.account_name)
        logger.info('      share %s', destination)
        logger.info('      total %d', len(source_files))
        return [{'File': client.make_file_url(destination, os.path.dirname(dst), os.path.basename(dst)),
                 'Type': guess_content_type(src, content_settings, settings_class).content_type}
                for src, dst in source_files]

    # TODO: Performance improvement
    # 1. Upload files in parallel
    def _upload_action(src, dst):
        dir_name = os.path.dirname(dst)
        file_name = os.path.basename(dst)

        _make_directory_in_files_share(client, destination, dir_name)
        create_file_args = {
            'share_name': destination,
            'directory_name': dir_name,
            'file_name': file_name,
            'local_file_path': src,
            'content_settings': guess_content_type(src, content_settings, settings_class),
            'metadata': metadata,
            'max_connections': max_connections,
        }

        if supported_api_version(ResourceType.DATA_STORAGE, min_api='2016-05-31'):
            create_file_args['validate_content'] = validate_content

        logger.warning('uploading %s', src)
        client.create_file_from_path(**create_file_args)

        return client.make_file_url(destination, dir_name, file_name)

    return list(_upload_action(src, dst) for src, dst in source_files)
Code example #10
File: file.py Project: avanigupta/azure-cli
def storage_file_upload_batch(cmd, client, destination, source, destination_path=None, pattern=None, dryrun=False,
                              validate_content=False, content_settings=None, max_connections=1, metadata=None,
                              progress_callback=None):
    """ Upload local files to Azure Storage File Share in batch """

    from azure.cli.command_modules.storage.util import glob_files_locally, normalize_blob_file_path

    source_files = list(glob_files_locally(source, pattern))
    logger = get_logger(__name__)
    settings_class = cmd.get_models('file.models#ContentSettings')

    if dryrun:
        logger.info('upload files to file share')
        logger.info('    account %s', client.account_name)
        logger.info('      share %s', destination)
        logger.info('      total %d', len(source_files))
        return [{'File': client.make_file_url(destination, os.path.dirname(dst) or None, os.path.basename(dst)),
                 'Type': guess_content_type(src, content_settings, settings_class).content_type} for src, dst in
                source_files]

    # TODO: Performance improvement
    # 1. Upload files in parallel
    def _upload_action(src, dst):
        dst = normalize_blob_file_path(destination_path, dst)
        dir_name = os.path.dirname(dst)
        file_name = os.path.basename(dst)

        _make_directory_in_files_share(client, destination, dir_name)
        create_file_args = {'share_name': destination, 'directory_name': dir_name, 'file_name': file_name,
                            'local_file_path': src, 'progress_callback': progress_callback,
                            'content_settings': guess_content_type(src, content_settings, settings_class),
                            'metadata': metadata, 'max_connections': max_connections}

        if cmd.supported_api_version(min_api='2016-05-31'):
            create_file_args['validate_content'] = validate_content

        logger.warning('uploading %s', src)
        client.create_file_from_path(**create_file_args)

        return client.make_file_url(destination, dir_name, file_name)

    return list(_upload_action(src, dst) for src, dst in source_files)
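
normalize_blob_file_path is also defined in the util module and not shown. A plausible sketch, assuming it prefixes the optional destination path and normalizes OS-specific separators to '/' so the result is a valid blob or file-share path:

import os

def normalize_blob_file_path(path, name):
    # Join the optional destination prefix with the relative name, then
    # normalize separators to '/' and strip any leading/trailing ones.
    if path:
        name = '/'.join((path, name))
    return '/'.join(os.path.normpath(name).split(os.path.sep)).strip('/')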
Code example #11
File: file.py Project: LukaszStem/azure-cli
    def _upload_action(src, dst):
        dir_name = os.path.dirname(dst)
        file_name = os.path.basename(dst)

        _make_directory_in_files_share(client, destination, dir_name)
        create_file_args = {
            'share_name': destination,
            'directory_name': dir_name,
            'file_name': file_name,
            'local_file_path': src,
            'content_settings': guess_content_type(src, content_settings, settings_class),
            'metadata': metadata,
            'max_connections': max_connections,
        }

        if supported_api_version(ResourceType.DATA_STORAGE, min_api='2016-05-31'):
            create_file_args['validate_content'] = validate_content

        logger.warning('uploading %s', src)
        client.create_file_from_path(**create_file_args)

        return client.make_file_url(destination, dir_name, file_name)
Code example #12
    def validator(cmd, namespace):
        t_blob_content_settings = cmd.get_models(
            '_models#ContentSettings', resource_type=CUSTOM_DATA_STORAGE_BLOB)

        # must run certain validators first for an update
        if update:
            validate_client_parameters(cmd, namespace)

        ns = vars(namespace)
        clear_content_settings = ns.pop('clear_content_settings', False)

        # retrieve the existing object properties for an update
        if update and not clear_content_settings:
            account = ns.get('account_name')
            key = ns.get('account_key')
            cs = ns.get('connection_string')
            sas = ns.get('sas_token')
            token_credential = ns.get('token_credential')
            if _class_name(settings_class) == _class_name(
                    t_blob_content_settings):
                container = ns.get('container_name')
                blob = ns.get('blob_name')
                lease_id = ns.get('lease_id')
                client_kwargs = {
                    'connection_string': cs,
                    'account_name': account,
                    'account_key': key,
                    'token_credential': token_credential,
                    'sas_token': sas,
                    'container_name': container,
                    'blob_name': blob
                }
                client = cf_blob_client(cmd.cli_ctx, client_kwargs)

                props = client.get_blob_properties(
                    lease=lease_id).content_settings

        # create new properties
        new_props = settings_class(
            content_type=ns.pop('content_type', None),
            content_disposition=ns.pop('content_disposition', None),
            content_encoding=ns.pop('content_encoding', None),
            content_language=ns.pop('content_language', None),
            content_md5=ns.pop('content_md5', None),
            cache_control=ns.pop('content_cache_control', None))

        # if update, fill in any None values with existing
        if update:
            if not clear_content_settings:
                for attr in [
                        'content_type', 'content_disposition',
                        'content_encoding', 'content_language', 'content_md5',
                        'cache_control'
                ]:
                    if getattr(new_props, attr) is None:
                        setattr(new_props, attr, getattr(props, attr))
        else:
            if guess_from_file:
                new_props = guess_content_type(ns[guess_from_file], new_props,
                                               settings_class)

        # In the track2 SDK, content_md5 must be a bytearray, which the SDK serializes
        # to a string for the request. To stay consistent with track1 input, the CLI
        # treats all parameter values as strings, so the content_md5 value is converted
        # to a bytearray here; the track2 SDK then serializes it into the correct
        # string form in the request header.
        if process_md5 and new_props.content_md5:
            from .track2_util import _str_to_bytearray
            new_props.content_md5 = _str_to_bytearray(new_props.content_md5)

        ns['content_settings'] = new_props
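
_str_to_bytearray lives in track2_util and is not shown here. A minimal sketch, assuming the CLI accepts content_md5 as a base64-encoded string (the form the service reports) and the track2 SDK wants the raw digest bytes:

import base64

def _str_to_bytearray(str_input):
    # Decode the base64 string into raw MD5 bytes; the track2 SDK will
    # re-encode it as a base64 string in the request header.
    if str_input:
        return bytearray(base64.b64decode(str_input))
    return str_input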
Code example #13
File: blob.py Project: ranisha2/azure-cli-1
def upload_blob(cmd, client, file_path, container_name=None, blob_name=None, blob_type=None, content_settings=None,
                metadata=None, validate_content=False, maxsize_condition=None, max_connections=2, lease_id=None,
                tier=None, if_modified_since=None, if_unmodified_since=None, if_match=None, if_none_match=None,
                timeout=None, progress_callback=None, encryption_scope=None):
    """Upload a blob to a container."""

    if encryption_scope:
        count = os.path.getsize(file_path)
        with open(file_path, 'rb') as stream:
            data = stream.read(count)
        from azure.core import MatchConditions
        upload_args = {
            'content_settings': content_settings,
            'metadata': metadata,
            'timeout': timeout,
            'if_modified_since': if_modified_since,
            'if_unmodified_since': if_unmodified_since,
            'blob_type': transform_blob_type(cmd, blob_type),
            'validate_content': validate_content,
            'lease': lease_id,
            'max_concurrency': max_connections,
        }

        if cmd.supported_api_version(min_api='2017-04-17') and tier:
            upload_args['premium_page_blob_tier'] = tier
        if maxsize_condition:
            upload_args['maxsize_condition'] = maxsize_condition
        if cmd.supported_api_version(min_api='2016-05-31'):
            upload_args['validate_content'] = validate_content

        # Precondition Check
        if if_match:
            if if_match == '*':
                upload_args['match_condition'] = MatchConditions.IfPresent
            else:
                upload_args['etag'] = if_match
                upload_args['match_condition'] = MatchConditions.IfNotModified

        if if_none_match:
            upload_args['etag'] = if_none_match
            upload_args['match_condition'] = MatchConditions.IfModified
        response = client.upload_blob(data=data, length=count, encryption_scope=encryption_scope, **upload_args)
        if response['content_md5'] is not None:
            from msrest import Serializer
            response['content_md5'] = Serializer.serialize_bytearray(response['content_md5'])
        return response

    t_content_settings = cmd.get_models('blob.models#ContentSettings')
    content_settings = guess_content_type(file_path, content_settings, t_content_settings)

    def upload_append_blob():
        check_blob_args = {
            'container_name': container_name,
            'blob_name': blob_name,
            'lease_id': lease_id,
            'if_modified_since': if_modified_since,
            'if_unmodified_since': if_unmodified_since,
            'if_match': if_match,
            'if_none_match': if_none_match,
            'timeout': timeout
        }

        if client.exists(container_name, blob_name):
            # used to check for the preconditions as append_blob_from_path() cannot
            client.get_blob_properties(**check_blob_args)
        else:
            client.create_blob(content_settings=content_settings, metadata=metadata, **check_blob_args)

        append_blob_args = {
            'container_name': container_name,
            'blob_name': blob_name,
            'file_path': file_path,
            'progress_callback': progress_callback,
            'maxsize_condition': maxsize_condition,
            'lease_id': lease_id,
            'timeout': timeout
        }

        if cmd.supported_api_version(min_api='2016-05-31'):
            append_blob_args['validate_content'] = validate_content

        return client.append_blob_from_path(**append_blob_args)

    def upload_block_blob():
        # increase the block size to 100MB when the block list will contain more than 50,000 blocks
        if os.path.isfile(file_path) and os.stat(file_path).st_size > 50000 * 4 * 1024 * 1024:
            client.MAX_BLOCK_SIZE = 100 * 1024 * 1024
            client.MAX_SINGLE_PUT_SIZE = 256 * 1024 * 1024

        create_blob_args = {
            'container_name': container_name,
            'blob_name': blob_name,
            'file_path': file_path,
            'progress_callback': progress_callback,
            'content_settings': content_settings,
            'metadata': metadata,
            'max_connections': max_connections,
            'lease_id': lease_id,
            'if_modified_since': if_modified_since,
            'if_unmodified_since': if_unmodified_since,
            'if_match': if_match,
            'if_none_match': if_none_match,
            'timeout': timeout
        }

        if cmd.supported_api_version(min_api='2017-04-17') and tier:
            create_blob_args['premium_page_blob_tier'] = tier

        if cmd.supported_api_version(min_api='2016-05-31'):
            create_blob_args['validate_content'] = validate_content

        return client.create_blob_from_path(**create_blob_args)

    type_func = {
        'append': upload_append_blob,
        'block': upload_block_blob,
        'page': upload_block_blob  # same implementation
    }
    return type_func[blob_type]()
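
transform_blob_type is not defined in these examples. A minimal sketch, assuming it maps the CLI's 'block'/'page'/'append' strings onto the track2 SDK's BlobType enum (the real helper may resolve the model through cmd.get_models instead of a direct import):

def transform_blob_type(cmd, blob_type):
    # Hypothetical mapping from CLI strings to the track2 BlobType enum.
    from azure.storage.blob import BlobType
    return {
        'block': BlobType.BlockBlob,
        'page': BlobType.PageBlob,
        'append': BlobType.AppendBlob,
    }.get(blob_type)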
Code example #14
File: custom.py Project: smoghe/azure-cli
def upload_blob(client, container_name, blob_name, file_path, blob_type=None, content_settings=None, metadata=None,
                validate_content=False, maxsize_condition=None, max_connections=2, lease_id=None, tier=None,
                if_modified_since=None, if_unmodified_since=None, if_match=None, if_none_match=None, timeout=None):
    """Upload a blob to a container."""

    settings_class = get_sdk(ResourceType.DATA_STORAGE, 'blob.models#ContentSettings')
    content_settings = guess_content_type(file_path, content_settings, settings_class)

    def upload_append_blob():
        if not client.exists(container_name, blob_name):
            client.create_blob(
                container_name=container_name,
                blob_name=blob_name,
                content_settings=content_settings,
                metadata=metadata,
                lease_id=lease_id,
                if_modified_since=if_modified_since,
                if_match=if_match,
                if_none_match=if_none_match,
                timeout=timeout)

        append_blob_args = {
            'container_name': container_name,
            'blob_name': blob_name,
            'file_path': file_path,
            'progress_callback': _update_progress,
            'maxsize_condition': maxsize_condition,
            'lease_id': lease_id,
            'timeout': timeout
        }

        if supported_api_version(ResourceType.DATA_STORAGE, min_api='2016-05-31'):
            append_blob_args['validate_content'] = validate_content

        return client.append_blob_from_path(**append_blob_args)

    def upload_block_blob():
        import os

        # increase the block size to 100MB when the file is larger than 200GB
        if os.path.isfile(file_path) and os.stat(file_path).st_size > 200 * 1024 * 1024 * 1024:
            client.MAX_BLOCK_SIZE = 100 * 1024 * 1024
            client.MAX_SINGLE_PUT_SIZE = 256 * 1024 * 1024

        create_blob_args = {
            'container_name': container_name,
            'blob_name': blob_name,
            'file_path': file_path,
            'progress_callback': _update_progress,
            'content_settings': content_settings,
            'metadata': metadata,
            'max_connections': max_connections,
            'lease_id': lease_id,
            'if_modified_since': if_modified_since,
            'if_unmodified_since': if_unmodified_since,
            'if_match': if_match,
            'if_none_match': if_none_match,
            'timeout': timeout
        }

        if supported_api_version(ResourceType.DATA_STORAGE, min_api='2017-04-17') and tier:
            create_blob_args['premium_page_blob_tier'] = tier

        if supported_api_version(ResourceType.DATA_STORAGE, min_api='2016-05-31'):
            create_blob_args['validate_content'] = validate_content

        return client.create_blob_from_path(**create_blob_args)

    type_func = {
        'append': upload_append_blob,
        'block': upload_block_blob,
        'page': upload_block_blob  # same implementation
    }
    return type_func[blob_type]()
Code example #15
File: blob.py Project: jiayexie/azure-cli
def upload_blob(cmd, client, container_name, blob_name, file_path, blob_type=None, content_settings=None, metadata=None,
                validate_content=False, maxsize_condition=None, max_connections=2, lease_id=None, tier=None,
                if_modified_since=None, if_unmodified_since=None, if_match=None, if_none_match=None, timeout=None,
                progress_callback=None):
    """Upload a blob to a container."""

    t_content_settings = cmd.get_models('blob.models#ContentSettings')
    content_settings = guess_content_type(file_path, content_settings, t_content_settings)

    def upload_append_blob():
        check_blob_args = {
            'container_name': container_name,
            'blob_name': blob_name,
            'lease_id': lease_id,
            'if_modified_since': if_modified_since,
            'if_unmodified_since': if_unmodified_since,
            'if_match': if_match,
            'if_none_match': if_none_match,
            'timeout': timeout
        }

        if client.exists(container_name, blob_name):
            # used to check for the preconditions as append_blob_from_path() cannot
            client.get_blob_properties(**check_blob_args)
        else:
            client.create_blob(content_settings=content_settings, metadata=metadata, **check_blob_args)

        append_blob_args = {
            'container_name': container_name,
            'blob_name': blob_name,
            'file_path': file_path,
            'progress_callback': progress_callback,
            'maxsize_condition': maxsize_condition,
            'lease_id': lease_id,
            'timeout': timeout
        }

        if cmd.supported_api_version(min_api='2016-05-31'):
            append_blob_args['validate_content'] = validate_content

        return client.append_blob_from_path(**append_blob_args)

    def upload_block_blob():
        # increase the block size to 100MB when the block list will contain more than 50,000 blocks
        if os.path.isfile(file_path) and os.stat(file_path).st_size > 50000 * 4 * 1024 * 1024:
            client.MAX_BLOCK_SIZE = 100 * 1024 * 1024
            client.MAX_SINGLE_PUT_SIZE = 256 * 1024 * 1024

        create_blob_args = {
            'container_name': container_name,
            'blob_name': blob_name,
            'file_path': file_path,
            'progress_callback': progress_callback,
            'content_settings': content_settings,
            'metadata': metadata,
            'max_connections': max_connections,
            'lease_id': lease_id,
            'if_modified_since': if_modified_since,
            'if_unmodified_since': if_unmodified_since,
            'if_match': if_match,
            'if_none_match': if_none_match,
            'timeout': timeout
        }

        if cmd.supported_api_version(min_api='2017-04-17') and tier:
            create_blob_args['premium_page_blob_tier'] = tier

        if cmd.supported_api_version(min_api='2016-05-31'):
            create_blob_args['validate_content'] = validate_content

        return client.create_blob_from_path(**create_blob_args)

    type_func = {
        'append': upload_append_blob,
        'block': upload_block_blob,
        'page': upload_block_blob  # same implementation
    }
    return type_func[blob_type]()
Code example #16
def storage_blob_upload_batch(
        client,
        source,
        destination,
        pattern=None,
        source_files=None,  # pylint: disable=too-many-locals
        destination_container_name=None,
        blob_type=None,
        content_settings=None,
        metadata=None,
        validate_content=False,
        maxsize_condition=None,
        max_connections=2,
        lease_id=None,
        if_modified_since=None,
        if_unmodified_since=None,
        if_match=None,
        if_none_match=None,
        timeout=None,
        dryrun=False):
    """
    Upload files to storage container as blobs

    :param str source:
        The directory containing the files to be uploaded.

    :param str destination:
        The string that represents the destination of this upload operation. The destination
        can be the container URL or the container name. When the destination is the container
        URL, the storage account name will be parsed from the URL.

    :param str pattern:
        The pattern used for file globbing. The supported patterns are '*', '?', '[seq]',
        and '[!seq]'.

    :param bool dryrun:
        Show a summary of the operations to be taken instead of actually uploading the file(s)

    :param string if_match:
        An ETag value, or the wildcard character (*). Specify this header to perform the operation
        only if the resource's ETag matches the value specified.

    :param string if_none_match:
        An ETag value, or the wildcard character (*). Specify this header to perform the
        operation only if the resource's ETag does not match the value specified. Specify the
        wildcard character (*) to perform the operation only if the resource does not exist,
        and fail the operation if it does exist.
    """
    def _append_blob(file_path, blob_name, blob_content_settings):
        if not client.exists(destination_container_name, blob_name):
            client.create_blob(container_name=destination_container_name,
                               blob_name=blob_name,
                               content_settings=blob_content_settings,
                               metadata=metadata,
                               lease_id=lease_id,
                               if_modified_since=if_modified_since,
                               if_match=if_match,
                               if_none_match=if_none_match,
                               timeout=timeout)

        append_blob_args = {
            'container_name': destination_container_name,
            'blob_name': blob_name,
            'file_path': file_path,
            'progress_callback': lambda c, t: None,
            'maxsize_condition': maxsize_condition,
            'lease_id': lease_id,
            'timeout': timeout
        }

        if supported_api_version(ResourceType.DATA_STORAGE,
                                 min_api='2016-05-31'):
            append_blob_args['validate_content'] = validate_content

        return client.append_blob_from_path(**append_blob_args)

    def _upload_blob(file_path, blob_name, blob_content_settings):
        create_blob_args = {
            'container_name': destination_container_name,
            'blob_name': blob_name,
            'file_path': file_path,
            'progress_callback': lambda c, t: None,
            'content_settings': blob_content_settings,
            'metadata': metadata,
            'max_connections': max_connections,
            'lease_id': lease_id,
            'if_modified_since': if_modified_since,
            'if_unmodified_since': if_unmodified_since,
            'if_match': if_match,
            'if_none_match': if_none_match,
            'timeout': timeout
        }

        if supported_api_version(ResourceType.DATA_STORAGE,
                                 min_api='2016-05-31'):
            create_blob_args['validate_content'] = validate_content

        return client.create_blob_from_path(**create_blob_args)

    def _create_return_result(blob_name,
                              blob_content_settings,
                              upload_result=None):
        return {
            'Blob': client.make_blob_url(destination_container_name, blob_name),
            'Type': blob_content_settings.content_type,
            'Last Modified': upload_result.last_modified if upload_result else None,
            'eTag': upload_result.etag if upload_result else None
        }

    upload_action = _upload_blob if blob_type == 'block' or blob_type == 'page' else _append_blob
    logger = get_az_logger(__name__)
    settings_class = get_sdk(ResourceType.DATA_STORAGE,
                             'blob.models#ContentSettings')

    results = []
    if dryrun:
        logger.info('upload action: from %s to %s', source, destination)
        logger.info('    pattern %s', pattern)
        logger.info('  container %s', destination_container_name)
        logger.info('       type %s', blob_type)
        logger.info('      total %d', len(source_files))
        results = []
        for src, dst in source_files or []:
            results.append(
                _create_return_result(
                    dst,
                    guess_content_type(src, content_settings, settings_class)))
    else:
        for src, dst in source_files or []:
            logger.warning('uploading %s', src)
            guessed_content_settings = guess_content_type(
                src, content_settings, settings_class)
            results.append(
                _create_return_result(
                    dst, guessed_content_settings,
                    upload_action(src, dst, guessed_content_settings)))

    return results
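
guess_content_type appears in every example above but is defined elsewhere. A minimal sketch, assuming it only fills in a content type guessed from the file extension when the caller has not set one explicitly:

import mimetypes

def guess_content_type(file_path, original, settings_class):
    # Respect explicitly provided settings; otherwise build a fresh settings
    # object whose content_type is guessed from the file name.
    if original and (original.content_type or original.content_encoding):
        return original
    content_type, _ = mimetypes.guess_type(file_path)
    return settings_class(content_type=content_type)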
Code example #17
def storage_blob_upload_batch(
        cmd,
        client,
        source,
        destination,
        pattern=None,  # pylint: disable=too-many-locals
        source_files=None,
        destination_path=None,
        destination_container_name=None,
        blob_type=None,
        content_settings=None,
        metadata=None,
        validate_content=False,
        maxsize_condition=None,
        max_connections=2,
        lease_id=None,
        progress_callback=None,
        if_modified_since=None,
        if_unmodified_since=None,
        if_match=None,
        if_none_match=None,
        timeout=None,
        dryrun=False,
        socket_timeout=None,
        **kwargs):
    def _create_return_result(blob_content_settings, upload_result=None):
        return {
            'Blob': client.url,
            'Type': blob_content_settings.content_type,
            'Last Modified': upload_result['last_modified'] if upload_result else None,
            'eTag': upload_result['etag'] if upload_result else None
        }

    source_files = source_files or []
    t_content_settings = cmd.get_models(
        '_models#ContentSettings',
        resource_type=cmd.command_kwargs['resource_type'])

    results = []
    if dryrun:
        logger.info('upload action: from %s to %s', source, destination)
        logger.info('    pattern %s', pattern)
        logger.info('  container %s', destination_container_name)
        logger.info('       type %s', blob_type)
        logger.info('      total %d', len(source_files))
        results = []
        for src, dst in source_files:
            results.append(
                _create_return_result(blob_content_settings=guess_content_type(
                    src, content_settings, t_content_settings)))
    else:

        @check_precondition_success
        def _upload_blob(*args, **kwargs):
            return upload_blob(*args, **kwargs)

        # Tell progress reporter to reuse the same hook
        if progress_callback:
            progress_callback.reuse = True

        for index, source_file in enumerate(source_files):
            src, dst = source_file
            # logger.warning('uploading %s', src)
            guessed_content_settings = guess_content_type(
                src, content_settings, t_content_settings)

            # add blob name and number to progress message
            if progress_callback:
                progress_callback.message = '{}/{}: "{}"'.format(
                    index + 1, len(source_files),
                    normalize_blob_file_path(destination_path, dst))
            blob_client = client.get_blob_client(
                container=destination_container_name,
                blob=normalize_blob_file_path(destination_path, dst))
            include, result = _upload_blob(
                cmd,
                blob_client,
                file_path=src,
                blob_type=blob_type,
                content_settings=guessed_content_settings,
                metadata=metadata,
                validate_content=validate_content,
                maxsize_condition=maxsize_condition,
                max_connections=max_connections,
                lease_id=lease_id,
                progress_callback=progress_callback,
                if_modified_since=if_modified_since,
                if_unmodified_since=if_unmodified_since,
                if_match=if_match,
                if_none_match=if_none_match,
                timeout=timeout,
                **kwargs)
            if include:
                results.append(
                    _create_return_result(
                        blob_content_settings=guessed_content_settings,
                        upload_result=result))
        # end progress hook
        if progress_callback:
            progress_callback.hook.end()
        num_failures = len(source_files) - len(results)
        if num_failures:
            logger.warning(
                '%s of %s files not uploaded due to "Failed Precondition"',
                num_failures, len(source_files))
    return results
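
The progress_callback in this example is more than a bare function: the code sets .reuse and .message on it and calls .hook.end(). A hypothetical reporter object satisfying that shape (all names here are assumptions, not the CLI's actual class):

class BatchProgressReporter:
    # Hypothetical callable exposing the attributes the example above relies on.
    def __init__(self, hook):
        self.hook = hook      # underlying progress hook, assumed to expose add()/end()
        self.reuse = False    # keep a single progress bar across many uploads
        self.message = ''     # per-file label, e.g. '3/10: "dir/file.txt"'

    def __call__(self, current, total):
        self.hook.add(message=self.message, value=current, total_val=total)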
Code example #18
File: blob.py Project: sdas-tech/azure-cli
def upload_blob(cmd,
                client,
                container_name,
                blob_name,
                file_path,
                blob_type=None,
                content_settings=None,
                metadata=None,
                validate_content=False,
                maxsize_condition=None,
                max_connections=2,
                lease_id=None,
                tier=None,
                if_modified_since=None,
                if_unmodified_since=None,
                if_match=None,
                if_none_match=None,
                timeout=None,
                progress_callback=None):
    """Upload a blob to a container."""

    t_content_settings = cmd.get_models('blob.models#ContentSettings')
    content_settings = guess_content_type(file_path, content_settings,
                                          t_content_settings)

    def upload_append_blob():
        if not client.exists(container_name, blob_name):
            client.create_blob(container_name=container_name,
                               blob_name=blob_name,
                               content_settings=content_settings,
                               metadata=metadata,
                               lease_id=lease_id,
                               if_modified_since=if_modified_since,
                               if_match=if_match,
                               if_none_match=if_none_match,
                               timeout=timeout)

        append_blob_args = {
            'container_name': container_name,
            'blob_name': blob_name,
            'file_path': file_path,
            'progress_callback': progress_callback,
            'maxsize_condition': maxsize_condition,
            'lease_id': lease_id,
            'timeout': timeout
        }

        if cmd.supported_api_version(min_api='2016-05-31'):
            append_blob_args['validate_content'] = validate_content

        return client.append_blob_from_path(**append_blob_args)

    def upload_block_blob():
        # increase the block size to 100MB when the file is larger than 200GB
        if os.path.isfile(file_path) and os.stat(
                file_path).st_size > 200 * 1024 * 1024 * 1024:
            client.MAX_BLOCK_SIZE = 100 * 1024 * 1024
            client.MAX_SINGLE_PUT_SIZE = 256 * 1024 * 1024

        create_blob_args = {
            'container_name': container_name,
            'blob_name': blob_name,
            'file_path': file_path,
            'progress_callback': progress_callback,
            'content_settings': content_settings,
            'metadata': metadata,
            'max_connections': max_connections,
            'lease_id': lease_id,
            'if_modified_since': if_modified_since,
            'if_unmodified_since': if_unmodified_since,
            'if_match': if_match,
            'if_none_match': if_none_match,
            'timeout': timeout
        }

        if cmd.supported_api_version(min_api='2017-04-17') and tier:
            create_blob_args['premium_page_blob_tier'] = tier

        if cmd.supported_api_version(min_api='2016-05-31'):
            create_blob_args['validate_content'] = validate_content

        return client.create_blob_from_path(**create_blob_args)

    type_func = {
        'append': upload_append_blob,
        'block': upload_block_blob,
        'page': upload_block_blob  # same implementation
    }
    return type_func[blob_type]()
Code example #19
File: blob.py Project: LukaszStem/azure-cli
def storage_blob_upload_batch(client, source, destination, pattern=None, source_files=None,  # pylint: disable=too-many-locals
                              destination_container_name=None, blob_type=None,
                              content_settings=None, metadata=None, validate_content=False,
                              maxsize_condition=None, max_connections=2, lease_id=None,
                              if_modified_since=None, if_unmodified_since=None, if_match=None,
                              if_none_match=None, timeout=None, dryrun=False):
    """
    Upload files to storage container as blobs

    :param str source:
        The directory containing the files to be uploaded.

    :param str destination:
        The string that represents the destination of this upload operation. The destination
        can be the container URL or the container name. When the destination is the container
        URL, the storage account name will be parsed from the URL.

    :param str pattern:
        The pattern used for file globbing. The supported patterns are '*', '?', '[seq]',
        and '[!seq]'.

    :param bool dryrun:
        Show a summary of the operations to be taken instead of actually uploading the file(s)

    :param string if_match:
        An ETag value, or the wildcard character (*). Specify this header to perform the operation
        only if the resource's ETag matches the value specified.

    :param string if_none_match:
        An ETag value, or the wildcard character (*). Specify this header to perform the
        operation only if the resource's ETag does not match the value specified. Specify the
        wildcard character (*) to perform the operation only if the resource does not exist,
        and fail the operation if it does exist.
    """

    def _append_blob(file_path, blob_name, blob_content_settings):
        if not client.exists(destination_container_name, blob_name):
            client.create_blob(
                container_name=destination_container_name,
                blob_name=blob_name,
                content_settings=blob_content_settings,
                metadata=metadata,
                lease_id=lease_id,
                if_modified_since=if_modified_since,
                if_match=if_match,
                if_none_match=if_none_match,
                timeout=timeout)

        append_blob_args = {
            'container_name': destination_container_name,
            'blob_name': blob_name,
            'file_path': file_path,
            'progress_callback': lambda c, t: None,
            'maxsize_condition': maxsize_condition,
            'lease_id': lease_id,
            'timeout': timeout
        }

        if supported_api_version(ResourceType.DATA_STORAGE, min_api='2016-05-31'):
            append_blob_args['validate_content'] = validate_content

        return client.append_blob_from_path(**append_blob_args)

    def _upload_blob(file_path, blob_name, blob_content_settings):
        create_blob_args = {
            'container_name': destination_container_name,
            'blob_name': blob_name,
            'file_path': file_path,
            'progress_callback': lambda c, t: None,
            'content_settings': blob_content_settings,
            'metadata': metadata,
            'max_connections': max_connections,
            'lease_id': lease_id,
            'if_modified_since': if_modified_since,
            'if_unmodified_since': if_unmodified_since,
            'if_match': if_match,
            'if_none_match': if_none_match,
            'timeout': timeout
        }

        if supported_api_version(ResourceType.DATA_STORAGE, min_api='2016-05-31'):
            create_blob_args['validate_content'] = validate_content

        return client.create_blob_from_path(**create_blob_args)

    def _create_return_result(blob_name, blob_content_settings, upload_result=None):
        return {
            'Blob': client.make_blob_url(destination_container_name, blob_name),
            'Type': blob_content_settings.content_type,
            'Last Modified': upload_result.last_modified if upload_result else None,
            'eTag': upload_result.etag if upload_result else None}

    upload_action = _upload_blob if blob_type == 'block' or blob_type == 'page' else _append_blob
    logger = get_az_logger(__name__)
    settings_class = get_sdk(ResourceType.DATA_STORAGE, 'blob.models#ContentSettings')

    results = []
    if dryrun:
        logger.info('upload action: from %s to %s', source, destination)
        logger.info('    pattern %s', pattern)
        logger.info('  container %s', destination_container_name)
        logger.info('       type %s', blob_type)
        logger.info('      total %d', len(source_files))
        results = []
        for src, dst in source_files or []:
            results.append(_create_return_result(dst, guess_content_type(src, content_settings, settings_class)))
    else:
        for src, dst in source_files or []:
            logger.warning('uploading %s', src)
            guessed_content_settings = guess_content_type(src, content_settings, settings_class)
            results.append(
                _create_return_result(dst, guessed_content_settings, upload_action(src, dst, guessed_content_settings)))

    return results