def copyBlobToBlob(sourceUrl, sourceKey, destUrl, destKey):
    """Server-side copy of a blob between two storage accounts.

    Issues a short-lived read-only SAS on the source blob and asks the
    destination account to copy from the signed URL.

    NOTE(review): `container` and `filename` are not defined in this
    function — presumably module-level globals; confirm. A second
    definition of this name later in the file (which derives the
    container/blob from the URLs) shadows this one at import time.
    """
    # Bug fix: the original read `destkey`, `SourceUrl` and `srckey`,
    # which are NameErrors -- the parameters are destKey, sourceUrl and
    # sourceKey.
    blobservice = BlobService(destUrl, destKey)
    srcblobservice = BlobService(sourceUrl, sourceKey)

    # SAS expiry: one day from now (despite the original "PlusMonth"
    # name, timedelta(1) is a single day), second precision, ISO-8601
    # with a 'Z' suffix as the service expects.
    expiry = datetime.datetime.utcnow() + datetime.timedelta(1)
    expiryISO = expiry.replace(microsecond=0).isoformat() + 'Z'

    srcSasParam = srcblobservice.generate_shared_access_signature(
        container, filename,
        SharedAccessPolicy(AccessPolicy(None, expiryISO, "r"), None))
    srcUrl = srcblobservice.make_blob_url(container, filename,
                                          sas_token=srcSasParam)
    # print() with a single argument behaves identically on Python 2 and 3.
    print(srcUrl)
    blobservice.copy_blob(container, filename, srcUrl)
def copyBlobToBlob(sourceUrl, sourceKey, destUrl, destKey):
    """Server-side copy of a blob from one storage account to another.

    Both URLs are split into their storage parts; a read-only SAS valid
    for one day is generated on the source blob, and the destination
    account copies from the resulting signed URL.
    """
    src = split_storage_url(sourceUrl)
    dst = split_storage_url(destUrl)

    dstService = BlobService(dst[0], destKey)
    srcService = BlobService(src[0], sourceKey)

    # SAS expiry: one day from now, second precision, ISO-8601 + 'Z'.
    expiry = datetime.datetime.utcnow() + datetime.timedelta(1)
    expiryISO = expiry.replace(microsecond=0).isoformat() + 'Z'

    # src[2]/src[3] are the container and blob name parts of the URL.
    sasToken = srcService.generate_shared_access_signature(
        src[2], src[3],
        SharedAccessPolicy(AccessPolicy(None, expiryISO, "r"), None))
    signedSrcUrl = srcService.make_blob_url(src[2], src[3],
                                            sas_token=sasToken)

    print(dst)
    dstService.copy_blob(dst[2], dst[3], signedSrcUrl)
def get_container_details(creds, resource_group_name, account_name, container_name):
    """Return details for a storage container: name, read SAS policy, blobs.

    For each blob a read SAS URL is attached, and the base64 Content-MD5
    property is additionally exposed as a hex digest
    (``blob.properties.content_hex_md5``).
    """
    keys = _get_storage_account_keys(creds, resource_group_name, account_name)
    blob_service = BlobService(account_name, keys.key1)
    model = StorageAccountContainerDetails()
    model.container_name = container_name
    model.sas_policy = _get_shared_access_policy(BlobSharedAccessPermissions.READ)
    model.blobs = []
    for blob in blob_service.iterate_blobs(container_name, include="metadata"):
        sas_token = blob_service.generate_shared_access_signature(
            container_name, blob.name, model.sas_policy)
        blob.sas_url = blob_service.make_blob_url(
            container_name, blob.name, sas_token=sas_token)
        raw_md5 = b64decode(blob.properties.content_md5)
        # Bug fix: hex(val)[2:] drops leading zeros (0x0a -> "a"),
        # corrupting the digest. "%02x" zero-pads each byte to two hex
        # digits; bytearray() yields ints on both Python 2 and 3.
        hex_md5 = "".join("%02x" % val for val in bytearray(raw_md5))
        blob.properties.content_hex_md5 = hex_md5
        model.blobs.append(blob)
    return model
def get_container_details(creds, resource_group_name, account_name, container_name):
    """Return details for a storage container: name, read SAS policy, blobs.

    For each blob a read SAS URL is attached, and the base64 Content-MD5
    property is additionally exposed as a hex digest
    (``blob.properties.content_hex_md5``).
    """
    keys = _get_storage_account_keys(creds, resource_group_name, account_name)
    blob_service = BlobService(account_name, keys.key1)
    model = StorageAccountContainerDetails()
    model.container_name = container_name
    model.sas_policy = _get_shared_access_policy(BlobSharedAccessPermissions.READ)
    model.blobs = []
    for blob in blob_service.iterate_blobs(container_name, include='metadata'):
        sas_token = blob_service.generate_shared_access_signature(
            container_name, blob.name, model.sas_policy)
        blob.sas_url = blob_service.make_blob_url(
            container_name, blob.name, sas_token=sas_token)
        raw_md5 = b64decode(blob.properties.content_md5)
        # Bug fix: hex(val)[2:] drops leading zeros (0x0a -> 'a'),
        # corrupting the digest. '%02x' zero-pads each byte to two hex
        # digits; bytearray() yields ints on both Python 2 and 3.
        hex_md5 = ''.join('%02x' % val for val in bytearray(raw_md5))
        blob.properties.content_hex_md5 = hex_md5
        model.blobs.append(blob)
    return model
def module_impl(rm, log, params, check_mode=False):
    """Implement the Azure blob-storage module actions.

    Dispatches on ``params['mode']``:

    - ``create`` / ``update`` / ``delete`` -- manage the container itself
    - ``put`` / ``get`` / ``list``         -- upload, download, enumerate blobs
    - ``delete_blob``                      -- remove a single blob
    - ``get_url`` / ``get_token``          -- build a blob URL / SAS token

    Returns a ``results`` dict containing at least ``changed`` and ``msg``.
    Raises ``Exception`` on parameter or service errors. When ``check_mode``
    is True no mutating service call is made, but ``changed``/``msg`` still
    report what would have happened.
    """
    if not HAS_AZURE:
        raise Exception("The Azure python sdk is not installed (try 'pip install azure')")
    if not HAS_REQUESTS:
        raise Exception("The requests python module is not installed (try 'pip install requests')")

    resource_group = params.get('resource_group')
    account_name = params.get('account_name')
    container_name = params.get('container_name')
    mode = params.get('mode')
    x_ms_meta_name_values = params.get('x_ms_meta_name_values')
    x_ms_blob_public_access = params.get('x_ms_blob_public_access')
    x_ms_blob_cache_control = params.get('x_ms_blob_cache_control')
    x_ms_blob_content_encoding = params.get('x_ms_blob_content_encoding')
    x_ms_blob_content_language = params.get('x_ms_blob_content_language')
    x_ms_blob_content_type = params.get('x_ms_blob_content_type')
    prefix = params.get('prefix')
    marker = params.get('marker')
    max_results = params.get('max_results')
    blob_name = params.get('blob_name')
    file_path = params.get('file_path')
    overwrite = params.get('overwrite')
    permissions = params.get('permissions')
    hours = params.get('hours')
    days = params.get('days')
    access_token = params.get('access_token')

    results = dict(changed=False)

    storage_client = rm.storage_client

    if not resource_group:
        raise Exception("Parameter error: resource_group cannot be None.")
    if not account_name:
        raise Exception("Parameter error: account_name cannot be None.")
    if not container_name:
        raise Exception("Parameter error: container_name cannot be None.")
    if not NAME_PATTERN.match(container_name):
        raise Exception("Parameter error: container_name must consist of lowercase letters, "
                        "numbers and hyphens. It must begin with a letter or number. "
                        "It may not contain two consecutive hyphens.")
    # TODO(original): add file path validation

    results['account_name'] = account_name
    results['resource_group'] = resource_group
    results['container_name'] = container_name

    # Fetch the storage account keys; key1 is used for the blob service.
    try:
        log('Getting keys')
        keys = {}
        response = storage_client.storage_accounts.list_keys(resource_group, account_name)
        keys[KeyName.key1] = response.storage_account_keys.key1
        keys[KeyName.key2] = response.storage_account_keys.key2
    except AzureHttpError as e:
        log('Error getting keys for account %s' % account_name)
        # Robustness: `e.message` is Python-2 only; fall back to the
        # exception itself if the attribute is missing.
        raise Exception(str(getattr(e, 'message', e)))

    try:
        log('Create blob service')
        bs = BlobService(account_name, keys[KeyName.key1])
    except Exception as e:
        log('Error creating blob service.')
        raise Exception(str(e.args[0]))

    if mode == 'create':
        container = get_container_facts(bs, container_name)
        if container is not None:
            # container exists -- nothing to do
            results['container'] = container
            results['msg'] = "Container already exists."
            return results
        # create the container
        if not check_mode:
            log('Create container %s' % container_name)
            bs.create_container(container_name, x_ms_meta_name_values, x_ms_blob_public_access)
        results['container'] = get_container_facts(bs, container_name)
        results['msg'] = "Container created successfully."
        results['changed'] = True
        return results

    if mode == 'update':
        container = get_container_facts(bs, container_name)
        if container is None:
            # container does not exist -- create it
            if not check_mode:
                log('Create container %s' % container_name)
                bs.create_container(container_name, x_ms_meta_name_values, x_ms_blob_public_access)
            results['changed'] = True
            results['msg'] = 'Container created successfully.'
            return results
        # update existing container
        results['msg'] = "Container not changed."
        if x_ms_meta_name_values:
            if not check_mode:
                log('Update x_ms_meta_name_values for container %s' % container_name)
                bs.set_container_metadata(container_name, x_ms_meta_name_values)
            results['changed'] = True
            results['msg'] = 'Container meta data updated successfully.'
        if x_ms_blob_public_access:
            access = x_ms_blob_public_access
            if x_ms_blob_public_access == 'private':
                # 'private' means no public access level on the service side
                access = None
            if not check_mode:
                log('Set access to %s for container %s' % (access, container_name))
                bs.set_container_acl(container_name=container_name, x_ms_blob_public_access=access)
            results['changed'] = True
            results['msg'] = 'Container ACL updated successfully.'
        if permissions:
            if hours == 0 and days == 0:
                raise Exception("Parameter error: expecting hours > 0 or days > 0")
            # renamed from `id` -- don't shadow the builtin
            policy_id = "%s-%s" % (container_name, permissions)
            si = get_identifier(policy_id, hours, days, permissions)
            identifiers = SignedIdentifiers()
            identifiers.signed_identifiers.append(si)
            if not check_mode:
                log('Set permissions to %s for container %s' % (permissions, container_name))
                bs.set_container_acl(container_name=container_name, signed_identifiers=identifiers)
            results['changed'] = True
            results['msg'] = 'Container ACL updated successfully.'
        results['container'] = get_container_facts(bs, container_name)
        return results

    if mode == 'delete':
        container = get_container_facts(bs, container_name)
        if container is None:
            results['msg'] = "Container %s could not be found." % container_name
            return results
        if not check_mode:
            log('Deleting container %s' % container_name)
            bs.delete_container(container_name)
        results['changed'] = True
        results['msg'] = 'Container deleted successfully.'
        return results

    if mode == 'delete_blob':
        if blob_name is None:
            raise Exception("Parameter error: blob_name cannot be None.")
        container = container_check(bs, container_name)
        blob = get_blob_facts(bs, container_name, blob_name)
        if not blob:
            results['msg'] = 'Blob %s could not be found in container %s.' % (blob_name, container_name)
            return results
        if not check_mode:
            # typo fix: was 'Deleteing'
            log('Deleting %s from container %s.' % (blob_name, container_name))
            bs.delete_blob(container_name, blob_name)
        results['changed'] = True
        results['msg'] = 'Blob successfully deleted.'
        return results

    if mode == 'put':
        if not blob_name:
            raise Exception("Parameter error: blob_name cannot be None.")
        if not file_path:
            raise Exception("Parameter error: file_path cannot be None.")
        if not path_check(file_path):
            raise Exception("File %s does not exist." % file_path)
        container = get_container_facts(bs, container_name)
        blob = None
        if container is not None:
            blob = get_blob_facts(bs, container_name, blob_name)
        if container is not None and blob is not None:
            # both container and blob already exist -- compare checksums
            md5_remote = blob['content-md5']
            md5_local = get_md5(file_path)
            results['container'] = container
            results['blob'] = blob
            if md5_local == md5_remote:
                sum_matches = True
                results['msg'] = 'File checksums match. File not uploaded.'
                if overwrite == 'always':
                    if not check_mode:
                        log('Uploading %s to container %s.' % (file_path, container_name))
                        put_block_blob(
                            bs,
                            container_name,
                            blob_name,
                            file_path,
                            x_ms_meta_name_values,
                            x_ms_blob_cache_control,
                            x_ms_blob_content_encoding,
                            x_ms_blob_content_language,
                            x_ms_blob_content_type
                        )
                    results['blob'] = get_blob_facts(bs, container_name, blob_name)
                    results['changed'] = True
                    results['msg'] = 'File successfully uploaded.'
            else:
                sum_matches = False
                if overwrite in ('always', 'different'):
                    if not check_mode:
                        log('Uploading %s to container %s.' % (file_path, container_name))
                        put_block_blob(
                            bs,
                            container_name,
                            blob_name,
                            file_path,
                            x_ms_meta_name_values,
                            x_ms_blob_cache_control,
                            x_ms_blob_content_encoding,
                            x_ms_blob_content_language,
                            x_ms_blob_content_type
                        )
                    results['blob'] = get_blob_facts(bs, container_name, blob_name)
                    results['changed'] = True
                    results['msg'] = 'File successfully uploaded.'
                else:
                    results['msg'] = "WARNING: Checksums do not match. Use overwrite parameter to force upload."
            return results
        if container is None:
            # container does not exist. create container and upload.
            if not check_mode:
                log('Creating container %s.' % container_name)
                bs.create_container(container_name, x_ms_meta_name_values, x_ms_blob_public_access)
                log('Uploading %s to container %s.' % (file_path, container_name))
                put_block_blob(
                    bs,
                    container_name,
                    blob_name,
                    file_path,
                    x_ms_meta_name_values,
                    x_ms_blob_cache_control,
                    x_ms_blob_content_encoding,
                    x_ms_blob_content_language,
                    x_ms_blob_content_type
                )
            # bug fix: key was misspelled 'conainer', so callers reading
            # results['container'] never saw the new container's facts
            results['container'] = get_container_facts(bs, container_name)
            results['blob'] = get_blob_facts(bs, container_name, blob_name)
            results['changed'] = True
            results['msg'] = 'Successfully created container and uploaded file.'
            return results
        if container is not None:
            # container exists, blob does not. just upload.
            if not check_mode:
                log('Uploading %s to container %s.' % (file_path, container_name))
                put_block_blob(
                    bs,
                    container_name,
                    blob_name,
                    file_path,
                    x_ms_meta_name_values,
                    x_ms_blob_cache_control,
                    x_ms_blob_content_encoding,
                    x_ms_blob_content_language,
                    x_ms_blob_content_type
                )
            results['blob'] = get_blob_facts(bs, container_name, blob_name)
            results['changed'] = True
            # typo fix: was 'Successfully updloaded file.'
            results['msg'] = 'Successfully uploaded file.'
            return results

    if mode == 'list':
        container = container_check(bs, container_name)
        response = bs.list_blobs(
            container_name,
            prefix,
            marker,
            max_results
        )
        results['blobs'] = []
        for blob in response.blobs:
            b = dict(
                name=blob.name,
                snapshot=blob.snapshot,
                last_modified=blob.properties.last_modified,
                content_length=blob.properties.content_length,
                blob_type=blob.properties.blob_type,
            )
            results['blobs'].append(b)
        return results

    if mode == 'get':
        if file_path is None:
            raise Exception("Parameter error: file_path cannot be None.")
        container = container_check(bs, container_name)
        blob = blob_check(bs, container_name, blob_name)
        path_exists = path_check(file_path)
        if not path_exists or overwrite == 'always':
            if not check_mode:
                bs.get_blob_to_path(container_name, blob_name, file_path)
            results['changed'] = True
            results['msg'] = "Blob %s successfully downloaded to %s." % (blob_name, file_path)
            return results
        if path_exists:
            md5_remote = blob['content-md5']
            md5_local = get_md5(file_path)
            if md5_local == md5_remote:
                sum_matches = True
                # NOTE(review): this branch is unreachable -- overwrite ==
                # 'always' already returned above. Kept for fidelity.
                if overwrite == 'always':
                    if not check_mode:
                        bs.get_blob_to_path(container_name, blob_name, file_path)
                    results['changed'] = True
                    results['msg'] = "Blob %s successfully downloaded to %s." % (blob_name, file_path)
                else:
                    results['msg'] = "Local and remote object are identical, ignoring. Use overwrite parameter to force."
            else:
                sum_matches = False
                if overwrite in ('always', 'different'):
                    if not check_mode:
                        bs.get_blob_to_path(container_name, blob_name, file_path)
                    results['changed'] = True
                    results['msg'] = "Blob %s successfully downloaded to %s." % (blob_name, file_path)
                else:
                    results['msg'] = "WARNING: Checksums do not match. Use overwrite parameter to force download."
            if sum_matches is True and overwrite == 'never':
                results['msg'] = "Local and remote object are identical, ignoring. Use overwrite parameter to force."
        return results

    if mode == 'get_url':
        if not blob_name:
            raise Exception("Parameter error: blob_name cannot be None.")
        container = container_check(bs, container_name)
        blob = blob_check(bs, container_name, blob_name)
        url = bs.make_blob_url(
            container_name=container_name,
            blob_name=blob_name,
            sas_token=access_token)
        results['url'] = url
        results['msg'] = "Url: %s" % url
        return results

    if mode == 'get_token':
        if hours == 0 and days == 0:
            raise Exception("Parameter error: expecting hours > 0 or days > 0")
        container = container_check(bs, container_name)
        blob = blob_check(bs, container_name, blob_name)
        results['blob_name'] = blob_name
        sap = get_shared_access_policy(permissions, hours=hours, days=days)
        token = bs.generate_shared_access_signature(container_name, blob_name, sap)
        results['access_token'] = token
        return results
# NOTE(review): this chunk starts mid-statement -- the call that takes
# `example_file_path` opens in an earlier part of the file not visible here.
    example_file_path,
)

# Create a new signed identifier (container-level stored access policy)
si = SignedIdentifier()
# Set the policy name
si.id = policy_name
# Set the expiration date (ISO date string)
si.access_policy.expiry = '2016-01-01'
# Set the permissions: Read and List in this example
si.access_policy.permission = ContainerSharedAccessPermissions.READ + ContainerSharedAccessPermissions.LIST
# Get the existing signed identifiers (policies) for the container
identifiers = blob_service.get_container_acl(storage_container_name)
# And append the new one to the list
identifiers.signed_identifiers.append(si)
# Set the container to the updated list of signed identifiers (policies)
blob_service.set_container_acl(
    container_name=storage_container_name,
    signed_identifiers=identifiers,
)
# Generate a new Shared Access Signature token using the named
# container-level policy (the token itself carries no expiry/permissions;
# they come from the stored policy)
sas_token = blob_service.generate_shared_access_signature(
    container_name=storage_container_name,
    shared_access_policy=SharedAccessPolicy(signed_identifier=policy_name),
)
# Print out the new token
print(sas_token)