def test_create_blob_client_with_sub_directory_path_in_blob_name(self):
    # A blob name may itself contain '/' separators; from_blob_url must keep
    # the full nested path as the blob name for both the standard endpoint
    # and the local storage-emulator endpoint.
    nested_name = "dir1/sub000/2010_Unit150_Ivan097_img0003.jpg"
    urls = (
        "https://testaccount.blob.core.windows.net/containername/" + nested_name,
        "http://127.0.0.1:1000/devstoreaccount1/containername/" + nested_name,
    )
    for url in urls:
        client = BlobClient.from_blob_url(url)
        self.assertEqual(client.container_name, "containername")
        self.assertEqual(client.blob_name, nested_name)
def test_create_service_with_custom_account_endpoint_path(self):
    """Clients must preserve a custom path segment in the account endpoint
    (e.g. a reverse proxy path) in both hostname and generated URLs."""
    account_name = "blobstorage"
    account_key = "blobkey"
    # A SAS token is appended so URL parsing must also handle a query string.
    custom_account_url = "http://local-machine:11002/custom/account/path/" + self.sas_token
    for service_type in SERVICES.items():
        conn_string = 'DefaultEndpointsProtocol=http;AccountName={};AccountKey={};BlobEndpoint={};'.format(
            account_name, account_key, custom_account_url)

        # Act
        service = service_type[0].from_connection_string(
            conn_string, container_name="foo", blob_name="bar")

        # Assert
        self.assertEqual(service.account_name, account_name)
        self.assertEqual(service.credential.account_name, account_name)
        self.assertEqual(service.credential.account_key, account_key)
        self.assertEqual(service.primary_hostname, 'local-machine:11002/custom/account/path')

    # Direct construction without credentials: account_name cannot be inferred
    # from a custom (non *.core.windows.net) endpoint.
    service = BlobServiceClient(account_url=custom_account_url)
    self.assertEqual(service.account_name, None)
    self.assertEqual(service.credential, None)
    self.assertEqual(service.primary_hostname, 'local-machine:11002/custom/account/path')
    self.assertTrue(service.url.startswith('http://local-machine:11002/custom/account/path/?'))

    service = ContainerClient(account_url=custom_account_url, container_name="foo")
    self.assertEqual(service.account_name, None)
    self.assertEqual(service.container_name, "foo")
    self.assertEqual(service.credential, None)
    self.assertEqual(service.primary_hostname, 'local-machine:11002/custom/account/path')
    self.assertTrue(service.url.startswith('http://local-machine:11002/custom/account/path/foo?'))

    # from_container_url: the trailing path segment becomes the container name
    # and the query string is dropped from .url.
    service = ContainerClient.from_container_url("http://local-machine:11002/custom/account/path/foo?query=value")
    self.assertEqual(service.account_name, None)
    self.assertEqual(service.container_name, "foo")
    self.assertEqual(service.credential, None)
    self.assertEqual(service.primary_hostname, 'local-machine:11002/custom/account/path')
    self.assertEqual(service.url, 'http://local-machine:11002/custom/account/path/foo')

    service = BlobClient(account_url=custom_account_url, container_name="foo",
                         blob_name="bar", snapshot="baz")
    self.assertEqual(service.account_name, None)
    self.assertEqual(service.container_name, "foo")
    self.assertEqual(service.blob_name, "bar")
    self.assertEqual(service.snapshot, "baz")
    self.assertEqual(service.credential, None)
    self.assertEqual(service.primary_hostname, 'local-machine:11002/custom/account/path')
    self.assertTrue(service.url.startswith('http://local-machine:11002/custom/account/path/foo/bar?snapshot=baz&'))

    # from_blob_url: container/blob/snapshot are all parsed out of the URL.
    service = BlobClient.from_blob_url("http://local-machine:11002/custom/account/path/foo/bar?snapshot=baz&query=value")
    self.assertEqual(service.account_name, None)
    self.assertEqual(service.container_name, "foo")
    self.assertEqual(service.blob_name, "bar")
    self.assertEqual(service.snapshot, "baz")
    self.assertEqual(service.credential, None)
    self.assertEqual(service.primary_hostname, 'local-machine:11002/custom/account/path')
    self.assertEqual(service.url, 'http://local-machine:11002/custom/account/path/foo/bar?snapshot=baz')
async def _test_auth_blob_url_async(self):
    """Doc samples: construct async BlobClients from a plain blob URL and a SAS URL."""
    # [START create_blob_client]
    from azure.storage.blob.aio import BlobClient
    blob_client = BlobClient.from_blob_url(blob_url="https://account.blob.core.windows.net/container/blob-name")
    # [END create_blob_client]

    # [START create_blob_client_sas_url]
    from azure.storage.blob.aio import BlobClient

    sas_url = "https://account.blob.core.windows.net/container/blob-name?sv=2015-04-05&st=2015-04-29T22%3A18%3A26Z&se=2015-04-30T02%3A23%3A26Z&sr=b&sp=rw&sip=168.1.5.60-168.1.5.70&spr=https&sig=Z%2FRHIX5Xcg0Mq2rqI3OlWTjEg2tYkboXr1P9ZUXDtkk%3D"
    blob_client = BlobClient.from_blob_url(sas_url)
async def archive(self, payload: Payload, request: Request) -> ArchiverResponse:
    """
    Archive payload to Azure Blob Storage and return the container/blob
    coordinates needed to retrieve it later.
    """
    if self.use_sha:
        # Shard by the first five hex chars of the sha1 so blobs don't pile
        # up in one flat namespace (a/b/c/d/e/<sha1>).
        sha = hashlib.sha1(payload.content).hexdigest()
        # FIX: the final path component was the literal string '(unknown)';
        # it must be the full sha1 digest so the blob name is content-addressed.
        filename = f'{"/".join(list(sha[:5]))}/{sha}'
    elif self.use_datetime:
        datetime_path = datetime.now().strftime('%Y/%m/%d')
        filename = f'{datetime_path}/{payload.payload_id}'
    else:
        # NOTE(review): the datetime branch uses payload.payload_id directly;
        # confirm payload.results.payload_id is intended here.
        filename = payload.results.payload_id
    blob_client: BlobClient = BlobClient.from_connection_string(
        conn_str=self.conn_str,
        container_name=self.archive_container,
        blob_name=filename,
    )
    try:
        await blob_client.upload_blob(payload.content)
    except ResourceExistsError:
        # Blob already present — archiving is best-effort, treat as success.
        pass
    await blob_client.close()
    return ArchiverResponse(
        {'container_name': self.archive_container, 'blob_name': filename}
    )
async def _test_shared_access_container(self):
    """A container-level read SAS must allow anonymous HTTP GET of a blob."""
    # SAS URL is calculated from storage key, so this test runs live only
    if TestMode.need_recording_file(self.test_mode):
        return

    # Arrange
    container = await self._create_container()
    payload = b'hello world'
    source = container.get_blob_client('blob1')
    await source.upload_blob(payload)

    sas = container.generate_shared_access_signature(
        expiry=datetime.utcnow() + timedelta(hours=1),
        permission=ContainerPermissions.READ,
    )
    sas_blob = BlobClient(source.url, credential=sas)

    # Act: plain unauthenticated GET against the SAS-signed URL.
    resp = requests.get(sas_blob.url)

    # Assert
    self.assertTrue(resp.ok)
    self.assertEqual(payload, resp.content)
def __init__(
    self, account_url,  # type: str
    file_system_name,  # type: str
    path_name,  # type: str
    credential=None,  # type: Optional[Any]
    **kwargs  # type: Any
):
    # type: (...) -> None
    """Initialize an async path client backed by both a blob client and
    generated DataLake clients that share this client's pipeline."""
    kwargs['retry_policy'] = kwargs.get('retry_policy') or ExponentialRetry(**kwargs)

    super(PathClient, self).__init__(account_url,  # pylint: disable=specify-parameter-names-in-call
                                     file_system_name, path_name,
                                     credential=credential, **kwargs)  # type: ignore

    # Remove any caller-supplied hosts; the rebuilt blob client reuses the
    # hosts of the blob client set up by the base initializer instead.
    kwargs.pop('_hosts', None)
    self._blob_client = BlobClient(account_url=self._blob_account_url, container_name=file_system_name,
                                   blob_name=path_name,
                                   credential=credential,
                                   _hosts=self._blob_client._hosts,  # pylint: disable=protected-access
                                   **kwargs)
    # Generated client for the DataLake endpoint (self.url).
    self._client = DataLakeStorageClient(self.url, file_system_name, path_name, pipeline=self._pipeline)
    # Second generated client pointed at the blob endpoint URL, used for
    # operations that must go through the blob endpoint; both share the
    # same pipeline so transport/policies are configured once.
    self._datalake_client_for_blob_operation = DataLakeStorageClient(self._blob_client.url,
                                                                    file_system_name, path_name,
                                                                    pipeline=self._pipeline)
    self._loop = kwargs.get('loop', None)
async def main(event: func.EventGridEvent):
    """EventGrid trigger: download the blob referenced in the event and log it."""
    serialized = json.dumps({
        'id': event.id,
        'data': event.get_json(),
        'topic': event.topic,
        'subject': event.subject,
        'event_type': event.event_type,
    })
    logging.info('Python EventGrid trigger processed an event: %s', serialized)
    try:
        payload = json.loads(serialized)['data']
        blob_path = Path(urlparse(payload['url']).path)
        # NOTE(review): only the immediate parent segment is used as the
        # container name — blobs nested in sub-directories would be
        # mis-addressed; confirm events only ever reference top-level blobs.
        filename = blob_path.name
        container = blob_path.parent.name
        connection = os.environ['STORAGE_CONNECTION']
        async with BlobClient.from_connection_string(
                connection, container_name=container, blob_name=filename) as blob:
            stream = await blob.download_blob()
            data = await stream.content_as_text()
            logging.info(f'Content is {data}')
    except Exception as e:
        logging.exception(f"failed to load EventGrid request:{e}")
async def set_blob_metadata(blob_url: str, metadata: Dict[str, str]):
    """Sets the provided dictionary as the metadata on the Azure blob.

    :param blob_url: full URL of the target blob.
    :param metadata: name/value pairs to set as the blob's metadata.
    :return: the service response from set_blob_metadata.
    """
    blob_client = BlobClient.from_blob_url(blob_url)
    # FIX: the concatenated f-strings previously ran together without
    # separating spaces ("...'from container'...").
    logger.info(f"Setting blob properties '{blob_client.blob_name}' " +
                f"from container '{blob_client.container_name}' on account: " +
                f"'{blob_client.account_name}'")
    try:
        return await blob_client.set_blob_metadata(metadata=metadata)
    finally:
        # FIX: close the client so its transport is released
        # (mirrors download_blob elsewhere in this module).
        await blob_client.close()
async def _setup(self, bsc):
    """Best-effort live setup: create a container and a SAS-readable source blob."""
    self.container_name = self.get_resource_name('utcontainer')
    if self.is_live:
        try:
            # create source blob to be copied from
            self.source_blob_name = self.get_resource_name('srcblob')
            self.source_blob_data = self.get_random_bytes(4 * 1024)
            source_blob = bsc.get_blob_client(self.container_name, self.source_blob_name)

            await bsc.create_container(self.container_name)
            await source_blob.upload_blob(self.source_blob_data)

            # generate a SAS so that it is accessible with a URL
            sas_token = generate_blob_sas(
                source_blob.account_name,
                source_blob.container_name,
                source_blob.blob_name,
                snapshot=source_blob.snapshot,
                account_key=source_blob.credential.account_key,
                permission=BlobSasPermissions(read=True),
                expiry=datetime.utcnow() + timedelta(hours=1),
            )
            sas_source = BlobClient.from_blob_url(source_blob.url, credential=sas_token)
            self.source_blob_url = sas_source.url
        except Exception:
            # FIX: was a bare `except:` which also swallowed SystemExit and
            # KeyboardInterrupt; setup stays best-effort for ordinary errors.
            pass
def setUp(self):
    super(StorageBlockBlobTestAsync, self).setUp()

    account_url = self._get_account_url()
    shared_key = self._get_shared_key_credential()

    # Shrink chunk/block sizes so the chunked-transfer code paths run
    # without needing huge payloads (full-size chunks would be too slow).
    self.bsc = BlobServiceClient(
        account_url,
        credential=shared_key,
        connection_data_block_size=4 * 1024,
        max_single_put_size=32 * 1024,
        max_block_size=4 * 1024,
        transport=AiohttpTestTransport())
    self.config = self.bsc._config
    self.container_name = self.get_resource_name('utcontainer')

    # Source blob the copy tests read from.
    self.source_blob_name = self.get_resource_name('srcblob')
    self.source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE)
    blob = self.bsc.get_blob_client(self.container_name, self.source_blob_name)

    # Read-only SAS so the source blob is reachable by plain URL.
    sas_token = blob.generate_shared_access_signature(
        permission=BlobSasPermissions(read=True),
        expiry=datetime.utcnow() + timedelta(hours=1),
    )
    self.source_blob_url = BlobClient.from_blob_url(blob.url, credential=sas_token).url
def test_blob_client_api_version_property(self):
    """An explicit api_version must reach the client config; omitting it
    falls back to the default version."""
    pinned = BlobClient(
        "https://foo.blob.core.windows.net/account",
        self.container_name,
        self._get_blob_reference(),
        credential="fake_key",
        api_version=self.api_version_1)
    self.assertEqual(pinned.api_version, self.api_version_1)
    self.assertEqual(pinned._client._config.version, self.api_version_1)

    defaulted = BlobClient(
        "https://foo.blob.core.windows.net/account",
        self.container_name,
        self._get_blob_reference(),
        credential="fake_key")
    self.assertEqual(defaulted.api_version, self.api_version_2)
    self.assertEqual(defaulted._client._config.version, self.api_version_2)
def __init__(self):
    """Create a BlobClient for a uniquely-named test blob in 'mycontainer'.

    Requires the STORAGE_CONNECTION_STRING environment variable.
    """
    # FIX: the local was named `id`, shadowing the builtin.
    blob_id = uuid.uuid1()
    connection_string = os.environ["STORAGE_CONNECTION_STRING"]
    self.blob = BlobClient.from_connection_string(
        conn_str=connection_string,
        container_name="mycontainer",
        blob_name="pyTestBlob-" + blob_id.hex + ".txt",
    )
async def download_blob_using_blobclient(account_name: str, credential: DefaultAzureCredential,
                                         container_name: str, blob_name: str,
                                         file_stream: io.BytesIO):
    """Stream a blob's content into file_stream.

    :raises KeyError: the blob (or container) does not exist.
    :raises ClientAuthenticationError: the credential was rejected.
    """
    try:
        # FIX: the account URL previously lacked a scheme
        # ("{account}.blob.core.windows.net"); BlobClient requires a full
        # endpoint URL including https://.
        blob_client = BlobClient(f"https://{account_name}.blob.core.windows.net",
                                 credential=credential,
                                 container_name=container_name,
                                 blob_name=blob_name,
                                 connection_timeout=1,
                                 read_timeout=1)
        storage_stream_downloader = await blob_client.download_blob()
        await storage_stream_downloader.readinto(file_stream)
        return
    except ResourceNotFoundError:
        # Surface a stdlib exception type to callers instead of the SDK's.
        raise KeyError(blob_name)
    except ClientAuthenticationError:
        raise
def test_create_blob_client_with_complete_blob_url_async(self, resource_group, location, storage_account, storage_account_key):
    # Arrange: the explicit container_name/blob_name kwargs win over the
    # path segments already embedded in the URL.
    full_url = self.account_url(storage_account, "blob") + "/foourl/barurl"
    client = BlobClient(full_url, credential=storage_account_key,
                        container_name='foo', blob_name='bar')

    # Assert
    self.assertEqual(client.scheme, 'https')
    self.assertEqual(client.container_name, 'foo')
    self.assertEqual(client.blob_name, 'bar')
    self.assertEqual(client.account_name, storage_account.name)
async def download_blob_properties(blob_url: str) -> Dict[str, str]:
    """Downloads the blob properties from Azure for the given blob URI.

    :param blob_url: full URL of the blob.
    :return: the blob's properties as returned by the service.
    """
    blob_client = BlobClient.from_blob_url(blob_url)
    # FIX: the concatenated f-strings previously ran together without spaces.
    logger.info(f"Downloading blob properties '{blob_client.blob_name}' " +
                f"from container '{blob_client.container_name}' " +
                f"on account: '{blob_client.account_name}'")
    try:
        response = await blob_client.get_blob_properties()
        logger.debug(response)
        return response
    finally:
        # FIX: close the client so its transport is released
        # (mirrors download_blob elsewhere in this module).
        await blob_client.close()
async def save(self, response: StoqResponse) -> None:
    """ Persist the scan response to Azure Blob Storage, keyed by scan_id """
    client: BlobClient = BlobClient.from_connection_string(
        conn_str=self.conn_str,
        container_name=self.results_container,
        blob_name=response.scan_id,
    )
    serialized = dumps(response)
    await client.upload_blob(serialized)
    await client.close()
async def add_file_and_metadata_to_blob_storage(
    file_name,
    file_contents,
    file_content_type,
    github_user_id,
    github_user_name,
    title,
    badge,
    description,
):
    """Adds uploaded files to blob storage with metadata."""
    # Store under a fresh UUID so blob names never collide; the original
    # file name only survives in the blob metadata.
    # NOTE(review): `extension` is computed but unused (the suffix append
    # is commented out below) — confirm whether it should be restored.
    extension = file_name.split(".")[-1].lower()
    file_uuid_str = str(uuid.uuid4())
    new_filename = f"{file_uuid_str}"  # ".{extension}"

    blob_client = BlobClient(
        account_url=f"https://{os.getenv('AZURE_STORAGE_ACCOUNT')}.blob.core.windows.net/",
        credential=os.getenv("AZURE_STORAGE_KEY"),
        container_name=os.getenv("AZURE_STORAGE_VIDEO_CONTAINER"),
        blob_name=new_filename,
    )
    response = await blob_client.upload_blob(
        file_contents,
        metadata={
            "original_file_name": file_name,  # TODO: Make this a real title
            "uuid": file_uuid_str,
            "uploader_username": github_user_name,
            "uploader_id": github_user_id,
            "title": title,
            "badge": clean_badge(badge),
            "description": description,
        },
        content_settings=ContentSettings(content_type=file_content_type),
    )

    # Short-lived (15 min) read-only SAS so the video indexer can fetch
    # the blob by plain URL.
    sas_token = generate_blob_sas(
        account_name=os.getenv("AZURE_STORAGE_ACCOUNT"),
        account_key=os.getenv("AZURE_STORAGE_KEY"),
        container_name=os.getenv("AZURE_STORAGE_VIDEO_CONTAINER"),
        blob_name=blob_client.blob_name,
        permission=BlobSasPermissions(read=True),
        expiry=datetime.utcnow() + timedelta(minutes=15),
    )
    sas_url = f"{blob_client.url}?{sas_token}"

    # Kick off indexing of the uploaded video via its SAS URL; the service
    # will call back to the processed-callback endpoint when done.
    video_indexer = app.state.video_indexer
    async with await video_indexer.upload_video_from_url(
        file_uuid_str,
        # TODO: Make this a real title
        file_uuid_str,
        "https://teamsvid.azurewebsites.net/video_processed_callback",
        sas_url,
    ) as response:
        response_json = await response.json()
    await blob_client.close()
def test_create_blob_client_with_complete_blob_url_async(self):
    # Arrange: explicit container_name/blob_name kwargs override the path
    # segments present in the URL itself.
    full_url = self._get_account_url() + "/foourl/barurl"
    client = BlobClient(full_url, credential=self.account_key,
                        container_name='foo', blob_name='bar')

    # Assert
    self.assertEqual(client.scheme, 'https')
    self.assertEqual(client.container_name, 'foo')
    self.assertEqual(client.blob_name, 'bar')
    self.assertEqual(client.account_name, self.account_name)
async def upload_file(container, filename, cs):
    """Upload a local file as a blob, then delete the local copy.

    :param container: target container name.
    :param filename: local file path; its basename becomes the blob name.
    :param cs: storage connection string.
    """
    print('Uploading Blob Start')
    # FIX: use the client as an async context manager so its transport is
    # closed instead of leaking after the upload.
    async with BlobClient.from_connection_string(
            conn_str=cs,
            container_name=container,
            blob_name=os.path.basename(filename)) as blob:
        with open(filename, "rb") as data:
            await blob.upload_blob(data)
    print('Uploading Blob End')
    # The local file is only removed once the upload succeeded.
    os.remove(filename)
async def download_blob(blob_url: str) -> Any:
    """ Downloads the given blob from the container.

    :param blob_url: full URL of the blob.
    :return: the blob's content as bytes.
    """
    blob_client = BlobClient.from_blob_url(blob_url)
    # FIX: the concatenated f-strings previously ran together without spaces.
    logger.info(f"Downloading blob '{blob_client.blob_name}' " +
                f"from container '{blob_client.container_name}' " +
                f"on account: '{blob_client.account_name}'")
    try:
        response = await (await blob_client.download_blob()).readall()
        logger.debug(response)
        return response
    finally:
        # FIX: close in a finally so the transport is released even when
        # the download fails.
        await blob_client.close()
async def get(self, task: ArchiverResponse) -> Payload:
    """ Retrieve archived payload from Azure Blob Storage """
    blob_client: BlobClient = BlobClient.from_connection_string(
        conn_str=self.conn_str,
        container_name=task.results['container_name'],
        blob_name=task.results['blob_name'],
    )
    downloader = await blob_client.download_blob()
    # FIX: on the async client readall() is a coroutine — it must be awaited
    # (and before close) or Payload would receive a coroutine object
    # instead of the blob's bytes.
    content = await downloader.readall()
    await blob_client.close()
    meta = PayloadMeta(task.results)
    return Payload(content, meta)
def __init__(self, container: str,
             path: str = str(),
             connection_string: str = STORAGE_CONNECTION_STRING,
             content_type: Union[str, None] = DEFAULT_CONTENT_TYPE,
             cache_control: str = DEFAULT_CACHE_CONTROL,
             compressed: bool = True,
             content_disposition: Union[str, None] = None,
             content_language: Union[str, None] = CONTENT_LANGUAGE,
             tier: str = 'Hot',
             **kwargs):
    """Configure an async blob writer for one container/path target.

    Extra keyword arguments are forwarded to ContentSettings.
    Raises ValueError when `tier` is not a StandardBlobTier member name.
    """
    self.path = path
    self.compressed = compressed
    self._connection_string = connection_string
    self.container = container
    self._tier = getattr(StandardBlobTier, tier, None)
    self._lock = None
    if self._tier is None:
        raise ValueError(
            "Tier must be one of 'Hot', 'Cool' or 'Archive'. "
            "Got <%r> instead." % tier
        )

    # gzip is advertised as the content encoding only when we compress.
    encoding = "gzip" if self.compressed else None
    self._content_settings: ContentSettings = ContentSettings(
        content_type=content_type,
        cache_control=cache_control,
        content_encoding=encoding,
        content_language=content_language,
        content_disposition=content_disposition,
        **kwargs
    )

    # Large block/put sizes so sizeable uploads go through in few requests.
    self.client: AsyncBlobClient = AsyncBlobClient.from_connection_string(
        conn_str=connection_string,
        container_name=container,
        blob_name=path,
        # retry_to_secondary=True,
        connection_timeout=60,
        max_block_size=8 * 1024 * 1024,
        max_single_put_size=256 * 1024 * 1024,
        min_large_block_upload_threshold=8 * 1024 * 1024 + 1
    )

    self.account_name = self.client.account_name
    self.target = self.client.primary_hostname
    # Public URL of the target blob, with container and path percent-quoted.
    self.url = "{}://{}/{}/{}".format(
        self.client.scheme,
        self.client.primary_hostname,
        quote(self.container),
        quote(self.path, safe='~/'),
    )
async def _setup(self):
    """Ensure the source blob exists (live runs) and publish a SAS URL for it."""
    blob = self.bsc.get_blob_client(self.container_name, self.source_blob_name)

    if not self.is_playback():
        try:
            await self.bsc.create_container(self.container_name)
        except Exception:
            # FIX: was a bare `except:` which also swallowed SystemExit and
            # KeyboardInterrupt; creation stays best-effort (the container
            # may already exist) for ordinary errors only.
            pass
        await blob.upload_blob(self.source_blob_data, overwrite=True)

    # generate a SAS so that it is accessible with a URL
    sas_token = blob.generate_shared_access_signature(
        permission=BlobSasPermissions(read=True),
        expiry=datetime.utcnow() + timedelta(hours=1),
    )
    self.source_blob_url = BlobClient.from_blob_url(blob.url, credential=sas_token).url
async def _test_create_container_with_public_access_blob(self):
    # Arrange
    container_name = self._get_container_reference()

    # Act: create the container with blob-level public read access and
    # put one blob in it.
    container = self.bsc.get_container_client(container_name)
    created = await container.create_container(public_access='blob')
    blob = container.get_blob_client("blob1")
    await blob.upload_blob(u'xyz')

    # A credential-less client must still be able to read the blob.
    anonymous_service = BlobClient(
        self._get_account_url(), container=container_name, blob="blob1")

    # Assert
    self.assertTrue(created)
    await anonymous_service.download_blob()
async def main(req: func.HttpRequest) -> func.HttpResponse:
    """HTTP trigger: download the requested blob using a cached managed identity.

    `container` and `file` may arrive as query parameters or in the JSON body.
    Returns 400 with a usage message on any failure.
    """
    try:
        logging.info('Python HTTP trigger function processed a request.')
        container = req.params.get('container')
        filename = req.params.get('file')
        if not filename:
            try:
                req_body = req.get_json()
            except ValueError:
                pass
            else:
                container = req_body.get('container')
                filename = req_body.get('file')

        # Reuse the module-level credential until its hour-long TTL expires.
        global msi
        global expire_datetime
        if msi is None or expire_datetime < dt.datetime.now():
            msi = DefaultAzureCredential()
            expire_datetime = dt.datetime.now() + dt.timedelta(hours=1)
            logging.info(
                f"Acquired token for Storage Account. Will expire at {expire_datetime.strftime('%Y/%m/%d %H:%M:%S')}"
            )

        STORAGE_NAME = os.environ.get('STORAGE_NAME')

        # Download file with BlobClient
        async with BlobClient(STORAGE_NAME, container, filename, credential=msi) as blob:
            stream = await blob.download_blob()
            data = await stream.content_as_text()
            # FIX: log/response messages were garbled ("finishied", "insde",
            # and a literal "(unknown)" where the filename belongs).
            logging.info(f'finished download file. inside data is "{data}"')
            return func.HttpResponse(
                f"finished download blob {filename} from {container}")
    except Exception as e:
        logging.exception(f'Failed to download file:{e}')
        return func.HttpResponse(
            "Please pass a container and file on the query string or in the request body",
            status_code=400)
def upload_blob_file(connection, container_name, local_file_name, full_path):
    """
    Upload local file to cloud.

    :param Object connection: Azure connection instance
    :param str container_name: Azure Container name.
    :param str local_file_name: File name to upload.
    :param str full_path: Path file that will be uploaded.
    :return: True if upload works and False if something went wrong.
    :rtype: Boolean
    """
    try:
        client = BlobClient.from_connection_string(
            conn_str=connection,
            container_name=container_name,
            blob_name=local_file_name)
        with open(full_path, 'rb') as data:
            client.upload_blob(data)
        print('File {0} uploaded'.format(local_file_name))
        return True
    except ValueError:
        # NOTE(review): only ValueError is handled here; service/network
        # errors from the SDK will still propagate — confirm that's intended.
        print("Something went wrong")
        return False
async def _test_auth_connection_string_async(self):
    """Doc samples: build each async client type from a connection string."""
    # [START auth_from_connection_string]
    from azure.storage.blob.aio import BlobServiceClient
    blob_service_client = BlobServiceClient.from_connection_string(self.connection_string)
    # [END auth_from_connection_string]

    # [START auth_from_connection_string_container]
    from azure.storage.blob.aio import ContainerClient
    container_client = ContainerClient.from_connection_string(
        self.connection_string, container_name="mycontainer")
    # [END auth_from_connection_string_container]

    # [START auth_from_connection_string_blob]
    from azure.storage.blob.aio import BlobClient
    blob_client = BlobClient.from_connection_string(
        self.connection_string, container_name="mycontainer", blob_name="blobname.txt")
    # [END auth_from_connection_string_blob]

    # Get account information for the Blob Service
    account_info = await blob_service_client.get_account_information()
    assert account_info is not None
async def _setup(self):
    """Best-effort live setup: create the container and a SAS-readable source blob."""
    if not self.is_playback():
        try:
            # create source blob to be copied from
            self.source_blob_name = self.get_resource_name('srcblob')
            self.source_blob_data = self.get_random_bytes(4 * 1024)
            source_blob = self.bsc.get_blob_client(self.container_name, self.source_blob_name)

            await self.bsc.create_container(self.container_name)
            await source_blob.upload_blob(self.source_blob_data)

            # generate a SAS so that it is accessible with a URL
            sas_token = source_blob.generate_shared_access_signature(
                permission=BlobPermissions.READ,
                expiry=datetime.utcnow() + timedelta(hours=1),
            )
            sas_source = BlobClient(source_blob.url, credential=sas_token)
            self.source_blob_url = sas_source.url
        except Exception:
            # FIX: was a bare `except:` which also swallowed SystemExit and
            # KeyboardInterrupt; setup stays best-effort for ordinary errors.
            pass
def download_blob_file(connection, container_name, blob_name, path_output):
    """
    Get blob file from cloud and downloads on local machine.

    :param Object connection: Azure connection instance
    :param str container_name: Azure Container name.
    :param str blob_name: Azure Blob name to download.
    :param str path_output: Path where file will be saved.
    :return: True if download works and False if something went wrong.
    :rtype: Boolean
    """
    try:
        client = BlobClient.from_connection_string(
            conn_str=connection,
            container_name=container_name,
            blob_name=blob_name)
        with open(path_output, 'wb') as out_file:
            client.download_blob().readinto(out_file)
        print('File saved in {0}'.format(path_output))
        return True
    except ValueError:
        # NOTE(review): only ValueError is handled; SDK/service errors
        # will still propagate — confirm that's intended.
        print("Something went wrong")
        return False
async def _setup(self, storage_account, key):
    """Build a small-chunk BlobServiceClient and a SAS-readable source blob."""
    # test chunking functionality by reducing the size of each chunk,
    # otherwise the tests would take too long to execute
    self.bsc = BlobServiceClient(
        self.account_url(storage_account, "blob"),
        credential=key,
        connection_data_block_size=4 * 1024,
        max_single_put_size=32 * 1024,
        max_block_size=4 * 1024,
        transport=AiohttpTestTransport())
    self.config = self.bsc._config
    self.container_name = self.get_resource_name('utcontainer')

    # create source blob to be copied from
    self.source_blob_name = self.get_resource_name('srcblob')
    self.source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE)
    blob = self.bsc.get_blob_client(self.container_name, self.source_blob_name)

    if self.is_live:
        try:
            await self.bsc.create_container(self.container_name)
        except Exception:
            # FIX: was a bare `except:` which also swallowed SystemExit and
            # KeyboardInterrupt; the container may simply already exist.
            pass
        await blob.upload_blob(self.source_blob_data, overwrite=True)

    # generate a SAS so that it is accessible with a URL
    sas_token = generate_blob_sas(
        blob.account_name,
        blob.container_name,
        blob.blob_name,
        snapshot=blob.snapshot,
        account_key=blob.credential.account_key,
        permission=BlobSasPermissions(read=True),
        expiry=datetime.utcnow() + timedelta(hours=1),
    )
    self.source_blob_url = BlobClient.from_blob_url(
        blob.url, credential=sas_token).url
    self.source_blob_url_without_sas = blob.url