Example #1
    def _setup(self, storage_account, key, container_prefix='utcontainer'):
        account_url = self.account_url(storage_account, "blob")
        if not isinstance(account_url, str):
            # decode bytes values to str before constructing the client
            account_url = account_url.decode('utf-8')
            key = key.decode('utf-8')
        self.bsc = BlobServiceClient(account_url,
                                     credential=key,
                                     connection_data_block_size=4 * 1024,
                                     max_single_put_size=32 * 1024,
                                     max_block_size=4 * 1024)
        self.config = self.bsc._config
        self.container_name = self.get_resource_name(container_prefix)

        # create source blob to be copied from
        self.source_blob_name = self.get_resource_name('srcblob')
        self.source_blob_name_with_special_chars = 'भारत¥test/testsubÐirÍ/' + self.get_resource_name(
            'srcÆblob')
        self.source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE)
        self.source_blob_with_special_chars_data = self.get_random_bytes(
            SOURCE_BLOB_SIZE)

        blob = self.bsc.get_blob_client(self.container_name,
                                        self.source_blob_name)
        blob_with_special_chars = self.bsc.get_blob_client(
            self.container_name, self.source_blob_name_with_special_chars)

        if self.is_live:
            self.bsc.create_container(self.container_name)
            blob.upload_blob(self.source_blob_data)
            blob_with_special_chars.upload_blob(
                self.source_blob_with_special_chars_data)

        # generate a SAS so that it is accessible with a URL
        sas_token = generate_blob_sas(
            blob.account_name,
            blob.container_name,
            blob.blob_name,
            snapshot=blob.snapshot,
            account_key=blob.credential.account_key,
            permission=BlobSasPermissions(read=True),
            expiry=datetime.utcnow() + timedelta(hours=1),
        )
        # generate a SAS so that it is accessible with a URL
        sas_token_for_special_chars = generate_blob_sas(
            blob_with_special_chars.account_name,
            blob_with_special_chars.container_name,
            blob_with_special_chars.blob_name,
            snapshot=blob_with_special_chars.snapshot,
            account_key=blob_with_special_chars.credential.account_key,
            permission=BlobSasPermissions(read=True),
            expiry=datetime.utcnow() + timedelta(hours=1),
        )
        self.source_blob_url_without_sas = blob.url
        self.source_blob_url = BlobClient.from_blob_url(
            blob.url, credential=sas_token).url
        self.source_blob_url_with_special_chars = BlobClient.from_blob_url(
            blob_with_special_chars.url,
            credential=sas_token_for_special_chars).url
Example #2
    def auth_blob_url(self):
        # [START create_blob_client]
        from azure.storage.blob import BlobClient
        blob_client = BlobClient.from_blob_url(blob_url="https://account.blob.core.windows.net/container/blob-name")
        # [END create_blob_client]

        # [START create_blob_client_sas_url]
        from azure.storage.blob import BlobClient

        sas_url = "https://account.blob.core.windows.net/container/blob-name?sv=2015-04-05&st=2015-04-29T22%3A18%3A26Z&se=2015-04-30T02%3A23%3A26Z&sr=b&sp=rw&sip=168.1.5.60-168.1.5.70&spr=https&sig=Z%2FRHIX5Xcg0Mq2rqI3OlWTjEg2tYkboXr1P9ZUXDtkk%3D"
        blob_client = BlobClient.from_blob_url(sas_url)
        # [END create_blob_client_sas_url]
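
A URL without a SAS token, as in the first snippet, only works for anonymous (public) access. For private blobs a credential can be passed explicitly; a minimal sketch with placeholder values:

from azure.storage.blob import BlobClient

blob_client = BlobClient.from_blob_url(
    "https://account.blob.core.windows.net/container/blob-name",
    credential="<account-key-or-sas-token>",  # placeholder; any credential type the SDK accepts works here
)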
Example #3
    def _azure_upload_file(self, credentials, local_file, artifact_path):
        """
        Uploads a file to a given Azure storage location.

        The function uses a file-chunking generator with 100 MB as the size limit for each
        chunk; this limit is imposed by the stage_block API in azure-storage-blob.
        If the file is large and the upload takes longer than the validity of the
        given credentials, a new set of credentials is generated and the operation
        continues. This is the reason for the first nested try-except block.

        Finally, since the prevailing credentials could expire in the time between the last
        stage_block and the commit, a second try-except block refreshes credentials if needed.
        """
        from azure.core.exceptions import ClientAuthenticationError
        from azure.storage.blob import BlobClient

        try:
            headers = self._extract_headers_from_credentials(
                credentials.headers)
            service = BlobClient.from_blob_url(blob_url=credentials.signed_uri,
                                               credential=None,
                                               headers=headers)
            uploading_block_list = list()
            for chunk in yield_file_in_chunks(local_file,
                                              _AZURE_MAX_BLOCK_CHUNK_SIZE):
                block_id = base64.b64encode(uuid.uuid4().hex.encode())
                try:
                    service.stage_block(block_id, chunk, headers=headers)
                except ClientAuthenticationError:
                    _logger.warning(
                        "Failed to authorize request, possibly due to credential expiration. "
                        "Refreshing credentials and trying again...")
                    signed_uri = self._get_write_credentials(
                        self.run_id, artifact_path).credentials.signed_uri
                    service = BlobClient.from_blob_url(blob_url=signed_uri,
                                                       credential=None)
                    service.stage_block(block_id, chunk, headers=headers)
                uploading_block_list.append(block_id)
            try:
                service.commit_block_list(uploading_block_list,
                                          headers=headers)
            except ClientAuthenticationError:
                _logger.warning(
                    "Failed to authorize request, possibly due to credential expiration. "
                    "Refreshing credentials and trying again...")
                signed_uri = self._get_write_credentials(
                    self.run_id, artifact_path).credentials.signed_uri
                service = BlobClient.from_blob_url(blob_url=signed_uri,
                                                   credential=None)
                service.commit_block_list(uploading_block_list,
                                          headers=headers)
        except Exception as err:
            raise MlflowException(err)
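
For reference, the staged-upload flow above reduces to a few calls. A minimal sketch without the credential-refresh plumbing; sas_url and large_file.bin are placeholder values:

import uuid

from azure.storage.blob import BlobBlock, BlobClient

sas_url = "https://account.blob.core.windows.net/container/blob-name?<sas>"  # placeholder
client = BlobClient.from_blob_url(sas_url)

block_ids = []
with open("large_file.bin", "rb") as f:
    while True:
        chunk = f.read(4 * 1024 * 1024)  # 4 MiB per block here; stage_block accepts up to the 100 MB limit noted above
        if not chunk:
            break
        block_id = uuid.uuid4().hex  # all ids must have equal length; the SDK base64-encodes them
        client.stage_block(block_id, chunk)  # uploads one uncommitted block
        block_ids.append(block_id)

# the blocks only become the blob's content once the list is committed
client.commit_block_list([BlobBlock(block_id=bid) for bid in block_ids])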
Example #4
    def test_create_blob_client_with_sub_directory_path_in_blob_name(self):
        blob_url = "https://testaccount.blob.core.windows.net/containername/dir1/sub000/2010_Unit150_Ivan097_img0003.jpg"
        blob_client = BlobClient.from_blob_url(blob_url)
        self.assertEqual(blob_client.container_name, "containername")
        self.assertEqual(blob_client.blob_name,
                         "dir1/sub000/2010_Unit150_Ivan097_img0003.jpg")

        blob_emulator_url = 'http://127.0.0.1:1000/devstoreaccount1/containername/dir1/sub000/2010_Unit150_Ivan097_img0003.jpg'
        blob_client = BlobClient.from_blob_url(blob_emulator_url)
        self.assertEqual(blob_client.container_name, "containername")
        self.assertEqual(blob_client.blob_name,
                         "dir1/sub000/2010_Unit150_Ivan097_img0003.jpg")
Example #5
async def store_blob(blob_info, file_name):
    try:
        sas_url = "https://{}/{}/{}{}".format(blob_info["hostName"],
                                              blob_info["containerName"],
                                              blob_info["blobName"],
                                              blob_info["sasToken"])

        screen.logOK(
            "\nUploading file: {} to Azure Storage as blob: {} in container {}\n"
            .format(file_name, blob_info["blobName"],
                    blob_info["containerName"]))

        # Upload the specified file
        with BlobClient.from_blob_url(sas_url) as blob_client:
            with open(file_name, "rb") as f:
                result = blob_client.upload_blob(f, overwrite=True)
                return (True, result)

    except FileNotFoundError as ex:
        # catch file not found and add an HTTP status code to return in notification to IoT Hub
        ex.status_code = 404
        screen.logFatal(ex)
        return (False, ex)

    except AzureError as ex:
        # catch Azure errors that might result from the upload operation
        screen.logFatal(ex)
        return (False, ex)
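
For context, a function like store_blob above is normally driven from the azure-iot-device SDK, which hands out the SAS details and expects a status notification afterwards. A hedged sketch; device_client is assumed to be an already-connected client and upload_via_iot_hub is a hypothetical wrapper:

from azure.iot.device.aio import IoTHubDeviceClient  # pip install azure-iot-device

async def upload_via_iot_hub(device_client: IoTHubDeviceClient, file_name):
    # ask IoT Hub for the host, container, blob name and SAS token
    blob_info = await device_client.get_storage_info_for_blob(file_name)
    success, result = await store_blob(blob_info, file_name)
    # report the outcome so IoT Hub can release the upload correlation id
    await device_client.notify_blob_upload_status(
        blob_info["correlationId"], success,
        200 if success else 500,
        "upload succeeded" if success else str(result))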
Example #6
    def test_shared_access_container(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        container = self._create_container()
        blob_name = 'blob1'
        data = b'hello world'

        blob = container.get_blob_client(blob_name)
        blob.upload_blob(data)

        token = generate_container_sas(
            container.account_name,
            container.container_name,
            account_key=container.credential.account_key,
            expiry=datetime.utcnow() + timedelta(hours=1),
            permission=ContainerSasPermissions(read=True),
        )
        blob = BlobClient.from_blob_url(blob.url, credential=token)

        # Act
        response = requests.get(blob.url)

        # Assert
        self.assertTrue(response.ok)
        self.assertEqual(data, response.content)
Example #7
def set_blob_metadata(blob_url: str, metadata: Dict[str, str]):
    """Sets the provided dictionary as the metadata on the Azure blob"""
    blob_client = BlobClient.from_blob_url(blob_url)
    logger.info(f"Setting blob properties '{blob_client.blob_name}'" +
                f"from container '{blob_client.container_name}' on account:" +
                f"'{blob_client.account_name}'")
    return blob_client.set_blob_metadata(metadata=metadata)
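
A hypothetical call to the helper above, assuming the SAS in the URL grants write access:

set_blob_metadata(
    "https://account.blob.core.windows.net/container/blob-name?<sas>",
    {"source": "ingest-pipeline", "reviewed": "false"},
)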
Example #8
    def get_results(self):
        if self.results is not None:
            return self.results

        if not self.has_completed():
            self.wait_until_completed()

        if self.details.status != "Succeeded":
            raise RuntimeError(
                f"Cannot retrieve results as job execution failed "
                f"(status: {self.details.status}; error: {self.details.error_data})"
            )

        url = urlparse(self.details.output_data_uri)
        if url.query.find('se=') == -1:
            # output_data_uri does not contain a SAS token; get a SAS URL from the service
            blob_client = BlobClient.from_blob_url(
                self.details.output_data_uri)
            blob_uri = self.workspace._get_linked_storage_sas_uri(
                blob_client.container_name, blob_client.blob_name)
            payload = download_blob(blob_uri)
        else:
            # output_data_uri already contains a SAS token, use it
            payload = download_blob(self.details.output_data_uri)

        result = json.loads(payload.decode('utf8'))
        return result
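
The `se=` check above relies on SAS tokens carrying a signed-expiry ('se') parameter. A slightly more robust sketch of the same test, parsing the query string instead of substring matching:

from urllib.parse import parse_qs, urlparse

def has_sas_token(blob_url):
    # 'se' (signed expiry) is present in ad-hoc SAS tokens
    return 'se' in parse_qs(urlparse(blob_url).query)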
Example #9
    def uploadFileWithBlobSasUrl(self, blobSasUrl, file_name_full_path):
        retryRemaining = 3
        while retryRemaining > 0:
            try:
                self.telemetryLogger.info("Cloud Environment: " +
                                          self.cloudEnv)
                blob_client = BlobClient.from_blob_url(blobSasUrl)
                self.telemetryLogger.info('Uploading to Blob starting.')
                start_time = datetime.now()
                with open(file_name_full_path, "rb") as data:
                    blob_client.upload_blob(data,
                                            blob_type='BlockBlob',
                                            overwrite=True)
                self.telemetryLogger.info(
                    'Uploading to Blob completed. Time taken: ' +
                    str((datetime.now() - start_time).total_seconds() * 1000) +
                    ' ms')
                break
            except Exception as ex:
                retryRemaining -= 1
                if retryRemaining <= 0:
                    self.telemetryLogger.error(
                        'Encountered errors during blob upload; all retry attempts exhausted'
                    )
                    raise BlobUploadException(ex)
                exMessage = str(ex)
                self.telemetryLogger.warning(
                    'Encountered error during blob upload: ' + exMessage)
                self.telemetryLogger.warning('Retrying. ' +
                                             str(retryRemaining) +
                                             ' attempt(s) remaining.')
Example #10
    def get_blob(self, sas_uri):
        blob_client = BlobClient.from_blob_url(sas_uri)
        download_stream = blob_client.download_blob()

        # buffer the download in memory and rewind before returning; returning
        # from inside a `with io.BytesIO()` block would yield a closed stream
        output_stream = io.BytesIO()
        download_stream.readinto(output_stream)
        output_stream.seek(0)
        return output_stream
Example #11
    def test_user_delegation_sas_for_container(self):
        # SAS URL is calculated from storage key, so this test runs live only
        pytest.skip("Current Framework Cannot Support OAUTH")
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        token_credential = self.generate_oauth_token()
        service_client = BlobServiceClient(self._get_oauth_account_url(),
                                           credential=token_credential)
        user_delegation_key = service_client.get_user_delegation_key(
            datetime.utcnow(),
            datetime.utcnow() + timedelta(hours=1))

        container_client = service_client.create_container(
            self.get_resource_name('oauthcontainer'))
        token = container_client.generate_shared_access_signature(
            expiry=datetime.utcnow() + timedelta(hours=1),
            permission=ContainerSasPermissions(read=True),
            user_delegation_key=user_delegation_key,
            account_name='emilydevtest')

        blob_client = container_client.get_blob_client(
            self.get_resource_name('oauthblob'))
        blob_content = self.get_random_text_data(1024)
        blob_client.upload_blob(blob_content, length=len(blob_content))

        # Act
        new_blob_client = BlobClient.from_blob_url(blob_client.url,
                                                   credential=token)
        content = new_blob_client.download_blob()

        # Assert
        self.assertEqual(blob_content, b"".join(list(content)).decode('utf-8'))
Example #12
    def _setup(self, storage_account, key):
        self.bsc = BlobServiceClient(self.account_url(storage_account, "blob"),
                                     credential=key,
                                     connection_data_block_size=4 * 1024,
                                     max_single_put_size=32 * 1024,
                                     max_block_size=4 * 1024)
        self.config = self.bsc._config
        self.container_name = self.get_resource_name('utcontainer')

        # create source blob to be copied from
        self.source_blob_name = self.get_resource_name('srcblob')
        self.source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE)

        blob = self.bsc.get_blob_client(self.container_name,
                                        self.source_blob_name)
        if self.is_live:
            self.bsc.create_container(self.container_name)
            blob.upload_blob(self.source_blob_data)

        # generate a SAS so that it is accessible with a URL
        sas_token = generate_blob_sas(
            blob.account_name,
            blob.container_name,
            blob.blob_name,
            snapshot=blob.snapshot,
            account_key=blob.credential.account_key,
            permission=BlobSasPermissions(read=True),
            expiry=datetime.utcnow() + timedelta(hours=1),
        )
        self.source_blob_url = BlobClient.from_blob_url(
            blob.url, credential=sas_token).url
Example #13
    def upload_file(self, file_name):
        #if(type(file_name)!=type(str)):
        #    return False, -1

        name_postfix = datetime.datetime.now().strftime('%Y_%m_%d_%H%M%S')
        create_name = 'canbus' + str(name_postfix) + '.log'
        blob_info = self.client.get_storage_info_for_blob(create_name)
        print("blob infor get")
        self.sas_url = "https://{}/{}/{}{}".format(blob_info["hostName"],
                                                   blob_info["containerName"],
                                                   blob_info["blobName"],
                                                   blob_info["sasToken"])
        try:
            logger.info(
                "\nUploading file: {} to Azure Storage as blob: {} in container {}\n"
                .format(file_name, blob_info["blobName"],
                        blob_info["containerName"]))
            chunk_size = 4 * 1024 * 1024
            # Upload the specified file
            with BlobClient.from_blob_url(self.sas_url) as self.blob_client:
                with open(file_name, "rb") as stream:
                    self.blob_client.create_append_blob()
                    result = None  # guards the return below if the file is empty
                    while True:
                        read_data = stream.read(chunk_size)
                        if not read_data:
                            logger.info("file " + create_name +
                                        "upload successful")
                            break
                        result = self.blob_client.append_block(read_data)
                    return (True, result)
        except Exception as e:
            logger.error("Unable to upload file" + str(e))
            return False, 0
Example #14
    def _setup(self, bsc):
        self.container_name = self.get_resource_name('utcontainer')

        # create source blob to be copied from
        self.source_blob_name = self.get_resource_name('srcblob')
        self.source_blob_data = self.get_random_bytes(4 * 1024)
        source_blob = bsc.get_blob_client(self.container_name,
                                          self.source_blob_name)

        if self.is_live:
            bsc.create_container(self.container_name)
            source_blob.upload_blob(self.source_blob_data)

        # generate a SAS so that it is accessible with a URL
        sas_token = generate_blob_sas(
            source_blob.account_name,
            source_blob.container_name,
            source_blob.blob_name,
            snapshot=source_blob.snapshot,
            account_key=source_blob.credential.account_key,
            permission=BlobSasPermissions(read=True),
            expiry=datetime.utcnow() + timedelta(hours=1),
        )
        sas_source = BlobClient.from_blob_url(source_blob.url,
                                              credential=sas_token)
        self.source_blob_url = sas_source.url
Example #15
    def _store_blob(self, blob_info, file_name):
        try:
            sas_url = "https://{}/{}/{}{}".format(blob_info["hostName"],
                                                  blob_info["containerName"],
                                                  blob_info["blobName"],
                                                  blob_info["sasToken"])

            # Upload the specified file
            with BlobClient.from_blob_url(sas_url) as blob_client:
                with open(file_name, "rb") as f:
                    # the `with` block closes the client on exit and AzureError is
                    # handled below, so no inner try/except or explicit close() is needed
                    result = blob_client.upload_blob(f, overwrite=True)
                    return (True, result)

        except FileNotFoundError as ex:
            # catch file not found and add an HTTP status code to return in notification to IoT Hub
            ex.status_code = 404
            return (False, ex)

        except AzureError as ex:
            # catch Azure errors that might result from the upload operation
            return (False, ex)
Example #16
    def url(self, name, expire=None, parameters=None):
        name = self._get_valid_path(name)
        params = parameters or {}

        if expire is None:
            expire = self.expiration_secs

        credential = None
        if expire:
            expiry = self._expire_at(expire)
            user_delegation_key = self.get_user_delegation_key(expiry)
            sas_token = generate_blob_sas(
                self.account_name,
                self.azure_container,
                name,
                account_key=self.account_key,
                user_delegation_key=user_delegation_key,
                permission=BlobSasPermissions(read=True),
                expiry=expiry,
                **params)
            credential = sas_token

        container_blob_url = self.client.get_blob_client(name).url
        return BlobClient.from_blob_url(container_blob_url,
                                        credential=credential).url
Example #17
    def setUp(self):
        super(StorageBlockBlobTest, self).setUp()
        url = self._get_account_url()
        credential = self._get_shared_key_credential()

        # test chunking functionality by reducing the size of each chunk,
        # otherwise the tests would take too long to execute
        self.bsc = BlobServiceClient(url,
                                     credential=credential,
                                     connection_data_block_size=4 * 1024,
                                     max_single_put_size=32 * 1024,
                                     max_block_size=4 * 1024)
        self.config = self.bsc._config
        self.container_name = self.get_resource_name('utcontainer')

        # create source blob to be copied from
        self.source_blob_name = self.get_resource_name('srcblob')
        self.source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE)

        blob = self.bsc.get_blob_client(self.container_name,
                                        self.source_blob_name)
        if not self.is_playback():
            self.bsc.create_container(self.container_name)
            blob.upload_blob(self.source_blob_data)

        # generate a SAS so that it is accessible with a URL
        sas_token = blob.generate_shared_access_signature(
            permission=BlobSasPermissions(read=True),
            expiry=datetime.utcnow() + timedelta(hours=1),
        )
        self.source_blob_url = BlobClient.from_blob_url(
            blob.url, credential=sas_token).url
Example #18
def read_utf8_file(
        path: str,
        credentials: Optional[Union[str, Dict[str, str]]]) -> Iterator[str]:
    blob_data = _try_parse_azure_blob_uri(path)
    if blob_data is None:
        with open(path, "rb") as f:
            data = f.read()
    else:
        try:
            # pip install azure-storage-blob
            from azure.storage.blob import BlobClient
        except ImportError:
            print(
                "Failed to import azure.storage.blob. Please pip install azure-storage-blob",
                file=sys.stderr)
            raise
        data = BlobClient.from_blob_url(
            path,
            credential=_get_azure_key(
                storage_account=blob_data[0],
                credentials=credentials)).download_blob().readall()
    if path.endswith('.gz'):
        data = gzip.decompress(data)
    # @TODO: auto-detect UCS-2 by BOM
    return iter(data.decode(encoding='utf-8').splitlines())
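
A hypothetical call to read_utf8_file above, shown with a local gzipped file so no Azure credentials are involved (for blob URIs, the credentials argument is resolved via _get_azure_key):

for line in read_utf8_file("data/corpus.txt.gz", credentials=None):
    print(line)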
Example #19
    def setUp(self):
        super(StorageLoggingTest, self).setUp()

        url = self._get_account_url()
        credential = self._get_shared_key_credential()

        self.bsc = BlobServiceClient(url, credential=credential)
        self.container_name = self.get_resource_name('utcontainer')

        # create source blob to be copied from
        self.source_blob_name = self.get_resource_name('srcblob')
        self.source_blob_data = self.get_random_bytes(4 * 1024)
        source_blob = self.bsc.get_blob_client(self.container_name,
                                               self.source_blob_name)

        if not self.is_playback():
            self.bsc.create_container(self.container_name)
            source_blob.upload_blob(self.source_blob_data)

        # generate a SAS so that it is accessible with a URL
        sas_token = source_blob.generate_shared_access_signature(
            permission=BlobSasPermissions(read=True),
            expiry=datetime.utcnow() + timedelta(hours=1),
        )
        sas_source = BlobClient.from_blob_url(source_blob.url,
                                              credential=sas_token)
        self.source_blob_url = sas_source.url
Example #20
async def storage_blob(blob_info):
    try:
        print("Azure Blob storage v12 - Python quickstart sample")
        sas_url = "https://{}/{}/{}{}".format(
            blob_info["hostName"],
            blob_info["containerName"],
            blob_info["blobName"],
            blob_info["sasToken"],
        )
        blob_client = BlobClient.from_blob_url(sas_url)
        # Create a file in local Documents directory to upload and download
        local_file_name = "data/quickstart" + str(uuid.uuid4()) + ".txt"
        filename = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                local_file_name)
        # Write text to the file
        if not os.path.exists(os.path.dirname(filename)):
            os.makedirs(os.path.dirname(filename))
        with open(filename, "w") as file:
            file.write("Hello, World!")

        print("\nUploading to Azure Storage as blob:\n\t" + local_file_name)
        # Upload the created file
        with open(filename, "rb") as f:
            result = blob_client.upload_blob(f)
            return (None, result)

    except Exception as ex:
        print("Exception:")
        print(ex)
        return ex
Example #21
    def create_blob_client(self, blob_path=None, sas_url=None):
        if sas_url:
            blob_client = BlobClient.from_blob_url(sas_url)
        elif blob_path and self.AZURE_STORAGE_CONNECTION_STRING and self.AZURE_STORAGE_CONTAINER:
            blob_client = BlobClient.from_connection_string(
                self.AZURE_STORAGE_CONNECTION_STRING,
                self.AZURE_STORAGE_CONTAINER, blob_path)
        else:
            raise Exception('blob_path or sas_url required.')
        return blob_client
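
Hypothetical usage of the helper above, given some instance svc of the enclosing class:

# a SAS URL wins when provided:
client = svc.create_blob_client(sas_url="https://account.blob.core.windows.net/c/b?<sas>")
# otherwise AZURE_STORAGE_CONNECTION_STRING and AZURE_STORAGE_CONTAINER are used:
client = svc.create_blob_client(blob_path="logs/app.log")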
Example #22
def load_to_azure(fname, github_id):
    with open(f'images/{fname}', 'rb') as f:
        image = f.read()
    logging.info(f'[*] Loading image to Azure, {github_id=}')

    url = f"https://wellcomehomestorage.blob.core.windows.net/img/{github_id}/{fname}?sv=2019-12-12&ss=b&srt=o&sp=cx&se=2022-01-19T01:36:57Z&st=2021-01-18T17:36:57Z&spr=https&sig=ZcC1jgebmJ06pbsnVPthTDUC2b%2Bs8DdCtS%2BuyCyZ3Hk%3D"
    blob_client = BlobClient.from_blob_url(url)
    #blob_client = blob_service_client.get_blob_client('img', f'{github_id}/{fname}')
    blob_client.upload_blob(image)
    logging.info('[*] Blob uploaded')
Example #23
    def test_create_service_with_custom_account_endpoint_path(self):
        account_name = "blobstorage"
        account_key = "blobkey"
        custom_account_url = "http://local-machine:11002/custom/account/path/" + self.sas_token
        for service_type in SERVICES.items():
            conn_string = 'DefaultEndpointsProtocol=http;AccountName={};AccountKey={};BlobEndpoint={};'.format(
                account_name, account_key, custom_account_url)

            # Act
            service = service_type[0].from_connection_string(
                conn_string, container_name="foo", blob_name="bar")

            # Assert
            self.assertEqual(service.account_name, account_name)
            self.assertEqual(service.credential.account_name, account_name)
            self.assertEqual(service.credential.account_key, account_key)
            self.assertEqual(service.primary_hostname, 'local-machine:11002/custom/account/path')

        service = BlobServiceClient(account_url=custom_account_url)
        self.assertEqual(service.account_name, None)
        self.assertEqual(service.credential, None)
        self.assertEqual(service.primary_hostname, 'local-machine:11002/custom/account/path')
        self.assertTrue(service.url.startswith('http://local-machine:11002/custom/account/path/?'))

        service = ContainerClient(account_url=custom_account_url, container_name="foo")
        self.assertEqual(service.account_name, None)
        self.assertEqual(service.container_name, "foo")
        self.assertEqual(service.credential, None)
        self.assertEqual(service.primary_hostname, 'local-machine:11002/custom/account/path')
        self.assertTrue(service.url.startswith('http://local-machine:11002/custom/account/path/foo?'))

        service = ContainerClient.from_container_url("http://local-machine:11002/custom/account/path/foo?query=value")
        self.assertEqual(service.account_name, None)
        self.assertEqual(service.container_name, "foo")
        self.assertEqual(service.credential, None)
        self.assertEqual(service.primary_hostname, 'local-machine:11002/custom/account/path')
        self.assertEqual(service.url, 'http://local-machine:11002/custom/account/path/foo')

        service = BlobClient(account_url=custom_account_url, container_name="foo", blob_name="bar", snapshot="baz")
        self.assertEqual(service.account_name, None)
        self.assertEqual(service.container_name, "foo")
        self.assertEqual(service.blob_name, "bar")
        self.assertEqual(service.snapshot, "baz")
        self.assertEqual(service.credential, None)
        self.assertEqual(service.primary_hostname, 'local-machine:11002/custom/account/path')
        self.assertTrue(service.url.startswith('http://local-machine:11002/custom/account/path/foo/bar?snapshot=baz&'))

        service = BlobClient.from_blob_url("http://local-machine:11002/custom/account/path/foo/bar?snapshot=baz&query=value")
        self.assertEqual(service.account_name, None)
        self.assertEqual(service.container_name, "foo")
        self.assertEqual(service.blob_name, "bar")
        self.assertEqual(service.snapshot, "baz")
        self.assertEqual(service.credential, None)
        self.assertEqual(service.primary_hostname, 'local-machine:11002/custom/account/path')
        self.assertEqual(service.url, 'http://local-machine:11002/custom/account/path/foo/bar?snapshot=baz')
Example #24
def _download_model(model_file_path, folder_path, url):
    response = urllib.request.urlopen(url)

    blob_client = BlobClient.from_blob_url(response.url)

    # save the model if it does not already exist
    if not os.path.exists(model_file_path):
        os.makedirs(folder_path, exist_ok=True)
        with open(model_file_path, "wb") as my_blob:
            download_stream = blob_client.download_blob()
            my_blob.write(download_stream.readall())
Example #25
def download_blob_properties(blob_url: str) -> Dict[str, str]:
    """Downloads the blob properties from Azure for the given blob URI"""
    blob_client = BlobClient.from_blob_url(blob_url)
    logger.info(f"Downloading blob properties '{blob_client.blob_name}'" +
                f"from container '{blob_client.container_name}'" +
                f"on account: '{blob_client.account_name}'")

    response = blob_client.get_blob_properties()
    logger.debug(response)

    return response
Example #26
def main(req: func.HttpRequest) -> func.HttpResponse:
    # Log information
    logging.info('Python HTTP trigger function processed a request.')

    URL = {
        'https://sh.zu.ke.com/zufang/rco11rs%E4%B8%8A%E9%9D%92%E4%BD%B3%E5%9B%AD/',
        'https://sh.zu.ke.com/zufang/brp7500erp14000rs%E6%B2%B3%E6%BB%A8%E5%9B%B4%E5%9F%8E/',
        'https://sh.zu.ke.com/zufang/c5011000018183/?sug=%E9%9F%B3%E4%B9%90%E5%B9%BF%E5%9C%BA'
    }
    try:
        blob_client = BlobClient.from_blob_url(
            "https://apts.blob.core.windows.net/azure-webjobs-hosts/list.xlsx?sp=rw&st=2021-03-02T04:47:11Z&se=2022-03-02T12:47:11Z&sv=2020-02-10&sr=b&sig=vj4QZakCnTH8qyGTpPvBHEliLhaBkiGzNuTMYEvZ6Uc%3D"
        )
        download_stream = blob_client.download_blob()
        df = pd.read_excel(download_stream.readall())
    except Exception:
        # start with an empty frame if the blob cannot be downloaded yet
        df = pd.DataFrame()

    df_new = pd.DataFrame()

    for u in URL:
        df, df_new = checkCompound(df, df_new, u)

    # save to Azure
    writer = io.BytesIO()
    df.to_excel(writer, index=False)
    blob = BlobClient.from_connection_string(
        conn_str=
        "DefaultEndpointsProtocol=https;AccountName=apts;AccountKey=FIBkp9peEA7rezJQ3FmOOpbohA8eUflh4B5zS20igrBsEQUIv5Yrxyj/9uTx1pg1e3y0UalVDl7xEpyA8Zja5g==;EndpointSuffix=core.windows.net",
        container_name="azure-webjobs-hosts",
        blob_name="list.xlsx")
    blob.upload_blob(writer.getvalue(), overwrite=True)

    if not df_new.empty:
        sendEmail(df_new)
        return func.HttpResponse(status_code=200,
                                 headers={'content-type': 'text/html'},
                                 body=f"""<!DOCTYPE html>
        <html>
        <body>{df_new.to_html(index_names=False, escape=False, index=False)}
        </body>
        </html>
        """)
    else:
        print("no new!")
        return func.HttpResponse(status_code=200,
                                 headers={'content-type': 'text/html'},
                                 body=f"""<!DOCTYPE html>
        <html>
        <body>No new ones!
        </body>
        </html>
        """)
Example #27
def get_blob(storage_uri, key, file_path):
    """
    Download files from Azure Blob Storage to local file path
    :param storage_uri: The Azure Storage URI of the file to download
    :param file_path: The local file path to download to
    """
    blob = BlobClient.from_blob_url(storage_uri, credential=key)
    blob_data = blob.download_blob()
    file_dir = os.path.dirname(os.path.abspath(file_path))
    if not os.path.exists(file_dir):
        os.makedirs(file_dir)
    with open(file_path, "wb") as fd:
        blob_data.readinto(fd)
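
Hypothetical usage of get_blob above, with placeholder URL, credential and path:

get_blob(
    "https://account.blob.core.windows.net/models/model.bin",
    key="<account-key-or-sas-token>",
    file_path="./models/model.bin",
)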
Example #28
def download_blob(blob_url: str) -> Any:
    """
    Downloads the given blob from the container.
    """
    blob_client = BlobClient.from_blob_url(blob_url)
    logger.info(f"Downloading blob '{blob_client.blob_name}'" +
                f"from container '{blob_client.container_name}'" +
                f"on account: '{blob_client.account_name}'")

    response = blob_client.download_blob().readall()
    logger.debug(response)

    return response
Example #29
def create_today_append_blob():
    session_uuid = uuid.uuid4()
    today_string = now.strftime("%Y-%m-%d")
    blob_name = 'sensor-data-{0}-{1}.csv'.format(today_string, session_uuid)
    try:
        blob_client = BlobClient.from_blob_url(log_file_sas_url.format(today_string, blob_name))
        blob_exists = blob_client.exists()
        if not blob_exists:
            blob_client.create_append_blob(content_settings=ContentSettings(content_type='application/csv'), metadata=None)
        blob_client.append_block(append_file_header)
        return blob_client
    except Exception as e:
        print('Error in create_today_append_blob function:', e)
Example #30
    def test_set_blob_tags_using_blob_sas(self, resource_group, location,
                                          storage_account,
                                          storage_account_key):
        token = generate_account_sas(
            storage_account.name,
            storage_account_key,
            ResourceTypes(service=True, container=True, object=True),
            AccountSasPermissions(write=True,
                                  list=True,
                                  read=True,
                                  delete_previous_version=True,
                                  tag=True,
                                  filter_by_tags=True),
            datetime.utcnow() + timedelta(hours=1),
        )
        self._setup(storage_account, token)

        tags = {
            "year": '1000',
            "tag2": "secondtag",
            "tag3": "thirdtag",
            "habitat_type": 'Shallow Lowland Billabongs'
        }
        blob_client, _ = self._create_block_blob(
            tags=tags, container_name=self.container_name)
        token1 = generate_blob_sas(
            storage_account.name,
            self.container_name,
            blob_client.blob_name,
            account_key=storage_account_key,
            permission=BlobSasPermissions(delete_previous_version=True,
                                          tag=True),
            expiry=datetime.utcnow() + timedelta(hours=1),
        )
        blob_client = BlobClient.from_blob_url(blob_client.url, token1)
        blob_client.set_blob_tags(tags=tags)
        tags_on_blob = blob_client.get_blob_tags()
        self.assertEqual(len(tags_on_blob), len(tags))

        if self.is_live:
            sleep(10)

        # To filter in a specific container use:
        # where = "@container='{}' and tag1='1000' and tag2 = 'secondtag'".format(container_name1)
        where = "\"year\"='1000' and tag2 = 'secondtag' and tag3='thirdtag'"

        blob_list = self.bsc.find_blobs_by_tags(filter_expression=where,
                                                results_per_page=2).by_page()
        first_page = next(blob_list)
        items_on_page1 = list(first_page)
        self.assertEqual(1, len(items_on_page1))