Example #1
def main():
    storage_client = CloudStorageAccount(account_name=storage_account_name,
                                         account_key=storage_account_key)
    blob_service = storage_client.create_block_blob_service()

    years = range(2016, 2019)
    months = range(1, 13)
    days = range(1, 32)
    hours = range(0, 24)
    url_path = "{y}-{m:02d}-{d:02d}-{h}.json.gz"

    for y in years:
        for m in months:
            for d in days:
                for h in hours:
                    p = url_path.format(y=y, m=m, d=d, h=h)

                    if os.path.exists(p + "_done"):
                        print("{} ingested. skipping...".format(p))
                        continue

                    ingest(
                        "https://{}.blob.core.windows.net/{}/{};{}".format(
                            storage_account_name, storage_container, p,
                            storage_account_key),
                        50 * 1024 * 1024,
                    )

                    with open(p + "_done", "w+") as f:
                        f.write(" ")
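For reference, the url_path template above zero-pads the month and day but not the hour; a quick, illustrative check of the names it produces:

# illustrative check of the url_path template used above
path = "{y}-{m:02d}-{d:02d}-{h}.json.gz".format(y=2016, m=1, d=1, h=0)
assert path == "2016-01-01-0.json.gz"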
Example #2
    def ingest_from_multiple_blobs(self, blobs, delete_sources_on_success,
                                   ingestion_properties):
        """
        Enqueuing an ingest command from azure blobs.

        Parameters
        ----------
        blobs : List of BlobDescriptor.
            The list of blobs to be ingested.
            Please provide the raw blob size to each of the descriptors.
        delete_sources_on_success : bool.
            After a successful ingest, whether to delete the origin files.
        ingestion_properties : kusto_ingest_client.ingestion_properties.IngestionProperties
            The ingestion properties.
        """
        for blob in blobs:
            queues = self._resource_manager.get_ingestion_queues()
            queue_details = random.choice(queues)
            storage_client = CloudStorageAccount(
                queue_details.storage_account_name,
                sas_token=queue_details.sas)
            queue_service = storage_client.create_queue_service()
            authorization_context = self._resource_manager.get_authorization_context()
            ingestion_blob_info = _IngestionBlobInfo(
                blob, ingestion_properties, delete_sources_on_success,
                authorization_context)
            ingestion_blob_info_json = ingestion_blob_info.to_json()
            encoded = base64.b64encode(
                ingestion_blob_info_json.encode("utf-8")).decode("utf-8")
            queue_service.put_message(queue_details.object_name, encoded)
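A minimal call sketch for the method above. The client instance, blob URLs, sizes, and target database/table are hypothetical placeholders; BlobDescriptor and IngestionProperties come from the same legacy azure-kusto-ingest package:

# hypothetical usage sketch; all names and values below are placeholders
blobs = [
    BlobDescriptor("https://myaccount.blob.core.windows.net/mycontainer/part1.csv.gz", 1024),
    BlobDescriptor("https://myaccount.blob.core.windows.net/mycontainer/part2.csv.gz", 2048),
]
props = IngestionProperties(database="MyDatabase", table="MyTable")
client.ingest_from_multiple_blobs(blobs, delete_sources_on_success=False,
                                  ingestion_properties=props)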
Example #3
def retrieve():
    timestamp = request.args.get('timestamp')
    name = request.args.get('name')

    configuration = getConfiguration()
    f = open(configuration['debug_file'], 'a')
    log(f, 'Retrieving - ' + name + '/' + timestamp)

    account = CloudStorageAccount(account_name=configuration['account_name'],
                                  account_key=configuration['account_key'])

    service = account.create_block_blob_service()

    stream = io.BytesIO()

    service.get_blob_to_stream(container_name=configuration['container_name'],
                               blob_name=name + '/' + timestamp +
                               '/output.csv.gz',
                               stream=stream)

    # output.csv.gz is gzip data, so pass 16 + MAX_WBITS so zlib expects a gzip header
    content = zlib.decompress(stream.getbuffer(), zlib.MAX_WBITS | 16)

    log(f, 'Retrieved - ' + name + '/' + timestamp)

    return content
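Since retrieve() is a Flask view reading name and timestamp from the query string, a client call might look like the sketch below (route, host, and values are assumptions):

# hypothetical client call; route and values are placeholders
import requests
resp = requests.get("http://localhost:5000/retrieve",
                    params={"name": "run42", "timestamp": "1546300800"})
csv_bytes = resp.content  # decompressed output.csv contents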
Example #4
def store_summary(f, file_name, summary):
    configuration = getConfiguration()

    log(f, 'Account Name: ' + configuration['account_name'])
    log(f, 'Container Name: ' + configuration['container_name'])

    account = CloudStorageAccount(account_name=configuration['account_name'],
                                  account_key=configuration['account_key'])

    folder = configuration['default_folder_name']

    service = account.create_block_blob_service()

    service.create_container(configuration['container_name'])

    log(f, 'Storing Content')
    summary_file = folder + '/' + summary['timestamp'] + '/summary.json'
    summary['summary_file_name'] = summary_file

    service.create_blob_from_stream(configuration['container_name'],
                                    summary['summary_file_name'],
                                    io.BytesIO(json.dumps(summary).encode()))

    log(f, 'Stored: ' + file_name)

    os.remove(file_name)

    log(
        f, 'Completed (Log) : ' + file_name + ' - ' +
        configuration['default_folder_name'] + ' - ' + summary['timestamp'])

    return
Example #5
    def emulator(self):
        # With account
        account = CloudStorageAccount(is_emulated=True)
        client = account.create_block_blob_service()

        # Directly
        client = BlockBlobService(is_emulated=True)
Example #6
    def emulator(self):
        # With account
        account = CloudStorageAccount(is_emulated=True)
        client = account.create_table_service()

        # Directly
        client = TableService(is_emulated=True)
Example #7
    def key_auth(self):
        # With account
        account = CloudStorageAccount(account_name='<account_name>', account_key='<account_key>')
        client = account.create_block_blob_service()

        # Directly
        client = BlockBlobService(account_name='<account_name>', account_key='<account_key>')
Example #8
    def sas_auth(self):
        # With account
        account = CloudStorageAccount(account_name='<account_name>', sas_token='<sas_token>')
        client = account.create_block_blob_service()

        # Directly
        client = BlockBlobService(account_name='<account_name>', sas_token='<sas_token>')
Example #9
 def deleteQueue(self, queueName, storageAccountName, storageKey):
     logging.info('Attempting deletion of queue: %s', queueName)
     account = CloudStorageAccount(storageAccountName, storageKey)
     queue_service = account.create_queue_service()
     if queue_service.exists(queueName):
         queue_service.delete_queue(queueName)
         logging.info('Successfully deleted queue: %s', queueName)
Example #10
 def createQueue(self, queueName, resourceGroupName, storageAccountName, storageKey, subscriptionId):
     logging.info("Creating new Queue with Name: " + storageAccountName + " inside Storage Account: " + storageAccountName)
     self.__waitForStorageWithRetry(resourceGroupName, storageAccountName, subscriptionId, 5)
     account = CloudStorageAccount(storageAccountName, storageKey)
     queue_service = account.create_queue_service()
     queue_service.create_queue(queueName)
     logging.info("Queue created successfully with Name: " + storageAccountName + " inside Storage Account: " + storageAccountName)
Example #11
def shares():
    # Create Container and Share
    global storage_account_key, blob_service, blob_share, file_service, file_share
    sak = storage_client.storage_accounts.list_keys(resourcegroupname,
                                                    storageaccountname)
    storage_account_key = sak.keys[0].value
    cloudstorage_client = CloudStorageAccount(storageaccountname,
                                              storage_account_key)
    blob_service = cloudstorage_client.create_block_blob_service()
    blob_share = blob_service.create_container(
        sharename, public_access=PublicAccess.Container)
    file_service = FileService(account_name=storageaccountname,
                               account_key=storage_account_key)
    file_share = file_service.create_share(sharename)
    # Copy Setup Files to Container and Share
    blob_service.create_blob_from_path(
        sharename,
        filename,
        filename,
    )
    file_service.create_file_from_path(
        sharename,
        '',
        filename,
        filename,
    )
Example #12
def quitarMaquinista():
    print('quitarMaquinista')
    nombre = request.form.get('nombre')
    maquinista = Maquinista.query.filter_by(nombre_m=nombre).first()
    if maquinista is None:
        return redirect(
            url_for('webRoutes.quitMaquinistas', error_maquinista=True))
    else:
        db.session.delete(maquinista)
        db.session.commit()

        STORAGE_ACCOUNT_NAME = 'ficherosmaquinistas'
        STORAGE_ACCOUNT_KEY = 'JKGDYu80C4HWg6DxUyA8mWYouPVAHV9tlB8MO6Xcv5sFKR7KVr+Onw7PLwP7KjMqhdPKTCWFk59NM4m+t/lcGQ=='
        account = CloudStorageAccount(STORAGE_ACCOUNT_NAME,
                                      STORAGE_ACCOUNT_KEY)
        file_service = account.create_file_service()
        files = list(
            file_service.list_directories_and_files(
                'shareficherosmaquinistas', prefix=maquinista.nombre_m))
        for file in files:
            print(file.name)
            file_service.delete_file("shareficherosmaquinistas", None,
                                     file.name)
            print("--------------------")

        return redirect(
            url_for('webRoutes.quitMaquinistas', ok_maquinista=True))
Example #13
    def emulator(self):
        # With account
        account = CloudStorageAccount(is_emulated=True)
        client = account.create_block_blob_service()

        # Directly
        client = BlockBlobService(is_emulated=True)
Example #14
    def ingest_from_file(self, file_descriptor: Union[FileDescriptor, str], ingestion_properties: IngestionProperties):
        """
        Enqueue an ingest command from local files.
        To learn more about ingestion methods go to:
        https://docs.microsoft.com/en-us/azure/data-explorer/ingest-data-overview#ingestion-methods
        :param file_descriptor: a FileDescriptor instance or a local file path to be ingested.
        :param azure.kusto.ingest.IngestionProperties ingestion_properties: Ingestion properties.
        """
        containers = self._resource_manager.get_containers()

        if isinstance(file_descriptor, FileDescriptor):
            descriptor = file_descriptor
        else:
            descriptor = FileDescriptor(file_descriptor)

        should_compress = not (
            ingestion_properties.format in [DataFormat.AVRO, DataFormat.ORC, DataFormat.PARQUET]
            or descriptor.path.endswith(".gz")
            or descriptor.path.endswith(".zip")
        )

        with descriptor.open(should_compress) as stream:
            blob_name = "{db}__{table}__{guid}__{file}".format(
                db=ingestion_properties.database, table=ingestion_properties.table, guid=descriptor.source_id or uuid.uuid4(), file=descriptor.stream_name
            )

            container_details = random.choice(containers)
            storage_client = CloudStorageAccount(container_details.storage_account_name, sas_token=container_details.sas)
            blob_service = storage_client.create_block_blob_service()

            blob_service.create_blob_from_stream(container_name=container_details.object_name, blob_name=blob_name, stream=stream)
            url = blob_service.make_blob_url(container_details.object_name, blob_name, sas_token=container_details.sas)

            self.ingest_from_blob(BlobDescriptor(url, descriptor.size, descriptor.source_id), ingestion_properties=ingestion_properties)
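A minimal usage sketch for ingest_from_file above; the client instance, file path, and target database/table are hypothetical placeholders:

# hypothetical usage sketch; names and paths are placeholders
props = IngestionProperties(database="MyDatabase", table="MyTable")
client.ingest_from_file("local_data.csv", ingestion_properties=props)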
Example #15
    def test_create_service_no_key(self):
        # Arrange

        # Act
        bad_account = CloudStorageAccount('', '')
        with self.assertRaises(ValueError):
            service = bad_account.create_block_blob_service()
Example #16
def main():
    print("Hello World!")
    try:
        account = CloudStorageAccount(config.STORAGE_ACCOUNT_NAME,
                                      config.STORAGE_ACCOUNT_KEY)
        queue_service = account.create_queue_service()
        queue_service.create_queue(config.STORAGE_QUEUE_NAME)
        while True:
            try:
                messages = queue_service.get_messages(
                    config.STORAGE_QUEUE_NAME)
                for message in messages:
                    print('Message for dequeueing is: ', message.content)
                    # Then delete it.
                    # When queue is deleted all messages are deleted, here is done for demo purposes
                    # Deleting requires the message id and pop receipt (returned by get_messages)
                    queue_service.delete_message(config.STORAGE_QUEUE_NAME,
                                                 message.id,
                                                 message.pop_receipt)
                    print('Successfully dequeued message')
            except Exception as e:
                print('Error occurred get_messages:', e)
                continue
    except Exception as e:
        print('Error occurred:', e)
Example #17
def make_blob_client(secrets):
    """
        Creates a blob client object
        :param str storage_account_key: storage account key
        :param str storage_account_name: storage account name
        :param str storage_account_suffix: storage account suffix
    """

    if secrets.shared_key:
        # Set up SharedKeyCredentials
        blob_client = blob.BlockBlobService(
            account_name=secrets.shared_key.storage_account_name,
            account_key=secrets.shared_key.storage_account_key,
            endpoint_suffix=secrets.shared_key.storage_account_suffix)
    else:
        # Set up ServicePrincipalCredentials
        arm_credentials = ServicePrincipalCredentials(
            client_id=secrets.service_principal.client_id,
            secret=secrets.service_principal.credential,
            tenant=secrets.service_principal.tenant_id,
            resource='https://management.core.windows.net/')
        m = RESOURCE_ID_PATTERN.match(
            secrets.service_principal.storage_account_resource_id)
        accountname = m.group('account')
        subscription = m.group('subscription')
        resourcegroup = m.group('resourcegroup')
        mgmt_client = StorageManagementClient(arm_credentials, subscription)
        key = mgmt_client.storage_accounts.list_keys(
            resource_group_name=resourcegroup,
            account_name=accountname).keys[0].value
        storage_client = CloudStorageAccount(accountname, key)
        blob_client = storage_client.create_block_blob_service()

    return blob_client
Example #18
    def test_create_service_no_key(self):
        # Arrange

        # Act
        bad_account = CloudStorageAccount('', '')
        with self.assertRaises(ValueError):
            service = bad_account.create_block_blob_service()
Example #19
def handle(ctx, params):

    account = CloudStorageAccount(
        account_name=ctx["secrets"]["azure_account_name"],
        account_key=ctx["secrets"]["azure_account_key"])
    svc = account.create_block_blob_service()

    source = ctx["event"]["source"]
    m = re.search(r"#/blobServices/default/containers/(.+?)/blobs/(.+)$",
                  source)
    if not m:
        return {"error": "could not parse source %s" % source}
    container, name = m.groups()

    blob = svc.get_blob_to_bytes(container_name=container, blob_name=name)

    rek = boto3.client(
        "rekognition",
        region_name="us-west-2",
        aws_access_key_id=ctx["serviceBindings"]["rekognition"]["REKOGNITION_AWS_ACCESS_KEY_ID"],
        aws_secret_access_key=ctx["serviceBindings"]["rekognition"]["REKOGNITION_AWS_SECRET_ACCESS_KEY"])
    rek_resp = rek.detect_labels(Image={'Bytes': blob.content})

    print("Detected labels in " + name)

    fields = []
    metadata = {}
    for label in rek_resp["Labels"]:
        metadata[label["Name"].replace(" ", "")] = str(label["Confidence"])
        fields.append({
            "title": label["Name"],
            "value": label["Confidence"],
            "short": True
        })

    svc.create_blob_from_bytes("dispatchpubliccontainer",
                               blob.name,
                               blob.content,
                               metadata=metadata)

    print("Done... %s" % metadata)

    message = {
        "attachments": [{
            "text": "File %s just pushed to %s" % (name, container),
            "fields": fields,
            "image_url": "https://%s.blob.core.windows.net/dispatchpubliccontainer/%s"
                         % (ctx["secrets"]["azure_account_name"], name),
            "color": "#F35A00"
        }]
    }

    requests.post(ctx["secrets"]["webhook-url"], json=message)
    return message
Example #20
 def __init__(self, queue):
     # read config
     self.account = CloudStorageAccount(account_name="pythontasks",
                                        sas_token=SAS)
     self.service = self.account.create_file_service()
     self.share = queue
     self.service.create_share(self.share, fail_on_exist=False)
     self.service.create_directory(self.share, TASKS_DIR)
     self.service.create_directory(self.share, RESULTS_DIR)
Example #21
 def service(self):
     # This won't open a connection or anything,
     # it's akin to a client
     if self._service is None:
         account = CloudStorageAccount(self.account_name,
                                       self.account_key,
                                       is_emulated=self.is_emulated)
         self._service = account.create_block_blob_service()
     return self._service
Example #22
    def create_account_sas_definition(self):
        """
        Creates an account sas definition, to manage storage account and its entities.
        """
        from azure.storage.common import SharedAccessSignature, CloudStorageAccount
        from azure.keyvault.models import SasTokenType, SasDefinitionAttributes
        from azure.keyvault import SecretId

        # To create an account sas definition in the vault we must first create the template. The
        # template_uri for an account sas definition is the intended account sas token signed with an arbitrary key.
        # Use the SharedAccessSignature class from azure.storage.common to create an account sas token
        sas = SharedAccessSignature(
            account_name=self.config.storage_account_name,
            # don't sign the template with the storage account key; use key 00000000
            account_key='00000000')
        account_sas_template = sas.generate_account(
            services='bfqt',  # all services: blob, file, queue, and table
            resource_types='sco',  # all resource types: service, container, object
            permission='acdlpruw',
            # all permissions: add, create, delete, list, process, read, update, write
            expiry='2020-01-01'
        )  # expiry will be ignored; the validity period will determine token expiry

        # use the created template to create a sas definition in the vault
        attributes = SasDefinitionAttributes(enabled=True)
        sas_def = self.keyvault_client.set_sas_definition(
            vault_base_url=self.sample_vault_url,
            storage_account_name=self.config.storage_account_name,
            sas_definition_name='acctall',
            template_uri=account_sas_template,
            sas_type=SasTokenType.account,
            validity_period='PT2H',
            sas_definition_attributes=attributes)

        # When the sas definition is created, a corresponding managed secret is also created in the
        # vault. This secret is used to provision sas tokens according to the sas definition. Users
        # retrieve the sas token via the get_secret method.

        # get the secret id from the returned SasDefinitionBundle
        sas_secret_id = SecretId(uri=sas_def.secret_id)
        # call get_secret and the value of the returned SecretBundle will be a newly issued sas token
        acct_sas_token = self.keyvault_client.get_secret(
            vault_base_url=sas_secret_id.vault,
            secret_name=sas_secret_id.name,
            secret_version=sas_secret_id.version).value

        # create the cloud storage account object
        cloud_storage_account = CloudStorageAccount(
            account_name=self.config.storage_account_name,
            sas_token=acct_sas_token)

        # create a blob with the account sas token
        blob_service = cloud_storage_account.create_block_blob_service()
        blob_service.create_container('blobcontainer')
        blob_service.create_blob_from_text(container_name='blobcontainer',
                                           blob_name='blob1',
                                           text=u'test blob1 data')
Example #23
    def test_create_account_sas_and_key(self):
        # Arrange

        # Act
        account = CloudStorageAccount(self.account_name, self.account_key, self.sas_token)
        service = account.create_block_blob_service()

        # Assert
        self.validate_service(service, BlockBlobService)
Example #24
def send_message(message):
    configuration = getConfiguration()

    account = CloudStorageAccount(account_name=configuration['account_name'],
                                  account_key=configuration['account_key'])

    service = account.create_queue_service()
    service.create_queue(configuration['queue_name'])
    service.put_message(configuration['queue_name'], message)
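A one-line usage sketch for send_message (the payload here is illustrative); process() further below uses the same pattern with a real summary dict:

send_message(json.dumps({"status": "uploaded", "file_name": "can.log"}))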
Example #25
    def key_auth(self):
        # With account
        account = CloudStorageAccount(account_name='<account_name>',
                                      account_key='<account_key>')
        client = account.create_table_service()

        # Directly
        client = TableService(account_name='<account_name>',
                              account_key='<account_key>')
Example #26
    def sas_auth(self):
        # With account
        account = CloudStorageAccount(account_name='<account_name>',
                                      sas_token='<sas_token>')
        client = account.create_table_service()

        # Directly
        client = TableService(account_name='<account_name>',
                              sas_token='<sas_token>')
Example #27
 def service(self):
     # This won't open a connection or anything,
     # it's akin to a client
     if self._service is None:
         account = CloudStorageAccount(
             self.account_name,
             self.account_key,
             is_emulated=self.is_emulated)
         self._service = account.create_block_blob_service()
     return self._service
Example #28
    def public(self):
        # This applies to the blob services only
        # Public access must be enabled on the container or requests will fail

        # With account
        account = CloudStorageAccount(account_name='<account_name>')
        client = account.create_block_blob_service()

        # Directly
        client = BlockBlobService(account_name='<account_name>')
Example #29
    def test_create_account_sas_and_key(self):
        # Arrange

        # Act
        account = CloudStorageAccount(self.account_name, self.account_key,
                                      self.sas_token)
        service = account.create_block_blob_service()

        # Assert
        self.validate_service(service, BlockBlobService)
Example #30
def file_list_blob03(container_name, blob_path):
    storageName = 'dataplatdevblob03'
    storageKey = config.storage[storageName]
    storage = CloudStorageAccount(account_name=storageName,
                                  account_key=storageKey)
    # Create a Block Blob Service object
    bbs = storage.create_block_blob_service()
    return bbs.list_blobs(container_name, blob_path)
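Hypothetical usage of the helper above; list_blobs treats its second positional argument as a name prefix, so blob_path acts as a virtual-folder filter. Container and prefix below are placeholders:

# hypothetical usage; container and prefix are placeholders
for blob in file_list_blob03('mycontainer', 'raw/2019/'):
    print(blob.name)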
Example #31
    def public(self):
        # This applies to the blob services only
        # Public access must be enabled on the container or requests will fail

        # With account
        account = CloudStorageAccount(account_name='<account_name>')
        client = account.create_block_blob_service()

        # Directly
        client = BlockBlobService(account_name='<account_name>')
Example #32
def process():
    output = []
    f = None

    try:
        configuration = getConfiguration()

        f = open(configuration['debug_file'], 'a')

        guid = request.values.get('guid')

        folder = configuration['default_folder_name']

        blob_name = folder + '/' + guid + ".log"

        file_name = request.args.get('file_name')

        summary = process_canlog(f, file_name)

        target_blob_name = folder + '/' + summary['timestamp'] + '/' + 'can.log'

        summary['status'] = 'uploaded'
        summary['container_name'] = configuration['container_name']
        summary['blob_name'] = target_blob_name
        summary['account_name'] = configuration['account_name']
        summary['queue_name'] = configuration['queue_name']
        summary['file_name'] = 'can.log'

        log(f, 'Renaming blob : ' + target_blob_name)

        account = CloudStorageAccount(
            account_name=configuration['account_name'],
            account_key=configuration['account_key'])

        service = account.create_block_blob_service()
        blob_url = service.make_blob_url(configuration['container_name'],
                                         blob_name)
        service.copy_blob(configuration['container_name'], target_blob_name,
                          blob_url)

        log(f, 'Deleting temporary blob : ' + blob_name)
        service.delete_blob(configuration['container_name'], blob_name)
        log(f, 'Storing Summary ' + blob_name)
        store_summary(f, file_name, summary)
        log(f, 'Sending Message ' + configuration['queue_name'])
        send_message(json.dumps(summary))
        log(f, 'Sent Message ' + configuration['queue_name'])

        return json.dumps(summary).encode()

    except Exception as e:
        if f is not None:
            log(f, str(e))
            f.close()
        output.append({"status": 'fail', "error": str(e)})

    return json.dumps(output, sort_keys=True)
Example #33
    def test_create_account_emulated(self):
        # Arrange

        # Act
        account = CloudStorageAccount(is_emulated=True)
        service = account.create_block_blob_service()

        # Assert
        self.assertIsNotNone(service)
        self.assertEqual(service.account_name, 'devstoreaccount1')
        self.assertIsNotNone(service.account_key)
Example #34
def initiate(f, file_name, guid):
    configuration = getConfiguration()

    f = open(configuration['debug_file'], 'a')
    log(f, 'Account Name: ' + configuration['account_name'])
    log(f, 'Container Name: ' + configuration['container_name'])

    input_zip = ZipFile(file_name, 'r')

    log(f, 'Initiating (Zip) : ' + file_name)

    folder = ''
    timestamp = '0'

    for name in input_zip.namelist():

        if (name.startswith('GPS.time.sec_BUSDAQ')):
            stream = input_zip.open(name)
            parser = FamosParser(f)
            parser.setIgnoreZero(True)
            parser.setLimit(1)

            # Parsing File
            parser.parse(stream)
            parser.summary()
            data = parser.getData()
            parts = re.search(r"_([0-9]*)?(\.raw)", name, re.DOTALL)
            folder = parts.group(1)

            iObs = 0

            while iObs < len(data):
                log(f, 'Obs: ' + str(iObs) + ' - ' + str(data[iObs]))
                if (data[iObs] > 0):
                    timestamp = re.sub(r'\..*', '', '%.7f' % data[iObs])
                    break

                iObs += 1

            log(f, 'Found Timestamp: ' + timestamp)

    summary = {"folder": folder, "timestamp": timestamp, "logs": guid + ".zip"}

    account = CloudStorageAccount(account_name=configuration['account_name'],
                                  account_key=configuration['account_key'])

    service = account.create_block_blob_service()
    service.create_blob_from_stream(configuration['container_name'],
                                    folder + '/' + timestamp + '/status.json',
                                    io.BytesIO(json.dumps(summary).encode()))

    log(f, 'Initiated (Zip) : ' + file_name + ' - ' + folder + '/' + timestamp)

    return summary
Example #35
    def test_create_account_emulated(self):
        # Arrange

        # Act
        account = CloudStorageAccount(is_emulated=True)
        service = account.create_block_blob_service()

        # Assert
        self.assertIsNotNone(service)
        self.assertEqual(service.account_name, 'devstoreaccount1')
        self.assertIsNotNone(service.account_key)
Example #36
    def ingest_from_dataframe(self, df, ingestion_properties):
        """Enqueuing an ingest command from local files.
        :param pandas.DataFrame df: input dataframe to ingest.
        :param azure.kusto.ingest.IngestionProperties ingestion_properties: Ingestion properties.
        
        """

        from pandas import DataFrame

        if not isinstance(df, DataFrame):
            raise ValueError("Expected DataFrame instance, found {}".format(
                type(df)))

        file_name = "df_{id}_{timestamp}_{pid}.csv.gz".format(id=id(df),
                                                              timestamp=int(
                                                                  time.time()),
                                                              pid=os.getpid())
        temp_file_path = os.path.join(tempfile.gettempdir(), file_name)

        df.to_csv(temp_file_path,
                  index=False,
                  encoding="utf-8",
                  header=False,
                  compression="gzip")

        fd = FileDescriptor(temp_file_path)

        blob_name = "{db}__{table}__{guid}__{file}".format(
            db=ingestion_properties.database,
            table=ingestion_properties.table,
            guid=uuid.uuid4(),
            file=file_name)

        containers = self._resource_manager.get_containers()
        container_details = random.choice(containers)
        storage_client = CloudStorageAccount(
            container_details.storage_account_name,
            sas_token=container_details.sas)
        blob_service = storage_client.create_block_blob_service()

        blob_service.create_blob_from_path(
            container_name=container_details.object_name,
            blob_name=blob_name,
            file_path=temp_file_path)

        url = blob_service.make_blob_url(container_details.object_name,
                                         blob_name,
                                         sas_token=container_details.sas)

        self.ingest_from_blob(BlobDescriptor(url, fd.size),
                              ingestion_properties=ingestion_properties)

        fd.delete_files()
        os.unlink(temp_file_path)
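A minimal usage sketch for ingest_from_dataframe; the client instance and target database/table are hypothetical placeholders:

# hypothetical usage sketch; names are placeholders
import pandas as pd
df = pd.DataFrame({"Id": [1, 2], "Value": ["a", "b"]})
props = IngestionProperties(database="MyDatabase", table="MyTable")
client.ingest_from_dataframe(df, ingestion_properties=props)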
Example #37
    def test_create_account_sas(self):
        # Arrange

        # Act
        sas_account = CloudStorageAccount(self.account_name, sas_token=self.sas_token)
        service = sas_account.create_block_blob_service()

        # Assert
        self.assertIsNotNone(service)
        self.assertEqual(service.account_name, self.account_name)
        self.assertIsNone(service.account_key)
        self.assertEqual(service.sas_token, self.sas_token)
Example #38
    def create_account_sas_definition(self, storage_account_name, vault_url):
        """
        Creates an account sas definition, to manage storage account and its entities.
        """
        from azure.storage.common import SharedAccessSignature, CloudStorageAccount
        from azure.keyvault.models import SasTokenType, SasDefinitionAttributes
        from azure.keyvault import SecretId

        # To create an account sas definition in the vault we must first create the template. The
        # template_uri for an account sas definition is the intended account sas token signed with an arbitrary key.
        # Use the SharedAccessSignature class from azure.storage.common to create an account sas token
        sas = SharedAccessSignature(account_name=storage_account_name,
                                    # don't sign the template with the storage account key; use key 00000000
                                    account_key='00000000')
        account_sas_template = sas.generate_account(
            services='bfqt',  # all services: blob, file, queue, and table
            resource_types='sco',  # all resource types: service, container, object
            permission='acdlpruw',
            # all permissions: add, create, delete, list, process, read, update, write
            expiry='2020-01-01')  # expiry will be ignored; the validity period will determine token expiry

        # use the created template to create a sas definition in the vault
        attributes = SasDefinitionAttributes(enabled=True)
        sas_def = self.client.set_sas_definition(
            vault_base_url=vault_url,
            storage_account_name=storage_account_name,
            sas_definition_name='acctall',
            template_uri=account_sas_template,
            sas_type=SasTokenType.account,
            validity_period='PT2H',
            sas_definition_attributes=attributes)

        # When the sas definition is created, a corresponding managed secret is also created in the
        # vault. This secret is used to provision sas tokens according to the sas definition. Users
        # retrieve the sas token via the get_secret method.

        # get the secret id from the returned SasDefinitionBundle
        sas_secret_id = SecretId(uri=sas_def.secret_id)
        # call get_secret and the value of the returned SecretBundle will be a newly issued sas token
        acct_sas_token = self.client.get_secret(
            vault_base_url=sas_secret_id.vault,
            secret_name=sas_secret_id.name,
            secret_version=sas_secret_id.version).value

        # create the cloud storage account object
        cloud_storage_account = CloudStorageAccount(account_name=storage_account_name,
                                                    sas_token=acct_sas_token)

        # create a blob with the account sas token
        blob_service = cloud_storage_account.create_block_blob_service()
        blob_service.create_container('blobcontainer')
        blob_service.create_blob_from_text(container_name='blobcontainer',
                                           blob_name='blob1',
                                           text=u'test blob1 data')
Example #39
class StorageAccountTest(StorageTestCase):
    def setUp(self):
        super(StorageAccountTest, self).setUp()
        self.account_name = self.settings.STORAGE_ACCOUNT_NAME
        self.account_key = self.settings.STORAGE_ACCOUNT_KEY
        self.sas_token = '?sv=2015-04-05&st=2015-04-29T22%3A18%3A26Z&se=2015-04-30T02%3A23%3A26Z&sr=b&sp=rw&sip=168.1.5.60-168.1.5.70&spr=https&sig=Z%2FRHIX5Xcg0Mq2rqI3OlWTjEg2tYkboXr1P9ZUXDtkk%3D'
        self.account = CloudStorageAccount(self.account_name, self.account_key)

    # --Helpers-----------------------------------------------------------------
    def validate_service(self, service, type):
        self.assertIsNotNone(service)
        self.assertIsInstance(service, type)
        self.assertEqual(service.account_name, self.account_name)
        self.assertEqual(service.account_key, self.account_key)

    # --Test cases --------------------------------------------------------
    def test_create_block_blob_service(self):
        # Arrange

        # Act
        service = self.account.create_block_blob_service()

        # Assert
        self.validate_service(service, BlockBlobService)

    def test_create_page_blob_service(self):
        # Arrange

        # Act
        service = self.account.create_page_blob_service()

        # Assert
        self.validate_service(service, PageBlobService)

    def test_create_append_blob_service(self):
        # Arrange

        # Act
        service = self.account.create_append_blob_service()

        # Assert
        self.validate_service(service, AppendBlobService)

    def test_create_queue_service(self):
        # Arrange

        # Act
        service = self.account.create_queue_service()

        # Assert
        self.validate_service(service, QueueService)

    def test_create_file_service(self):
        # Arrange

        # Act
        service = self.account.create_file_service()

        # Assert
        self.validate_service(service, FileService)

    def test_create_service_no_key(self):
        # Arrange

        # Act
        bad_account = CloudStorageAccount('', '')
        with self.assertRaises(ValueError):
            service = bad_account.create_block_blob_service()

            # Assert

    def test_create_account_sas(self):
        # Arrange

        # Act
        sas_account = CloudStorageAccount(self.account_name, sas_token=self.sas_token)
        service = sas_account.create_block_blob_service()

        # Assert
        self.assertIsNotNone(service)
        self.assertEqual(service.account_name, self.account_name)
        self.assertIsNone(service.account_key)
        self.assertEqual(service.sas_token, self.sas_token)

    def test_create_account_sas_and_key(self):
        # Arrange

        # Act
        account = CloudStorageAccount(self.account_name, self.account_key, self.sas_token)
        service = account.create_block_blob_service()

        # Assert
        self.validate_service(service, BlockBlobService)

    def test_create_account_emulated(self):
        # Arrange

        # Act
        account = CloudStorageAccount(is_emulated=True)
        service = account.create_block_blob_service()

        # Assert
        self.assertIsNotNone(service)
        self.assertEqual(service.account_name, 'devstoreaccount1')
        self.assertIsNotNone(service.account_key)

    @record
    def test_generate_account_sas(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        token = self.account.generate_shared_access_signature(
            Services.BLOB,
            ResourceTypes.OBJECT,
            AccountPermissions.READ,
            datetime.utcnow() + timedelta(hours=1),
        )

        service = self.account.create_block_blob_service()
        data = b'shared access signature with read permission on blob'
        container_name = 'container1'
        blob_name = 'blob1.txt'

        try:
            service.create_container(container_name)
            service.create_blob_from_bytes(container_name, blob_name, data)

            # Act
            url = service.make_blob_url(
                container_name,
                blob_name,
                sas_token=token,
            )
            response = requests.get(url)

            # Assert
            self.assertTrue(response.ok)
            self.assertEqual(data, response.content)
        finally:
            service.delete_container(container_name)