Example #1
    def test_create_service_no_key(self):
        # Arrange

        # Act
        bad_account = CloudStorageAccount('', '')
        with self.assertRaises(ValueError):
            service = bad_account.create_block_blob_service()
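For contrast, a minimal sketch of the passing case this test guards, using placeholder credentials (the same pattern appears in the key_auth example further down this page):

# With a non-empty account name and key the factory succeeds (placeholders shown).
account = CloudStorageAccount('<account_name>', '<account_key>')
service = account.create_block_blob_service()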
Example #2
def make_blob_client(secrets):
    """
        Creates a blob client object
        :param str storage_account_key: storage account key
        :param str storage_account_name: storage account name
        :param str storage_account_suffix: storage account suffix
    """

    if secrets.shared_key:
        # Set up SharedKeyCredentials
        blob_client = blob.BlockBlobService(
            account_name=secrets.shared_key.storage_account_name,
            account_key=secrets.shared_key.storage_account_key,
            endpoint_suffix=secrets.shared_key.storage_account_suffix)
    else:
        # Set up ServicePrincipalCredentials
        arm_credentials = ServicePrincipalCredentials(
            client_id=secrets.service_principal.client_id,
            secret=secrets.service_principal.credential,
            tenant=secrets.service_principal.tenant_id,
            resource='https://management.core.windows.net/')
        m = RESOURCE_ID_PATTERN.match(
            secrets.service_principal.storage_account_resource_id)
        accountname = m.group('account')
        subscription = m.group('subscription')
        resourcegroup = m.group('resourcegroup')
        mgmt_client = StorageManagementClient(arm_credentials, subscription)
        key = mgmt_client.storage_accounts.list_keys(
            resource_group_name=resourcegroup,
            account_name=accountname).keys[0].value
        storage_client = CloudStorageAccount(accountname, key)
        blob_client = storage_client.create_block_blob_service()

    return blob_client
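A usage sketch, assuming a hypothetical secrets object whose fields mirror the attributes the function reads (these namedtuples are illustrative, not the project's real configuration classes):

from collections import namedtuple

SharedKey = namedtuple(
    'SharedKey',
    ['storage_account_name', 'storage_account_key', 'storage_account_suffix'])
Secrets = namedtuple('Secrets', ['shared_key', 'service_principal'])

secrets = Secrets(
    shared_key=SharedKey('<account_name>', '<account_key>', 'core.windows.net'),
    service_principal=None)
blob_client = make_blob_client(secrets)  # takes the shared-key branch
blob_client.create_container('example-container')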
Example #3
    def ingest_from_file(self, file_descriptor: Union[FileDescriptor, str], ingestion_properties: IngestionProperties):
        """
        Enqueue an ingest command from a local file.
        To learn more about ingestion methods go to:
        https://docs.microsoft.com/en-us/azure/data-explorer/ingest-data-overview#ingestion-methods
        :param file_descriptor: a FileDescriptor or file path to be ingested.
        :param azure.kusto.ingest.IngestionProperties ingestion_properties: Ingestion properties.
        """
        containers = self._resource_manager.get_containers()

        if isinstance(file_descriptor, FileDescriptor):
            descriptor = file_descriptor
        else:
            descriptor = FileDescriptor(file_descriptor)

        should_compress = not (
            ingestion_properties.format in [DataFormat.AVRO, DataFormat.ORC, DataFormat.PARQUET]
            or descriptor.path.endswith(".gz")
            or descriptor.path.endswith(".zip")
        )

        with descriptor.open(should_compress) as stream:
            blob_name = "{db}__{table}__{guid}__{file}".format(
                db=ingestion_properties.database, table=ingestion_properties.table, guid=descriptor.source_id or uuid.uuid4(), file=descriptor.stream_name
            )

            container_details = random.choice(containers)
            storage_client = CloudStorageAccount(container_details.storage_account_name, sas_token=container_details.sas)
            blob_service = storage_client.create_block_blob_service()

            blob_service.create_blob_from_stream(container_name=container_details.object_name, blob_name=blob_name, stream=stream)
            url = blob_service.make_blob_url(container_details.object_name, blob_name, sas_token=container_details.sas)

            self.ingest_from_blob(BlobDescriptor(url, descriptor.size, descriptor.source_id), ingestion_properties=ingestion_properties)
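A hedged usage sketch; client stands for an instance of the ingest client this method belongs to, and the IngestionProperties keyword names are illustrative since they vary across azure-kusto-ingest versions:

# Illustrative only: enqueue a local CSV file for ingestion.
props = IngestionProperties(database='mydb', table='mytable',
                            dataFormat=DataFormat.CSV)
client.ingest_from_file('data.csv', ingestion_properties=props)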
Example #4
    def sas_auth(self):
        # With account
        account = CloudStorageAccount(account_name='<account_name>', sas_token='<sas_token>')
        client = account.create_block_blob_service()

        # Directly
        client = BlockBlobService(account_name='<account_name>', sas_token='<sas_token>')
Example #5
    def emulator(self):
        # With account
        account = CloudStorageAccount(is_emulated=True)
        client = account.create_block_blob_service()

        # Directly
        client = BlockBlobService(is_emulated=True)
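Both forms target the local storage emulator (or Azurite), which uses the fixed account name devstoreaccount1, as the emulator test later on this page asserts. A quick sanity check, assuming the emulator is running:

account = CloudStorageAccount(is_emulated=True)
client = account.create_block_blob_service()
client.create_container('local-test')  # hits the local emulator, not Azure
print(client.account_name)             # 'devstoreaccount1'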
Example #6
def retrieve():
    timestamp = request.args.get('timestamp')
    name = request.args.get('name')

    configuration = getConfiguration()
    f = open(configuration['debug_file'], 'a')
    log(f, 'Retrieving - ' + name + '/' + timestamp)

    account = CloudStorageAccount(account_name=configuration['account_name'],
                                  account_key=configuration['account_key'])

    service = account.create_block_blob_service()

    stream = io.BytesIO()

    service.get_blob_to_stream(container_name=configuration['container_name'],
                               blob_name=name + '/' + timestamp +
                               '/output.csv.gz',
                               stream=stream)

    content = zlib.decompress(stream.getbuffer())

    log(f, 'Retrieved - ' + name + '/' + timestamp)

    return content
Example #7
    def key_auth(self):
        # With account
        account = CloudStorageAccount(account_name='<account_name>', account_key='<account_key>')
        client = account.create_block_blob_service()

        # Directly
        client = BlockBlobService(account_name='<account_name>', account_key='<account_key>')
Example #8
def shares():
    # Create Container and Share
    global storage_account_key, blob_service, blob_share, file_service, file_share
    sak = storage_client.storage_accounts.list_keys(resourcegroupname,
                                                    storageaccountname)
    storage_account_key = sak.keys[0].value
    cloudstorage_client = CloudStorageAccount(storageaccountname,
                                              storage_account_key)
    blob_service = cloudstorage_client.create_block_blob_service()
    blob_share = blob_service.create_container(
        sharename, public_access=PublicAccess.Container)
    file_service = FileService(account_name=storageaccountname,
                               account_key=storage_account_key)
    file_share = file_service.create_share(sharename)
    # Copy Setup Files to Container and Share
    blob_service.create_blob_from_path(
        sharename,
        filename,
        filename,
    )
    file_service.create_file_from_path(
        sharename,
        '',
        filename,
        filename,
    )
Example #9
    def emulator(self):
        # With account
        account = CloudStorageAccount(is_emulated=True)
        client = account.create_block_blob_service()

        # Directly
        client = BlockBlobService(is_emulated=True)
Example #10
    def test_create_service_no_key(self):
        # Arrange

        # Act
        bad_account = CloudStorageAccount('', '')
        with self.assertRaises(ValueError):
            service = bad_account.create_block_blob_service()
Example #11
def main():
    storage_client = CloudStorageAccount(account_name=storage_account_name,
                                         account_key=storage_account_key)
    blob_service = storage_client.create_block_blob_service()

    years = range(2016, 2019)
    months = range(1, 13)
    days = range(1, 32)
    hours = range(0, 24)
    url_path = "{y}-{m:02d}-{d:02d}-{h}.json.gz"

    for y in years:
        for m in months:
            for d in days:
                for h in hours:
                    p = url_path.format(y=y, m=m, d=d, h=h)

                    if os.path.exists(p + "_done"):
                        print("{} ingested. skipping...".format(p))
                        continue

                    ingest(
                        "https://{}.blob.core.windows.net/{}/{};{}".format(
                            storage_account_name, storage_container, p,
                            storage_account_key),
                        50 * 1024 * 1024,
                    )

                    with open(p + "_done", "w+") as f:
                        f.write(" ")
Example #12
def store_summary(f, file_name, summary):
    configuration = getConfiguration()

    log(f, 'Account Name: ' + configuration['account_name'])
    log(f, 'Container Name: ' + configuration['container_name'])

    account = CloudStorageAccount(account_name=configuration['account_name'],
                                  account_key=configuration['account_key'])

    folder = configuration['default_folder_name']

    service = account.create_block_blob_service()

    service.create_container(configuration['container_name'])

    log(f, 'Storing Content')
    summary_file = folder + '/' + summary['timestamp'] + '/summary.json'
    summary['summary_file_name'] = summary_file

    service.create_blob_from_stream(configuration['container_name'],
                                    summary['summary_file_name'],
                                    io.BytesIO(json.dumps(summary).encode()))

    log(f, 'Stored: ' + file_name)

    os.remove(file_name)

    log(
        f, 'Completed (Log) : ' + file_name + ' - ' +
        configuration['default_folder_name'] + ' - ' + summary['timestamp'])

    return
Example #13
def handle(ctx, params):

    account = CloudStorageAccount(
        account_name=ctx["secrets"]["azure_account_name"],
        account_key=ctx["secrets"]["azure_account_key"])
    svc = account.create_block_blob_service()

    source = ctx["event"]["source"]
    m = re.search(r"#/blobServices/default/containers/(.+?)/blobs/(.+)$",
                  source)
    if not m:
        return {"error": "could not parse source %s" % source}
    container, name = m.groups()

    blob = svc.get_blob_to_bytes(container_name=container, blob_name=name)

    rek = boto3.client("rekognition",
                       region_name="us-west-2",
                       aws_access_key_id=ctx["serviceBindings"]["rekognition"]
                       ["REKOGNITION_AWS_ACCESS_KEY_ID"],
                       aws_secret_access_key=ctx["serviceBindings"]
                       ["rekognition"]["REKOGNITION_AWS_SECRET_ACCESS_KEY"])
    rek_resp = rek.detect_labels(Image={'Bytes': blob.content})

    print("Detected labels in " + name)

    fields = []
    metadata = {}
    for label in rek_resp["Labels"]:
        metadata[label["Name"].replace(" ", "")] = str(label["Confidence"])
        fields.append({
            "title": label["Name"],
            "value": label["Confidence"],
            "short": True
        })

    svc.create_blob_from_bytes("dispatchpubliccontainer",
                               blob.name,
                               blob.content,
                               metadata=metadata)

    print("Done... %s" % metadata)

    message = {
        "attachments": [{
            "text":
            "File %s just pushed to %s" % (name, container),
            "fields":
            fields,
            "image_url":
            "https://%s.blob.core.windows.net/dispatchpubliccontainer/%s" %
            (ctx["secrets"]["azure_account_name"], name),
            "color":
            "#F35A00"
        }]
    }

    requests.post(ctx["secrets"]["webhook-url"], json=message)
    return message
Example #14
    def __init__(self, account: CloudStorageAccount):
        self._logger = logging.getLogger("liteflow.providers.azure")
        self._leases = {}
        self._service = account.create_block_blob_service()
        self._service.create_container(self.container_name,
                                       public_access=PublicAccess.Blob)
        self._lock = threading.Lock()
        self._renew_timer = threading.Timer(30, self.renew_leases)
        self._renew_timer.start()
Example #15
    def create_account_sas_definition(self):
        """
        Creates an account sas definition, to manage storage account and its entities.
        """
        from azure.storage.common import SharedAccessSignature, CloudStorageAccount
        from azure.keyvault.models import SasTokenType, SasDefinitionAttributes
        from azure.keyvault import SecretId

        # To create an account sas definition in the vault we must first create the template. The
        # template_uri for an account sas definition is the intended account sas token signed with an arbitrary key.
        # Use the SharedAccessSignature class from azure.storage.common to create an account sas token
        sas = SharedAccessSignature(
            account_name=self.config.storage_account_name,
            # don't sign the template with the storage account key; use key 00000000
            account_key='00000000')
        account_sas_template = sas.generate_account(
            services='bfqt',  # all services blob, file, queue and table
            resource_types='sco',  # all resources service, template, object
            permission='acdlpruw',
            # all permissions add, create, list, process, read, update, write
            expiry='2020-01-01'
        )  # expiry will be ignored and validity period will determine token expiry

        # use the created template to create a sas definition in the vault
        attributes = SasDefinitionAttributes(enabled=True)
        sas_def = self.keyvault_client.set_sas_definition(
            vault_base_url=self.sample_vault_url,
            storage_account_name=self.config.storage_account_name,
            sas_definition_name='acctall',
            template_uri=account_sas_template,
            sas_type=SasTokenType.account,
            validity_period='PT2H',
            sas_definition_attributes=attributes)

        # When the sas definition is created, a corresponding managed secret is also created in the
        # vault. This secret is used to provision sas tokens according to the sas definition. Users
        # retrieve the sas token via the get_secret method.

        # get the secret id from the returned SasDefinitionBundle
        sas_secret_id = SecretId(uri=sas_def.secret_id)
        # call get_secret and the value of the returned SecretBundle will be a newly issued sas token
        acct_sas_token = self.keyvault_client.get_secret(
            vault_base_url=sas_secret_id.vault,
            secret_name=sas_secret_id.name,
            secret_version=sas_secret_id.version).value

        # create the cloud storage account object
        cloud_storage_account = CloudStorageAccount(
            account_name=self.config.storage_account_name,
            sas_token=acct_sas_token)

        # create a blob with the account sas token
        blob_service = cloud_storage_account.create_block_blob_service()
        blob_service.create_container('blobcontainer')
        blob_service.create_blob_from_text(container_name='blobcontainer',
                                           blob_name='blob1',
                                           text=u'test blob1 data')
Example #16
    def service(self):
        # This won't open a connection or anything,
        # it's akin to a client
        if self._service is None:
            account = CloudStorageAccount(self.account_name,
                                          self.account_key,
                                          is_emulated=self.is_emulated)
            self._service = account.create_block_blob_service()
        return self._service
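A self-contained sketch of the kind of class this cached property belongs to; everything except CloudStorageAccount and create_block_blob_service is hypothetical:

class BlobStorageBackend:
    # Hypothetical wrapper showing the lazy-initialization pattern above.
    def __init__(self, account_name, account_key, is_emulated=False):
        self.account_name = account_name
        self.account_key = account_key
        self.is_emulated = is_emulated
        self._service = None

    @property
    def service(self):
        # Build the client on first access, then reuse the cached instance.
        if self._service is None:
            account = CloudStorageAccount(self.account_name,
                                          self.account_key,
                                          is_emulated=self.is_emulated)
            self._service = account.create_block_blob_service()
        return self._service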
Example #17
    def test_create_account_sas_and_key(self):
        # Arrange

        # Act
        account = CloudStorageAccount(self.account_name, self.account_key, self.sas_token)
        service = account.create_block_blob_service()

        # Assert
        self.validate_service(service, BlockBlobService)
Example #18
    def public(self):
        # This applies to the blob services only
        # Public access must be enabled on the container or requests will fail

        # With account
        account = CloudStorageAccount(account_name='<account_name>')
        client = account.create_block_blob_service()

        # Directly
        client = BlockBlobService(account_name='<account_name>')
Example #19
    def service(self):
        # This won't open a connection or anything,
        # it's akin to a client
        if self._service is None:
            account = CloudStorageAccount(
                self.account_name,
                self.account_key,
                is_emulated=self.is_emulated)
            self._service = account.create_block_blob_service()
        return self._service
Example #20
    def test_create_account_sas_and_key(self):
        # Arrange

        # Act
        account = CloudStorageAccount(self.account_name, self.account_key,
                                      self.sas_token)
        service = account.create_block_blob_service()

        # Assert
        self.validate_service(service, BlockBlobService)
Example #21
def file_list_blob03(container_name, blob_path):
    storageName = 'dataplatdevblob03'
    storageKey = config.storage[storageName]
    storage = CloudStorageAccount(account_name=storageName,
                                  account_key=storageKey,
                                  sas_token=None,
                                  is_emulated=None)
    # Create a Block Blob Service object
    bbs = storage.create_block_blob_service()
    return (bbs.list_blobs(container_name, blob_path))
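A usage sketch with placeholder arguments; list_blobs yields Blob objects whose name attribute carries the full blob path:

for blob in file_list_blob03('my-container', 'raw/2019/'):
    print(blob.name)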
Example #22
    def public(self):
        # This applies to the blob services only
        # Public access must be enabled on the container or requests will fail

        # With account
        account = CloudStorageAccount(account_name='<account_name>')
        client = account.create_block_blob_service()

        # Directly
        client = BlockBlobService(account_name='<account_name>')
Example #23
def process():
    output = []

    try:
        configuration = getConfiguration()

        f = open(configuration['debug_file'], 'a')

        guid = request.values.get('guid')

        folder = configuration['default_folder_name']

        blob_name = folder + '/' + guid + ".log"

        file_name = request.args.get('file_name')

        summary = process_canlog(f, file_name)

        target_blob_name = folder + '/' + summary['timestamp'] + '/' + 'can.log'

        summary['status'] = 'uploaded'
        summary['container_name'] = configuration['container_name']
        summary['blob_name'] = target_blob_name
        summary['account_name'] = configuration['account_name']
        summary['queue_name'] = configuration['queue_name']
        summary['file_name'] = 'can.log'

        log(f, 'Renaming blob : ' + target_blob_name)

        account = CloudStorageAccount(
            account_name=configuration['account_name'],
            account_key=configuration['account_key'])

        service = account.create_block_blob_service()
        blob_url = service.make_blob_url(configuration['container_name'],
                                         blob_name)
        service.copy_blob(configuration['container_name'], target_blob_name,
                          blob_url)

        log(f, 'Deleting temporary blob : ' + blob_name)
        service.delete_blob(configuration['container_name'], blob_name)
        log(f, 'Storing Summary ' + blob_name)
        store_summary(f, file_name, summary)
        log(f, 'Sending Message ' + configuration['queue_name'])
        send_message(json.dumps(summary))
        log(f, 'Sent Message ' + configuration['queue_name'])

        return json.dumps(summary).encode()

    except Exception as e:
        log(f, str(e))
        f.close()
        output.append({"status": 'fail', "error": str(e)})

    return json.dumps(output, sort_keys=True)
Example #24
class AzureStorageService:
    def __init__(self):
        self.account = CloudStorageAccount(
            account_name=settings.AZURE_ACCOUNT_NAME,
            account_key=settings.AZURE_ACCOUNT_KEY)
        self.block_blob_service = self.account.create_block_blob_service()
        self.container_name = 'arches'

        self.blobs_that_need_fixing = {
            'packages/mapbox/': 'packages/@mapbox/',
            'packages/turf/': 'packages/@turf/'
        }

    def fix_blob_paths(self):
        num_workers = multiprocessing.cpu_count() - 1
        logger.debug(
            "Processing blob paths with a process pool of {0} nodes".format(
                num_workers))
        pool = ProcessPool(num_workers)

        for origin_prefix, target_prefix in self.blobs_that_need_fixing.items():
            blobs_under_prefix = self.block_blob_service.list_blobs(
                self.container_name, prefix=origin_prefix)
            arguments = [(origin_prefix, target_prefix, blob)
                         for blob in blobs_under_prefix]
            pool.map(self._fix_blob_path, arguments)

    def _fix_blob_path(self, arguments):
        (origin_prefix, target_prefix, blob) = arguments

        target_blob_name = self.resolve_target_blob_name(
            blob, origin_prefix, target_prefix)

        if not self.block_blob_service.exists(self.container_name,
                                              target_blob_name):
            self.copy_blob(blob.name, target_blob_name)

    def resolve_target_blob_name(self, blob, origin_prefix, target_prefix):
        path_after_prefix = os.path.relpath(blob.name, origin_prefix)
        return target_prefix + path_after_prefix

    def copy_blob(self, origin_blob_name, target_blob_name):
        print('Copying: {origin_blob_name}'.format(
            origin_blob_name=origin_blob_name))
        print('Target location: {target_blob_name}'.format(
            target_blob_name=target_blob_name))

        source_url = self.make_blob_url(origin_blob_name)
        self.block_blob_service.copy_blob(self.container_name,
                                          target_blob_name, source_url)

    def make_blob_url(self, blob_name):
        return self.block_blob_service.make_blob_url(self.container_name,
                                                     blob_name)
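Usage is two calls, assuming Django settings provide the account credentials the constructor expects:

service = AzureStorageService()
service.fix_blob_paths()  # copies every blob under each origin prefix to its target prefix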
Example #25
    def test_create_account_emulated(self):
        # Arrange

        # Act
        account = CloudStorageAccount(is_emulated=True)
        service = account.create_block_blob_service()

        # Assert
        self.assertIsNotNone(service)
        self.assertEqual(service.account_name, 'devstoreaccount1')
        self.assertIsNotNone(service.account_key)
Example #26
def initiate(f, file_name, guid):
    configuration = getConfiguration()

    f = open(configuration['debug_file'], 'a')
    log(f, 'Account Name: ' + configuration['account_name'])
    log(f, 'Container Name: ' + configuration['container_name'])

    input_zip = ZipFile(file_name, 'r')

    log(f, 'Initiating (Zip) : ' + file_name)

    folder = ''
    timestamp = '0'

    for name in input_zip.namelist():

        if (name.startswith('GPS.time.sec_BUSDAQ')):
            stream = input_zip.open(name)
            parser = FamosParser(f)
            parser.setIgnoreZero(True)
            parser.setLimit(1)

            # Parsing File
            parser.parse(stream)
            parser.summary()
            data = parser.getData()
            parts = re.search(r"_([0-9]*)?(\.raw)", name, re.DOTALL)
            folder = parts.group(1)

            iObs = 0

            while iObs < len(data):
                log(f, 'Obs: ' + str(iObs) + ' - ' + str(data[iObs]))
                if (data[iObs] > 0):
                    timestamp = re.sub(r'\..*', '', '%.7f' % data[iObs])
                    break

                iObs += 1

            log(f, 'Found Timestamp: ' + timestamp)

    summary = {"folder": folder, "timestamp": timestamp, "logs": guid + ".zip"}

    account = CloudStorageAccount(account_name=configuration['account_name'],
                                  account_key=configuration['account_key'])

    service = account.create_block_blob_service()
    service.create_blob_from_stream(configuration['container_name'],
                                    folder + '/' + timestamp + '/status.json',
                                    io.BytesIO(json.dumps(summary).encode()))

    log(f, 'Initiated (Zip) : ' + file_name + ' - ' + folder + '/' + timestamp)

    return summary
Example #27
    def ingest_from_dataframe(self, df, ingestion_properties):
        """Enqueuing an ingest command from local files.
        :param pandas.DataFrame df: input dataframe to ingest.
        :param azure.kusto.ingest.IngestionProperties ingestion_properties: Ingestion properties.
        
        """

        from pandas import DataFrame

        if not isinstance(df, DataFrame):
            raise ValueError("Expected DataFrame instance, found {}".format(
                type(df)))

        file_name = "df_{id}_{timestamp}_{pid}.csv.gz".format(id=id(df),
                                                              timestamp=int(
                                                                  time.time()),
                                                              pid=os.getpid())
        temp_file_path = os.path.join(tempfile.gettempdir(), file_name)

        df.to_csv(temp_file_path,
                  index=False,
                  encoding="utf-8",
                  header=False,
                  compression="gzip")

        fd = FileDescriptor(temp_file_path)

        blob_name = "{db}__{table}__{guid}__{file}".format(
            db=ingestion_properties.database,
            table=ingestion_properties.table,
            guid=uuid.uuid4(),
            file=file_name)

        containers = self._resource_manager.get_containers()
        container_details = random.choice(containers)
        storage_client = CloudStorageAccount(
            container_details.storage_account_name,
            sas_token=container_details.sas)
        blob_service = storage_client.create_block_blob_service()

        blob_service.create_blob_from_path(
            container_name=container_details.object_name,
            blob_name=blob_name,
            file_path=temp_file_path)

        url = blob_service.make_blob_url(container_details.object_name,
                                         blob_name,
                                         sas_token=container_details.sas)

        self.ingest_from_blob(BlobDescriptor(url, fd.size),
                              ingestion_properties=ingestion_properties)

        fd.delete_files()
        os.unlink(temp_file_path)
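A hedged usage sketch; client stands for an instance of the ingest client this method belongs to, and the IngestionProperties keyword names are illustrative since they vary across azure-kusto-ingest versions:

import pandas as pd

df = pd.DataFrame({'id': [1, 2], 'value': ['a', 'b']})
props = IngestionProperties(database='mydb', table='mytable')
client.ingest_from_dataframe(df, ingestion_properties=props)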
Example #28
    def test_create_account_emulated(self):
        # Arrange

        # Act
        account = CloudStorageAccount(is_emulated=True)
        service = account.create_block_blob_service()

        # Assert
        self.assertIsNotNone(service)
        self.assertEqual(service.account_name, 'devstoreaccount1')
        self.assertIsNotNone(service.account_key)
Example #29
    def test_create_account_sas(self):
        # Arrange

        # Act
        sas_account = CloudStorageAccount(self.account_name, sas_token=self.sas_token)
        service = sas_account.create_block_blob_service()

        # Assert
        self.assertIsNotNone(service)
        self.assertEqual(service.account_name, self.account_name)
        self.assertIsNone(service.account_key)
        self.assertEqual(service.sas_token, self.sas_token)
Example #30
    def test_create_account_sas(self):
        # Arrange

        # Act
        sas_account = CloudStorageAccount(self.account_name,
                                          sas_token=self.sas_token)
        service = sas_account.create_block_blob_service()

        # Assert
        self.assertIsNotNone(service)
        self.assertEqual(service.account_name, self.account_name)
        self.assertIsNone(service.account_key)
        self.assertEqual(service.sas_token, self.sas_token)
Example #31
def upload_to_azure_storage(path):
    storage_client = CloudStorageAccount(account_name=account_name,
                                         account_key=account_key)
    blob_service = storage_client.create_block_blob_service()

    if len(list(blob_service.list_blobs(container_name, path))) > 0 and len(
            list(blob_service.list_blobs(container_name, path + ".gz"))) == 0:
        print("{} exists on azure storage, skipping...".format(path))
    else:
        blob_service.create_blob_from_path(container_name=container_name,
                                           blob_name=path,
                                           file_path=path)
        print("uploaded to storage {}".format(path))
Example #32
    def create_account_sas_definition(self, storage_account_name, vault_url):
        """
        Creates an account sas definition, to manage storage account and its entities.
        """
        from azure.storage.common import SharedAccessSignature, CloudStorageAccount
        from azure.keyvault.models import SasTokenType, SasDefinitionAttributes
        from azure.keyvault import SecretId

        # To create an account sas definition in the vault we must first create the template. The
        # template_uri for an account sas definition is the intended account sas token signed with an arbitrary key.
        # Use the SharedAccessSignature class from azure.storage.common to create an account sas token
        sas = SharedAccessSignature(account_name=storage_account_name,
                                    # don't sign the template with the storage account key; use key 00000000
                                    account_key='00000000')
        account_sas_template = sas.generate_account(services='bfqt',  # all services blob, file, queue and table
                                                    resource_types='sco',  # all resources service, template, object
                                                    permission='acdlpruw',
                                                    # all permissions add, create, list, process, read, update, write
                                                    expiry='2020-01-01')  # expiry will be ignored and validity period will determine token expiry

        # use the created template to create a sas definition in the vault
        attributes = SasDefinitionAttributes(enabled=True)
        sas_def = self.client.set_sas_definition(vault_base_url=vault_url,
                                                          storage_account_name=storage_account_name,
                                                          sas_definition_name='acctall',
                                                          template_uri=account_sas_template,
                                                          sas_type=SasTokenType.account,
                                                          validity_period='PT2H',
                                                          sas_definition_attributes=attributes)

        # When the sas definition is created, a corresponding managed secret is also created in the
        # vault. This secret is used to provision sas tokens according to the sas definition. Users
        # retrieve the sas token via the get_secret method.

        # get the secret id from the returned SasDefinitionBundle
        sas_secret_id = SecretId(uri=sas_def.secret_id)
        # call get_secret and the value of the returned SecretBundle will be a newly issued sas token
        acct_sas_token = self.client.get_secret(vault_base_url=sas_secret_id.vault,
                                                         secret_name=sas_secret_id.name,
                                                         secret_version=sas_secret_id.version).value

        # create the cloud storage account object
        cloud_storage_account = CloudStorageAccount(account_name=storage_account_name,
                                                    sas_token=acct_sas_token)

        # create a blob with the account sas token
        blob_service = cloud_storage_account.create_block_blob_service()
        blob_service.create_container('blobcontainer')
        blob_service.create_blob_from_text(container_name='blobcontainer',
                                           blob_name='blob1',
                                           text=u'test blob1 data')
Example #33
def process():
    output = []

    try:
        configuration = getConfiguration()
        f = open(configuration['debug_file'], 'a')

        file_name = request.args.get('file_name')
        guid = request.args.get('guid')

        summary = initiate(f, file_name, guid)

        account = CloudStorageAccount(
            account_name=configuration['account_name'],
            account_key=configuration['account_key'])
        service = account.create_block_blob_service()

        blob_name = summary['folder'] + '/' + guid + ".zip"
        target_blob_name = summary['folder'] + '/' + summary[
            'timestamp'] + '/' + configuration['zip_file_name']

        log(f, 'Renaming blob : ' + target_blob_name)

        blob_url = service.make_blob_url(configuration['container_name'],
                                         blob_name)
        service.copy_blob(configuration['container_name'], target_blob_name,
                          blob_url)

        log(f, 'Deleting temporary blob : ' + blob_name)

        service.delete_blob(configuration['container_name'], blob_name)

        log(
            f, 'Submitted (Zip) for processing: ' + file_name + ' - ' +
            summary['folder'] + ' - ' + summary['timestamp'])

        p = mp.Process(target=store,
                       args=(file_name, summary['folder'],
                             summary['timestamp'], guid, target_blob_name))
        p.start()

        return json.dumps(summary).encode()

    except Exception as e:
        log(f, str(e))
        f.close()
        output.append({"status": 'fail', "error": str(e)})

    return json.dumps(output, sort_keys=True)
Example #34
def list():
    configuration = getConfiguration()

    print()
    f = open(configuration['debug_file'], 'a')

    folder = request.args.get('folder')

    try:
        log(f, 'Listing Files - request received - [' + folder + ']')

        account = CloudStorageAccount(
            account_name=configuration['account_name'],
            account_key=configuration['account_key'])

        service = account.create_block_blob_service()

        service.create_container(configuration['container_name'])

        output = []

        blobs = service.list_blobs(configuration['container_name'])

        for blob in blobs:
            data = re.match(r"(.*)/(.*)/([s][u|t].*\.json)", blob.name,
                            re.DOTALL)

            if data and (folder == '' or folder == data.group(1)):
                output.append({
                    "summary_file": blob.name,
                    "folder": data.group(1),
                    "timestamp": data.group(2),
                    "file_name": data.group(3)
                })

        f.close()

        return json.dumps(output, sort_keys=True)

    except Exception as e:
        log(f, str(e))

        f.close()
        return ""
Example #35
def get_blob_service(storage_account_name, config_path):
    account_key = get_azure_account_key(storage_account_name,
                                        config_path=config_path)
    if account_key is None:
        message = [
            "Cannot find key for '{}' azure account".format(
                storage_account_name),
            "Hint: export DTOOL_AZURE_ACCOUNT_KEY_{}=azure_key".format(
                storage_account_name),
        ]

        raise (KeyError(". ".join(message)))

    account = CloudStorageAccount(account_name=storage_account_name,
                                  account_key=account_key)

    return account.create_block_blob_service()
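A usage sketch; judging from the hint in the error message, the account key can be supplied through an environment variable (all names below are placeholders):

import os

# Placeholder account; the hint above suggests this environment-variable form.
os.environ['DTOOL_AZURE_ACCOUNT_KEY_mystorageaccount'] = '<account_key>'
blob_service = get_blob_service('mystorageaccount',
                                config_path='~/.config/dtool/dtool.json')
print(list(blob_service.list_containers()))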
Example #36
    def ingest_from_multiple_files(self, files, delete_sources_on_success,
                                   ingestion_properties):
        """
        Enqueue an ingest command from local files.

        Parameters
        ----------
        files : List of FileDescriptor or file paths.
            The list of files to be ingested.
        delete_sources_on_success : bool.
            After a successful ingest, whether to delete the origin files.
        ingestion_properties : kusto_ingest_client.ingestion_properties.IngestionProperties
            The ingestion properties.
        """
        blobs = list()
        file_descriptors = list()
        for file in files:
            if isinstance(file, FileDescriptor):
                descriptor = file
            else:
                descriptor = FileDescriptor(
                    file, deleteSourcesOnSuccess=delete_sources_on_success)
            file_descriptors.append(descriptor)
            blob_name = (ingestion_properties.database + "__" +
                         ingestion_properties.table + "__" +
                         str(uuid.uuid4()) + "__" + descriptor.stream_name)
            containers = self._resource_manager.get_containers()
            container_details = random.choice(containers)
            storage_client = CloudStorageAccount(
                container_details.storage_account_name,
                sas_token=container_details.sas)
            blob_service = storage_client.create_block_blob_service()
            blob_service.create_blob_from_stream(
                container_name=container_details.object_name,
                blob_name=blob_name,
                stream=descriptor.zipped_stream,
            )
            url = blob_service.make_blob_url(container_details.object_name,
                                             blob_name,
                                             sas_token=container_details.sas)
            blobs.append(BlobDescriptor(url, descriptor.size))
        self.ingest_from_multiple_blobs(blobs, delete_sources_on_success,
                                        ingestion_properties)
        for descriptor in file_descriptors:
            descriptor.delete_files(True)
Example #37
def get_blob_client() -> blob.BlockBlobService:
    if not storage_resource_id:
        return blob.BlockBlobService(account_name=storage_account_name,
                                     account_key=storage_account_key,
                                     endpoint_suffix=storage_account_suffix)
    else:
        credentials = ServicePrincipalCredentials(
            client_id=client_id,
            secret=credential,
            tenant=tenant_id,
            resource='https://management.core.windows.net/')
        m = RESOURCE_ID_PATTERN.match(storage_resource_id)
        accountname = m.group('account')
        subscription = m.group('subscription')
        resourcegroup = m.group('resourcegroup')
        mgmt_client = StorageManagementClient(credentials, subscription)
        key = mgmt_client.storage_accounts.list_keys(
            resource_group_name=resourcegroup,
            account_name=accountname).keys[0].value
        storage_client = CloudStorageAccount(accountname, key)
        return storage_client.create_block_blob_service()
Example #38
    def ingest_from_file(self, file, ingestion_properties):
        """Enqueuing an ingest command from local files.
        :param files: List of FileDescriptor or file paths. The list of files to be ingested.
        :param azure.kusto.ingest.IngestionProperties ingestion_properties: Ingestion properties.
       
        """
        file_descriptors = list()
        containers = self._resource_manager.get_containers()

        if isinstance(file, FileDescriptor):
            descriptor = file
        else:
            descriptor = FileDescriptor(file)

        file_descriptors.append(descriptor)
        blob_name = "{db}__{table}__{guid}__{file}".format(
            db=ingestion_properties.database,
            table=ingestion_properties.table,
            guid=uuid.uuid4(),
            file=descriptor.stream_name,
        )

        container_details = random.choice(containers)
        storage_client = CloudStorageAccount(
            container_details.storage_account_name,
            sas_token=container_details.sas)
        blob_service = storage_client.create_block_blob_service()

        blob_service.create_blob_from_stream(
            container_name=container_details.object_name,
            blob_name=blob_name,
            stream=descriptor.zipped_stream)
        url = blob_service.make_blob_url(container_details.object_name,
                                         blob_name,
                                         sas_token=container_details.sas)

        self.ingest_from_blob(BlobDescriptor(url, descriptor.size),
                              ingestion_properties=ingestion_properties)
Example #39
class StorageAccountTest(StorageTestCase):
    def setUp(self):
        super(StorageAccountTest, self).setUp()
        self.account_name = self.settings.STORAGE_ACCOUNT_NAME
        self.account_key = self.settings.STORAGE_ACCOUNT_KEY
        self.sas_token = '?sv=2015-04-05&st=2015-04-29T22%3A18%3A26Z&se=2015-04-30T02%3A23%3A26Z&sr=b&sp=rw&sip=168.1.5.60-168.1.5.70&spr=https&sig=Z%2FRHIX5Xcg0Mq2rqI3OlWTjEg2tYkboXr1P9ZUXDtkk%3D'
        self.account = CloudStorageAccount(self.account_name, self.account_key)

    # --Helpers-----------------------------------------------------------------
    def validate_service(self, service, type):
        self.assertIsNotNone(service)
        self.assertIsInstance(service, type)
        self.assertEqual(service.account_name, self.account_name)
        self.assertEqual(service.account_key, self.account_key)

    # --Test cases --------------------------------------------------------
    def test_create_block_blob_service(self):
        # Arrange

        # Act
        service = self.account.create_block_blob_service()

        # Assert
        self.validate_service(service, BlockBlobService)

    def test_create_page_blob_service(self):
        # Arrange

        # Act
        service = self.account.create_page_blob_service()

        # Assert
        self.validate_service(service, PageBlobService)

    def test_create_append_blob_service(self):
        # Arrange

        # Act
        service = self.account.create_append_blob_service()

        # Assert
        self.validate_service(service, AppendBlobService)

    def test_create_queue_service(self):
        # Arrange

        # Act
        service = self.account.create_queue_service()

        # Assert
        self.validate_service(service, QueueService)

    def test_create_file_service(self):
        # Arrange

        # Act
        service = self.account.create_file_service()

        # Assert
        self.validate_service(service, FileService)

    def test_create_service_no_key(self):
        # Arrange

        # Act
        bad_account = CloudStorageAccount('', '')
        with self.assertRaises(ValueError):
            service = bad_account.create_block_blob_service()

            # Assert

    def test_create_account_sas(self):
        # Arrange

        # Act
        sas_account = CloudStorageAccount(self.account_name, sas_token=self.sas_token)
        service = sas_account.create_block_blob_service()

        # Assert
        self.assertIsNotNone(service)
        self.assertEqual(service.account_name, self.account_name)
        self.assertIsNone(service.account_key)
        self.assertEqual(service.sas_token, self.sas_token)

    def test_create_account_sas_and_key(self):
        # Arrange

        # Act
        account = CloudStorageAccount(self.account_name, self.account_key, self.sas_token)
        service = account.create_block_blob_service()

        # Assert
        self.validate_service(service, BlockBlobService)

    def test_create_account_emulated(self):
        # Arrange

        # Act
        account = CloudStorageAccount(is_emulated=True)
        service = account.create_block_blob_service()

        # Assert
        self.assertIsNotNone(service)
        self.assertEqual(service.account_name, 'devstoreaccount1')
        self.assertIsNotNone(service.account_key)

    @record
    def test_generate_account_sas(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        token = self.account.generate_shared_access_signature(
            Services.BLOB,
            ResourceTypes.OBJECT,
            AccountPermissions.READ,
            datetime.utcnow() + timedelta(hours=1),
        )

        service = self.account.create_block_blob_service()
        data = b'shared access signature with read permission on blob'
        container_name = 'container1'
        blob_name = 'blob1.txt'

        try:
            service.create_container(container_name)
            service.create_blob_from_bytes(container_name, blob_name, data)

            # Act
            url = service.make_blob_url(
                container_name,
                blob_name,
                sas_token=token,
            )
            response = requests.get(url)

            # Assert
            self.assertTrue(response.ok)
            self.assertEqual(data, response.content)
        finally:
            service.delete_container(container_name)