def get_blob_client_by_uri(storage_uri):
    container_name, storage_name, key = StorageUtilities.get_storage_from_uri(storage_uri)

    blob_service = BlockBlobService(account_name=storage_name, account_key=key)
    blob_service.create_container(container_name)

    return blob_service, container_name
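A minimal usage sketch, assuming StorageUtilities.get_storage_from_uri resolves the container name, account name, and key from a URI (the URI below is a placeholder):

service, container = get_blob_client_by_uri('https://myaccount.blob.core.windows.net/mycontainer')
service.create_blob_from_text(container, 'hello.txt', 'hello world')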
def index(request):
    context = RequestContext(request)
    if request.method == 'POST':
        formobj = fileform(request.POST,request.FILES)
        if formobj.is_valid():
            modelobj = formobj.save(commit=False)
            modelobj.image=request.FILES['image']
            modelobj.save()

            mp3= request.FILES['image']

            block_blob_service = BlockBlobService(account_name=myaccount, account_key=mykey)
            block_blob_service.create_container('harshultest2', public_access=PublicAccess.Container)
            block_blob_service.set_container_acl('harshultest2', public_access=PublicAccess.Container)

            with open('t.png','wb+') as formfile:
                for chunk in mp3.chunks():
                    formfile.write(chunk)

            block_blob_service.create_blob_from_path(
                'harshultest2',
                str(request.FILES['image']),
                't.png',
                content_settings=ContentSettings(content_type='image/png'))

            return HttpResponseRedirect(reverse('list'))
    else:
        formobj = fileform()
        return render_to_response('app/myform.html', {'formobj': formobj},context)
def get_blob_client_by_uri(storage_uri, session):
    storage = StorageUtilities.get_storage_from_uri(storage_uri, session)

    blob_service = BlockBlobService(
        account_name=storage.storage_name,
        token_credential=storage.token)
    blob_service.create_container(storage.container_name)
    return blob_service, storage.container_name, storage.file_prefix
Example 4
    def block_blob_service(self):
        from azure.storage.blob import BlockBlobService, PublicAccess
        block_blob_service = BlockBlobService(
            connection_string=self.conn_string)
        if self.create_if_missing:
            block_blob_service.create_container(
                self.container,
                public_access=PublicAccess.Container if self.public else None
            )
        return block_blob_service
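For reference, a minimal sketch of the connection string format this property expects (account name and key are placeholders):

conn_string = (
    'DefaultEndpointsProtocol=https;'
    'AccountName=<account name>;'
    'AccountKey=<account key>;'
    'EndpointSuffix=core.windows.net'
)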
Example 5
class _BlobStorageFileHandler(object):

    def __init__(self,
                  account_name=None,
                  account_key=None,
                  protocol='https',
                  container='logs',
                  zip_compression=False,
                  max_connections=1,
                  max_retries=5,
                  retry_wait=1.0,
                  is_emulated=False):
        self.service = BlockBlobService(account_name=account_name,
                                        account_key=account_key,
                                        is_emulated=is_emulated,
                                        protocol=protocol)
        self.container_created = False
        hostname = gethostname()
        self.meta = {'hostname': hostname.replace('_', '-'),
                     'process': os.getpid()}
        self.container = (container % self.meta).lower()
        self.meta['hostname'] = hostname
        self.zip_compression = zip_compression
        self.max_connections = max_connections
        self.max_retries = max_retries
        self.retry_wait = retry_wait

    def put_file_into_storage(self, dirName, fileName):
        """
        Ship the outdated log file to the specified blob container.
        """
        if not self.container_created:
            self.service.create_container(self.container)
            self.container_created = True
        fd, tmpfile_path = None, ''
        try:
            file_path = os.path.join(dirName, fileName)
            if self.zip_compression:
                suffix, content_type = '.zip', 'application/zip'
                fd, tmpfile_path = mkstemp(suffix=suffix)
                with os.fdopen(fd, 'wb') as f:
                    with ZipFile(f, 'w', ZIP_DEFLATED) as z:
                        z.write(file_path, arcname=fileName)
                file_path = tmpfile_path
            else:
                suffix, content_type = '', 'text/plain'
            self.service.create_blob_from_path(container_name=self.container,
                                               blob_name=fileName+suffix,
                                               file_path=file_path,
                                               content_settings=ContentSettings(content_type=content_type),
                                               max_connections=self.max_connections
                                               )  # max_retries and retry_wait no longer arguments in azure 0.33
        finally:
            if self.zip_compression and fd:
                os.remove(tmpfile_path)
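A minimal usage sketch with placeholder credentials and paths; the container pattern relies on the %(hostname)s key that __init__ stores in self.meta:

handler = _BlobStorageFileHandler(account_name='<account name>',
                                  account_key='<account key>',
                                  container='logs-%(hostname)s',
                                  zip_compression=True)
handler.put_file_into_storage('/var/log/myapp', 'app.log.1')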
Example 6
    def prepare(self, area):
        assert area is not None, 'area is none; should already be validated'

        area_config = config.load_area(area)

        storage_config = config.load_storage(area_config['storage'])

        blob_service = BlockBlobService(account_name=storage_config['name'], account_key=storage_config['key1'])

        blob_service.create_container(area_config['container'])

        blob_service.set_container_acl(area_config['container'], public_access=PublicAccess.Container)
Example 7
def initialize_backend():
    global _blob_service
    global _container
    global _timeout

    _blob_service = BlockBlobService(
        account_name=getenv_required(_ENV_ACCOUNT_NAME),
        account_key=getenv_required(_ENV_ACCOUNT_KEY))
    _container = getenv(_ENV_CONTAINER, _DEFAULT_CONTAINER)
    _timeout = getenv_int(_ENV_TIMEOUT, _DEFAULT_TIMEOUT)

    _blob_service.create_container(
        _container, fail_on_exist=False, timeout=_timeout)
def run_sample():
    try:
        # Create the BlockBlobService that is used to call the Blob service for the storage account
        block_blob_service = BlockBlobService(account_name='meetpythonstorage', account_key='<account_key>')

        # Create a container called 'quickstartblobs'.
        container_name ='quickstartblobs'
        block_blob_service.create_container(container_name)

        # Set the permission so the blobs are public.
        block_blob_service.set_container_acl(container_name, public_access=PublicAccess.Container)

        # Create a file in Documents to test the upload and download.
        local_path=os.path.abspath(os.path.curdir)
        local_file_name =input("Enter file name to upload : ")
        full_path_to_file =os.path.join(local_path, local_file_name)

        # Write text to the file.
        #file = open(full_path_to_file,  'w')
        #file.write("Hello, World!")
        #file.close()

        print("Temp file = " + full_path_to_file)
        print("\nUploading to Blob storage as blob" + local_file_name)

        # Upload the created file, use local_file_name for the blob name
        block_blob_service.create_blob_from_path(container_name, local_file_name, full_path_to_file)

        # List the blobs in the container
        print("\nList blobs in the container")
        generator = block_blob_service.list_blobs(container_name)
        for blob in generator:
            print("\t Blob name: " + blob.name)

        # Download the blob(s).
        # Add '_DOWNLOADED' as prefix to '.txt' so you can see both files in Documents.
        full_path_to_file2 = os.path.join(local_path, str.replace(local_file_name ,'.txt', '_DOWNLOADED.txt'))
        print("\nDownloading blob to " + full_path_to_file2)
        block_blob_service.get_blob_to_path(container_name, local_file_name, full_path_to_file2)

        sys.stdout.write("Sample finished running. When you hit <any key>, the sample will be deleted and the sample "
                         "application will exit.")
        sys.stdout.flush()
        input()

        # Clean up resources. This includes the container and the temp files
        block_blob_service.delete_container(container_name)
        os.remove(full_path_to_file)
        os.remove(full_path_to_file2)
    except Exception as e:
        print(e)
    def upload_assets(self, blob_client: azureblob.BlockBlobService):
        """
        Uploads the file specified in the JSON parameters file into a storage container that will
        delete itself after 7 days.

        :param blob_client: A blob service client used for making blob operations.
        :type blob_client: `azure.storage.blob.BlockBlobService`
        """
        input_container_name = "fgrp-" + self.job_id
        output_container_name = "fgrp-" + self.job_id + '-output'

        # Create input container
        blob_client.create_container(input_container_name, fail_on_exist=False)
        logger.info('creating a storage container: {}'.format(input_container_name))

        # Create output container
        blob_client.create_container(output_container_name, fail_on_exist=False)
        logger.info('creating a storage container: {}'.format(output_container_name))

        full_sas_url_input = 'https://{}.blob.core.windows.net/{}?{}'.format(
            blob_client.account_name,
            input_container_name,
            utils.get_container_sas_token(
                blob_client,
                input_container_name,
                ContainerPermissions.READ +
                ContainerPermissions.LIST))
        full_sas_url_output = 'https://{}.blob.core.windows.net/{}?{}'.format(
            blob_client.account_name,
            output_container_name,
            utils.get_container_sas_token(
                blob_client,
                output_container_name,
                ContainerPermissions.READ +
                ContainerPermissions.LIST +
                ContainerPermissions.WRITE))

        # Set the storage info for the container.
        self.storage_info = utils.StorageInfo(
            input_container_name,
            output_container_name,
            full_sas_url_input,
            full_sas_url_output)

        # Upload the asset file that will be rendered
        scenefile = ctm.get_scene_file(self.parameters_file)
        for file in os.listdir("Assets"):
            if scenefile == file:
                file_path = Path("Assets/" + file)
                utils.upload_file_to_container(blob_client, input_container_name, file_path)
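utils.get_container_sas_token is project-specific and not shown here; a plausible sketch of such a helper using the legacy SDK's own SAS method (the 24-hour expiry is an assumption, not taken from the original):

from datetime import datetime, timedelta

def get_container_sas_token_sketch(blob_client, container_name, permissions):
    # Issue a container-level SAS token valid for 24 hours (assumed expiry).
    return blob_client.generate_container_shared_access_signature(
        container_name,
        permission=permissions,
        expiry=datetime.utcnow() + timedelta(hours=24))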
Example 10
    def block_blob_service(self):
        from azure.storage.blob import BlockBlobService, PublicAccess
        block_blob_service = BlockBlobService(
            connection_string=self.conn_string,
            socket_timeout=self.socket_timeout,
        )
        if self.max_block_size is not None:
            block_blob_service.MAX_BLOCK_SIZE = self.max_block_size
        if self.max_single_put_size is not None:
            block_blob_service.MAX_SINGLE_PUT_SIZE = self.max_single_put_size

        if self.create_if_missing:
            block_blob_service.create_container(
                self.container,
                public_access=PublicAccess.Container if self.public else None
            )
        return block_blob_service
Example 11
    def blob_service(self):
        from azure.storage.blob import BlockBlobService
        from azure.common import AzureMissingResourceHttpError

        logger.debug("URL {}".format(self.path_info))
        logger.debug("Connection string {}".format(self.connection_string))
        blob_service = BlockBlobService(
            connection_string=self.connection_string
        )
        logger.debug("Container name {}".format(self.path_info.bucket))
        try:  # verify that container exists
            blob_service.list_blobs(
                self.path_info.bucket, delimiter="/", num_results=1
            )
        except AzureMissingResourceHttpError:
            blob_service.create_container(self.path_info.bucket)
        return blob_service
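An equivalent existence check can lean on the SDK's exists() helper instead of probing list_blobs; a minimal sketch (connection string and container name are placeholders):

from azure.storage.blob import BlockBlobService

blob_service = BlockBlobService(connection_string='<connection string>')
if not blob_service.exists('<container name>'):
    blob_service.create_container('<container name>')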
    def uploadVideo(self):

        outputConnectionString = "DefaultEndpointsProtocol=http;BlobEndpoint=http://" + self.privateIPVM + ":11002/" + self.localStorageName + ";AccountName=" + self.localStorageName + ";AccountKey=" + self.localStorageKey + ";"
        #print("testrb" + self.privateIPVM + "," + self.localStorageContainer + "," + self.localStorageName + "," + self.localStorageKey + "," + outputConnectionString)

        now = datetime.strftime(datetime.now(), "%H%M%S")
        inputFileName = self.videoPath[2:-4] + "_" + now + ".MP4"

        outputBlob = BlockBlobService(connection_string=outputConnectionString)
        print("create container")
        outputBlob.create_container(self.localStorageName, fail_on_exist=False)
        print("end creating container")

        print("start writing to local blob")
        outputBlob.create_blob_from_path(self.localStorageName, inputFileName,
                                         self.videoPath)
        print("end writing to local blob")
Example 14
def run_sample():
    try:
       
        block_blob_service = BlockBlobService(account_name='kazen1', account_key='<account_key>')

        
        container_name ='kazn1'
        block_blob_service.create_container(container_name)

       
        block_blob_service.set_container_acl(container_name, public_access=PublicAccess.Container)

        
        local_path=os.path.abspath(os.path.curdir)
        local_file_name =input("Enter file name to upload : ")
        full_path_to_file =os.path.join(local_path, local_file_name)

      

        print("Temp file = " + full_path_to_file)
        print("\nUploading to Blob storage as blob" + local_file_name)

       
        block_blob_service.create_blob_from_path(container_name, local_file_name, full_path_to_file)

        
        print("\nList blobs in the container")
        generator = block_blob_service.list_blobs(container_name)
        for blob in generator:
            print("\t Blob name: " + blob.name)

        full_path_to_file2 = os.path.join(local_path, str.replace(local_file_name ,'.txt', '_DOWNLOADED.txt'))
        print("\nDownloading blob to " + full_path_to_file2)
        block_blob_service.get_blob_to_path(container_name, local_file_name, full_path_to_file2)

        sys.stdout.write("Sample finished running. When you hit <any key>, the sample will be deleted and the sample "
                         "application will exit.")
        sys.stdout.flush()
        input()

        
        block_blob_service.delete_container(container_name)
        os.remove(full_path_to_file)
        os.remove(full_path_to_file2)
    except Exception as e:
        print(e)
Example 15
def run_sample():
    try:
        # Create the BlockBlobService that is used to call the Blob service for the storage account
        block_blob_service = BlockBlobService(account_name=cred.account_name, account_key=cred.account_key)

        # Create a container called 'quickstartblobs'.
        container_name ='quickstartblobs'
        block_blob_service.create_container(container_name)

        # Set the permission so the blobs are public.
        block_blob_service.set_container_acl(container_name, public_access=PublicAccess.Container)

        # Create a file in Documents to test the upload and download.
        local_path=os.path.expanduser("~/Documents")
        local_file_name ="QuickStart_" + str(uuid.uuid4()) + ".txt"
        full_path_to_file =os.path.join(local_path, local_file_name)

        # Write text to the file.
        file = open(full_path_to_file,  'w')
        file.write("Hello, World!")
        file.close()

        print("Temp file = " + full_path_to_file)
        print("\nUploading to Blob storage as blob" + local_file_name)

        # Upload the created file, use local_file_name for the blob name
        block_blob_service.create_blob_from_path(container_name, local_file_name, full_path_to_file)

        # List the blobs in the container
        print("\nList blobs in the container")
        generator = block_blob_service.list_blobs(container_name)
        for blob in generator:
            print("\t Blob name: " + blob.name)

        # Download the blob(s).
        # Add '_DOWNLOADED' as prefix to '.txt' so you can see both files in Documents.
        full_path_to_file2 = os.path.join(local_path, str.replace(local_file_name ,'.txt', '_DOWNLOADED.txt'))
        print("\nDownloading blob to " + full_path_to_file2)
        block_blob_service.get_blob_to_path(container_name, local_file_name, full_path_to_file2)

        # Clean up resources. This includes the container and the temp files
        #block_blob_service.delete_container(container_name)
        #os.remove(full_path_to_file)
        #os.remove(full_path_to_file2)
    except Exception as e:
        print(e)
Example 16
def getAzureBlobService(account_name, account_key, container_name):
    try:
        print('\n initializing azure...')
        # Create the BlockBlobService that is used to call the Blob service for the storage account
        block_blob_service = BlockBlobService(account_name=account_name,
                                              account_key=account_key)

        # Create a container called 'quickstartblobs'.
        block_blob_service.create_container(container_name)

        # Set the permission so the blobs are public.
        block_blob_service.set_container_acl(
            container_name, public_access=PublicAccess.Container)
        return block_blob_service

    except Exception as e:
        print(e)
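A minimal usage sketch (credentials, container, and file names are placeholders):

service = getAzureBlobService('<account name>', '<account key>', 'quickstartblobs')
if service is not None:
    service.create_blob_from_path('quickstartblobs', 'fruit.jpg', './fruit.jpg')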
Example 17
File: app.py Project: jl789/H2O
def publish_asset():

    # Get parameters for clustering
    parameters = request.get_json()
    '''
    # Uncomment this for OrbitDB hosting (PoC, not Ocean testnet compatible yet)
    execute_js('host.js')
    with open('host.json', 'r') as infile:
        host = json.load(infile)
    '''

    # Azure storage hosting
    azure_account = parameters['azureaccount']

    # Unique container name - requires non-collision * under a single Azure account *
    # 36^4=1679616 possibilities, Pr[collision] = 1 - ( (36^4-1)/36^4 )^num_datasets_created
    container_name = parameters['containername']

    # Generate machine-readable download link to hosted dataset
    azure_url = 'https://' + azure_account + '.blob.core.windows.net/' + container_name + '/output.json'

    try:
        # Create service used to call the Blob service for the storage account
        block_blob_service = BlockBlobService(
            account_name=azure_account, account_key=parameters['azurekey'])

        # Create container with name = asset_id
        block_blob_service.create_container(container_name)

        # Make public
        block_blob_service.set_container_acl(
            container_name, public_access=PublicAccess.Container)

        # Create and upload blob
        block_blob_service.create_blob_from_path(container_name, 'output.json',
                                                 'output.json')

    except Exception as e:
        print(e)

    # Outlier Ventures' abstraction for easy registration with Keeper and Aquarius
    reg.simple_register(parameters['name'], parameters['price'],
                        parameters['description'], parameters['author'],
                        azure_url)

    return ('', 200)
def main(pretrained_model_type, mmlspark_model_type, config_filename,
		 output_model_name, sample_frac):
	# Load the configuration file
	config = ConfigFile(config_filename, pretrained_model_type,
		mmlspark_model_type, output_model_name)
	write_model_summary_to_blob(config, mmlspark_model_type)

	# Log the parameters of the run
	run_logger = get_azureml_logger()
	run_logger.log('amlrealworld.aerial_image_classification.run_mmlspark','true')
	run_logger.log('pretrained_model_type', pretrained_model_type)
	run_logger.log('mmlspark_model_type', mmlspark_model_type)
	run_logger.log('config_filename', config_filename)
	run_logger.log('output_model_name', output_model_name)
	run_logger.log('sample_frac', sample_frac)

	# Train and save the MMLSpark model
	train_df = load_data(config.train_uri, config, sample_frac)
	mmlspark_model = mmlspark.TrainClassifier(
		model=config.mmlspark_model_type, labelCol='label').fit(train_df)
	mmlspark_model.write().overwrite().save(config.output_uri)

	# Apply the MMLSpark model to the test set and save the accuracy metric
	test_df = load_data(config.test_uri, config, sample_frac)
	predictions = mmlspark_model.transform(test_df)
	metrics = mmlspark.ComputeModelStatistics(evaluationMetric='accuracy') \
		.transform(predictions)
	metrics.show()
	run_logger.log('accuracy_on_test_set', metrics.first()['accuracy'])
	
	# Save the predictions
	tf = mmlspark.IndexToValue().setInputCol('scored_labels') \
		.setOutputCol('pred_label')
	predictions = tf.transform(predictions).select(
		'filepath', 'label', 'pred_label')
	output_str = predictions.toPandas().to_csv(index=False)
	blob_service = BlockBlobService(config.storage_account_name,
									config.storage_account_key)
	blob_service.create_container(config.container_prediction_results)
	blob_service.create_blob_from_text(
			config.container_prediction_results,
			config.predictions_filename,
			output_str)

	return
Example 19
def vir_typer_upload(request, vir_typer_pk):
    vir_typer_project = get_object_or_404(VirTyperProject, pk=vir_typer_pk)
    vir_typer_samples = list(
        VirTyperRequest.objects.filter(project_name__pk=vir_typer_pk))
    sample_names = list()
    for sample in vir_typer_samples:
        sample_names.append(str(sample.LSTS_ID))
    if request.method == 'POST':
        seq_files = [
            request.FILES.get('file[%d]' % i)
            for i in range(0, len(request.FILES))
        ]
        if seq_files:
            container_name = VirTyperProject.objects.get(
                pk=vir_typer_pk).container_namer()
            blob_client = BlockBlobService(
                account_name=settings.AZURE_ACCOUNT_NAME,
                account_key=settings.AZURE_ACCOUNT_KEY)
            blob_client.create_container(container_name)
            for item in seq_files:
                blob_client.create_blob_from_bytes(
                    container_name=container_name,
                    blob_name=item.name,
                    blob=item.read())
            for sample in vir_typer_samples:
                # sample_name = '{lsts}_{sn}'.format(lsts=str(sample.LSTS_ID),
                #                                    sn=str(sample.sample_name))
                for seq_file in seq_files:
                    if str(sample.LSTS_ID) in str(seq_file):

                        vir_files = VirTyperFiles(sample_name_id=sample.pk,
                                                  sequence_file=seq_file)
                        vir_files.save()
            vir_typer_project.status = 'Processing'
            vir_typer_project.save()
            run_vir_typer.apply_async(queue='cowbat',
                                      args=(vir_typer_pk, ),
                                      countdown=10)
        return redirect('vir_typer:vir_typer_home')
    return render(
        request, 'vir_typer/vir_typer_upload_sequences.html', {
            'vir_typer_project': vir_typer_project,
            'vir_typer_samples': vir_typer_samples,
            'vir_typer_sample_names': sample_names
        })
Example 20
def storage(host):
    """
    Create blob using azurite.
    """
    bbs = BlockBlobService(
        account_name=USERNAME,
        account_key=KEY,
        custom_domain=f"http://{host}/devstoreaccount1",
    )
    bbs.create_container("data", timeout=1)

    bbs.create_blob_from_bytes("data", "top_file.txt", data)
    bbs.create_blob_from_bytes("data", "root/rfile.txt", data)
    bbs.create_blob_from_bytes("data", "root/a/file.txt", data)
    bbs.create_blob_from_bytes("data", "root/b/file.txt", data)
    bbs.create_blob_from_bytes("data", "root/c/file1.txt", data)
    bbs.create_blob_from_bytes("data", "root/c/file2.txt", data)
    yield bbs
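When targeting the local storage emulator instead of a custom Azurite host, the legacy SDK can also be pointed at the well-known development account; a minimal sketch:

from azure.storage.blob import BlockBlobService

# is_emulated=True targets the local development storage endpoint
# (http://127.0.0.1:10000/devstoreaccount1 for blobs).
bbs = BlockBlobService(is_emulated=True)
bbs.create_container('data')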
Example 21
def __createstorage():
    global container_name
    global queue_service
    global block_blob_service
    block_blob_service = BlockBlobService(
        account_name=STORAGE_ACCOUNT_NAME,
        account_key=STORAGE_ACCOUNT_KEY,
        endpoint_suffix=STORAGE_ACCOUNT_SUFFIX)
    timestr = time.strftime("%Y%m%d-%H%M%S")
    container_name = 'fromcamera' + timestr
    block_blob_service.create_container(container_name)
    block_blob_service.set_container_acl(container_name,
                                         public_access=PublicAccess.Container)
    queue_service = QueueService(account_name=STORAGE_ACCOUNT_NAME,
                                 account_key=STORAGE_ACCOUNT_KEY,
                                 endpoint_suffix=STORAGE_ACCOUNT_SUFFIX)
    queue_service.create_queue('fromcamera' + timestr)
Example 22
def connect(config=False):
  import lib.misc as misc 
  from azure.storage.blob import BlockBlobService as BlobService
  global blob_service, container
  # Connect to the cloud service. 
  if not config: config = misc.config['_private']

  container = 'streams'

  if not 'azure' in config:
    logging.debug("no azure config")
    return None, None

  if not blob_service:
    blob_service = BlobService(config['azure']['storage_account_name'], config['azure']['primary_access_key'])
    blob_service.create_container(container)

  return blob_service, container
Example 23
def blob():
    static_dir_path = r"D:\home\site\wwwroot\static"
    account_name = 'hanastragetest'
    account_key = '<account_key>'
    container_name = 'images'
    container_url = "https://hanastragetest.blob.core.windows.net/" + container_name

    block_blob_service = BlockBlobService(account_name=account_name,
                                          account_key=account_key)
    app.logger.info("test message : {}".format(block_blob_service))
    # container create
    block_blob_service.create_container(container_name)
    block_blob_service.set_container_acl(container_name,
                                         public_access=PublicAccess.Container)
    #app.logger.info("finish : block_blob_service.set_container_acl")

    files = os.listdir(static_dir_path)
    for file in files:
        # delete
        if block_blob_service.exists(container_name, file):
            block_blob_service.delete_blob(container_name, file)

        # blob write
        block_blob_service.create_blob_from_path(
            container_name,
            file,
            static_dir_path + '\\' + file,
            content_settings=ContentSettings(content_type='image/png'))

    # get container
    generator = block_blob_service.list_blobs(container_name)
    html = ""
    for blob in generator:
        #app.logger.info("generator : {}".format(blob.name))
        html = "{}<img src='{}/{}'>".format(html, container_url, blob.name)
    #app.logger.info("generator_object : {}".format(generator))

    result = {
        "result": True,
        "data": {
            "blob_name": [blob.name for blob in generator]
        }
    }
    return make_response(json.dumps(result, ensure_ascii=False) + html)
def run_sample():
    try:
        # Create the BlockBlobService that is used to call the Blob service for the storage account
        block_blob_service = BlockBlobService(account_name='<account_name>', account_key='<account_key>')

        # Create a container called 'quickstartblobs'.
        container_name ='quickstartblobs'
        block_blob_service.create_container(container_name)

        # Set the permission so the blobs are public.
        block_blob_service.set_container_acl(container_name, public_access=PublicAccess.Container)

        # Create upload the fruit photo
        local_path=os.getcwd()+'\\data'
        local_file_name ='fruit.jpg'
        full_path_to_file =os.path.join(local_path, local_file_name)

        print("\nUploading to Blob storage as blob" + local_file_name)

        # Upload the created file, use local_file_name for the blob name
        block_blob_service.create_blob_from_path(container_name, local_file_name, full_path_to_file)

        # List the blobs in the container
        print("\nList blobs in the container")
        generator = block_blob_service.list_blobs(container_name)
        for blob in generator:
            print("\t Blob name: " + blob.name)

        # Download the blob(s).
        # Add '_DOWNLOADED' as prefix to '.jpg' so you can see both files in Documents.
        full_path_to_file2 = os.path.join(local_path, str.replace(local_file_name ,'.jpg', '_DOWNLOADED.jpg'))
        print("\nDownloading blob to " + full_path_to_file2)
        block_blob_service.get_blob_to_path(container_name, local_file_name, full_path_to_file2)

        sys.stdout.write("Sample finished running. When you hit <any key>, the sample will be deleted and the sample "
                         "application will exit.")
        sys.stdout.flush()
        input()

        # Clean up resources. This includes the container and the temp files
        block_blob_service.delete_container(container_name)
        os.remove(full_path_to_file2)
    except Exception as e:
        print(e)
Example 25
def generateAzureInputStore(bs_config, stor_name, stor_key, source_dir):
    '''
    Loads a folder of images with the appropriate filenames into the Azure Blob Storage dir so they are accessible to Input
    workers running in the cloud
    '''

    bs_dir_name = bs_config['data']['blob-storage-con']

    storage_uri = ""
    block_blob_service = BlockBlobService(account_name=stor_name,
                                          account_key=stor_key)
    #, endpoint_suffix="core.usgovcloudapi.net") #addendum for use on gov't
    block_blob_service.create_container(bs_dir_name)
    logging.debug("Connected to blob service {0}".format(stor_name))

    image_count = 0
    for dir_path, dir_names, file_names in os.walk(source_dir, topdown=True):
        for file_name in file_names:
            dir_components = Path(dir_path)
            usage = dir_components.parts[len(dir_components.parts) - 1]
            entity = dir_components.parts[len(dir_components.parts) - 2]

            image_file = open(os.path.join(dir_path, file_name), 'rb').read()

            #Calculates image hash, infers the purpose of an image from its folder position and generate a filename
            hash = hashlib.md5(image_file).hexdigest()
            blob_name = usage + "-" + entity + "-" + str(hash)

            #Uploads to the Azure Blob Store
            block_blob_service.create_blob_from_path(
                bs_dir_name, blob_name, dir_path + "/" + file_name)
            logging.info(
                "File written to blob container {0} from {1} {2}".format(
                    bs_dir_name, os.path.join(dir_path, file_name), blob_name))

            #Renames the image in place
            os.rename(os.path.join(dir_path, file_name),
                      os.path.join(dir_path, blob_name))
            logging.info("Renamed {0} to {1}".format(
                os.path.join(dir_path, file_name),
                os.path.join(dir_path, blob_name)))
            image_count += 1
    logging.info("Wrote {0} images total to blob storage".format(image_count))
Example 26
    def StoreExtractedTable(self,images):
        block_blob_service = BlockBlobService(account_name='tableextractor', account_key='<account_key>')
        container_name = 'extracted-table-images'
        block_blob_service.create_container(container_name)
        # local_path = '/home/vikas/NeuroNer/NeuroNER-master/src/BlobStorage/'
        local_path = "C:\\Users\\M1049308\\Desktop\\tableExtract\\ExtracedTables"
        for files in os.listdir(local_path):
            block_blob_service.create_blob_from_path(container_name,files,os.path.join(local_path, files))
            os.remove(os.path.join(local_path, files))

        print('\nList blobs in the container')
        generator = block_blob_service.list_blobs(container_name)
        print(generator)
        ImageNames = []
        for blob in generator:
            print('\t Blob name: ' + blob.name)
            ImageNames.append(blob.name)
        
        return jsonify(BlobNames=ImageNames)
Example 27
    def WriteBlob(self, blob_name, value):
        """ 単一 BLOB ファイルを作成しテキストを保存する。 """
        szRet = ""
        if (LogWriter.DEBUG_MODE):
            return ("Debug モードのため書き込みをしません。")

        try:
            #blob_name = r'sample.txt';

            szRet = "BlockBlobService"
            blob_service = BlockBlobService(self._name, self._key)

            szRet = "create_container"
            blob_service.create_container(LogWriter.LOG_CONTAINER_NAME,
                                          public_access=PublicAccess.Blob)

            szRet = "create_blob_from_bytes"
            #blob_service.create_blob_from_bytes(
            #    log_container_name,
            #    log_blob_name,
            #    b'<center><h1>Hello World!</h1></center>',
            #    content_settings=ContentSettings('text/html')
            #)

            if (isinstance(value, str)):
                szRet = "create_blob_from_text"
                blob_service.create_blob_from_text(
                    LogWriter.LOG_CONTAINER_NAME, blob_name, value)
            else:
                szRet = "create_blob_from_stream"
                blob_service.create_blob_from_stream(
                    LogWriter.LOG_CONTAINER_NAME, blob_name, io.BytesIO(value))
            #}if

            #szRet = "make_blob_url"
            #print(blob_service.make_blob_url(log_container_name, log_blob_name))

            szRet = "OK"
        except Exception as ex:
            print("Exception: {}".format(ex))
        #try

        return szRet
class AzureStorage(Storage):
    def __init__(self, option=None):
        self.block_blob_service = \
            BlockBlobService(account_name=settings.AZURE_STORAGE_ACCOUNT_NAME,
                             account_key=settings.AZURE_STORAGE_ACCOUNT_KEY)
        self.block_blob_service.create_container(
            settings.AZURE_STORAGE_DEFAULT_CONTAINER)

    def _save(self, name, content):
        content.open()
        content_stream = content.read()
        self.block_blob_service.create_blob_from_bytes(
            'media',
            name,
            content_stream,
            content_settings=(ContentSettings(
                content_type=content.file.content_type)))
        return name

    def _open(self, name, mode='rb'):
        extension_index = name.rfind('.')
        extension = ''
        if extension_index != -1:
            extension = name[extension_index:]
        tmp_file = tempfile.NamedTemporaryFile(suffix=extension, delete=False)
        self.block_blob_service.get_blob_to_stream(
            container_name=settings.AZURE_STORAGE_DEFAULT_CONTAINER,
            blob_name=name,
            stream=tmp_file,
            max_connections=2)
        tmp_file.seek(0)

        return File(tmp_file)

    def exists(self, name):
        generator = self.block_blob_service.list_blobs('media')
        for blob in generator:
            if name == blob.name:
                return True
        return False

    def url(self, name):
        # Return a public URL for the blob in the default container
        return self.block_blob_service.make_blob_url(
            settings.AZURE_STORAGE_DEFAULT_CONTAINER, name)
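A hedged settings sketch for wiring this backend into Django (the module path and values are hypothetical):

# settings.py (hypothetical values)
AZURE_STORAGE_ACCOUNT_NAME = '<account name>'
AZURE_STORAGE_ACCOUNT_KEY = '<account key>'
AZURE_STORAGE_DEFAULT_CONTAINER = 'media'
DEFAULT_FILE_STORAGE = 'myapp.storage.AzureStorage'  # assumes the class lives in myapp/storage.py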
    def test_request_callback_signed_header(self):
        # Arrange
        service = BlockBlobService(self.account_name, self.account_key)
        name = self.get_resource_name('cont')

        # Act
        def callback(request):
            if request.method == 'PUT':
                request.headers['x-ms-meta-hello'] = 'world'

        service.request_callback = callback

        # Assert
        try:
            service.create_container(name)
            metadata = service.get_container_metadata(name)
            self.assertEqual(metadata, {'hello': 'world'})
        finally:
            service.delete_container(name)
Example 30
def temp(params):
    block_blob_service = BlockBlobService(
        account_name='rvsafeimages',
        account_key='<account_key>'
    )
    container_name = 'imagescontainer'
    #params = params+".jpg"
    block_blob_service.create_container(container_name)
    block_blob_service.set_container_acl(container_name,
                                         public_access=PublicAccess.Container)
    # full_path_to_file2 = os.getcwd()+"/test.jpg"
    # block_blob_service.get_blob_to_path(container_name, params, full_path_to_file2)
    #import pdb; pdb.set_trace()
    data = block_blob_service.get_blob_to_bytes(container_name, params)
    # data = open(full_path_to_file2,"rb").read()
    #import pdb; pdb.set_trace()
    res = Response(data.content, status=200, mimetype="image/jpeg")
    return res
Example 32
def upload_sequence_data(request, sequencing_run_pk):
    sequencing_run = get_object_or_404(SequencingRun, pk=sequencing_run_pk)
    check_uploaded_seqids(sequencing_run=sequencing_run)
    if request.method == 'POST':
        container_name = sequencing_run.run_name.lower().replace('_', '-')
        blob_client = BlockBlobService(
            account_name=settings.AZURE_ACCOUNT_NAME,
            account_key=settings.AZURE_ACCOUNT_KEY)
        blob_client.create_container(container_name)
        for i in range(0, len(request.FILES)):
            item = request.FILES.get('file[%d]' % i)
            blob_client.create_blob_from_bytes(container_name=container_name,
                                               blob_name=item.name,
                                               blob=item.read())

        # return redirect('cowbat:cowbat_processing', sequencing_run_pk=sequencing_run.pk)
    return render(request, 'cowbat/upload_sequence_data.html', {
        'sequencing_run': sequencing_run,
    })
Example 33
def main():
    args = parsing_options()
    az_conf = read_az_conf(args.azureConf)
    block_blob_service = BlockBlobService(
        account_name=az_conf['storage_account_name'],
        account_key=az_conf['storage_account_key'])
    block_blob_service.create_container('mycontainer')
    block_blob_service.create_blob_from_path(
        'mycontainer',
        'test.zip',
        '/tmp/test.zip',
        content_settings=ContentSettings(
            content_type='application/zip',
            content_md5='M2E5OTI1N2ZmMTRiNDExNzk1ZmFiMDEyZjQ3OGQ3ODIKi'))
    generator = block_blob_service.list_blobs('mycontainer')
    for blob in generator:
        print(blob.name)
        print(blob.properties)
        print(blob.metadata)
Example 35
def copy_to_azure(file_uuid):
    try:
        CDN_BASE = "https://ucarecdn.com/"
        file_url = CDN_BASE + file_uuid + '/'
        block_blob_service = BlockBlobService(
            account_name='uploadcaretest',
            # move the account key out to the filesystem or a database
            account_key='')
        block_blob_service.create_container('image-container')
        block_blob_service.set_container_acl(
            'image-container', public_access=PublicAccess.Container)
        filename = wget.download(file_url)
        block_blob_service.create_blob_from_path(
            'image-container',
            file_uuid,
            filename,
            content_settings=ContentSettings(content_type='image/png'))
    except Exception as e:
        print(e)
Example 36
def uploadImg(myblob):
    from azure.storage.blob import BlockBlobService
    block_blob_service = BlockBlobService(
        account_name='eventdetect',
        account_key='<account_key>'
    )
    from azure.storage.blob import PublicAccess
    block_blob_service.create_container('mycontainer',
                                        public_access=PublicAccess.Container)
    block_blob_service.set_container_acl('mycontainer',
                                         public_access=PublicAccess.Container)
    from azure.storage.blob import ContentSettings
    block_blob_service.create_blob_from_path(
        'mycontainer',
        myblob,
        'Images\\' + myblob,
        content_settings=ContentSettings(content_type='image/jpeg'))
Example 37
def Blob_upload(path = PATH) :
    account_name = 'mailsys'
    account_key = '<account_key>'
    container_name = 'surgerydata'

    # Create the BlockBlobService that is used to call the Blob service for the storage account.
    service = BlockBlobService(account_name = account_name, account_key = account_key)

    # Create a container called 'surgerydata'.
    if not service.exists(container_name) :
        print("create container : ", container_name)
        service.create_container(container_name)

    else :
        print("container name : " + container_name)
 
    print("accounnt : " + account_name)
    
    # Set the permission so the blobs are public.
    service.set_container_acl(container_name, public_access = PublicAccess.Container)

    # get data path and list
    abs_path = os.path.join(os.getcwd(), path)
    list_video = os.listdir(abs_path)

    for name in list_video :
        path_tmp = os.path.join(abs_path, name)

        print("Temp file : " + path_tmp)
        print("\nUploading to Blob storage as blob : " + name)
        print("\nUploading...\n")

        start_time = time.time()

        service.create_blob_from_path(container_name, name, path_tmp)
        
        print("Upload complete!!")
        print("Time taken {} sec" .format(time.time() - start_time))

        os.remove(path_tmp)

    return list_video
Example 38
def download():

    # Create the BlockBlobService that is used to call the Blob service for the storage account
    block_blob_service = BlockBlobService(account_name='<account name>',
                                          account_key='<account key>')

    # Create a container called 'quickstartblobs'.
    container_name = '<container name>'
    block_blob_service.create_container(container_name)

    # name of the container
    generator = block_blob_service.list_blobs(container_name)

    # code below lists all the blobs in the container and downloads them one after another
    dpath = "C:\\Users\\<user name>\\Desktop\\data\\"  #replace <user name> with your user name

    for blob in generator:
        print(blob.name)
        print("{}".format(blob.name))
        # check if the path contains a folder structure, create the folder structure
        if "/" in "{}".format(blob.name):
            print("there is a path in this")
            # extract the folder path and check if that folder exists locally, and if not create it
            head, tail = os.path.split("{}".format(blob.name))
            print(head)
            print(tail)
            if (os.path.isdir(dpath + "\\" + head)):
                # download the files to this directory
                print("directory and sub directories exist")
                block_blob_service.get_blob_to_path(
                    container_name, blob.name, dpath + "\\" + head + "\\" + tail)
            else:
                # create the directory and download the file to it
                print("directory doesn't exist, creating it now")
                os.makedirs(dpath + "\\" + head, exist_ok=True)
                print("directory created, download initiated")
                block_blob_service.get_blob_to_path(
                    container_name, blob.name,
                    dpath + "\\" + head + "\\" + tail)
        else:
            block_blob_service.get_blob_to_path(container_name, blob.name,
                                                blob.name)
Example 39
def uploadFolderCsvToBlob(storage_account, storage_key, container, in_dir, blob_out_dir):
    """ Uploads csvs in a folder into blob storage
    """
    # Create blob service
    block_blob_service = BlockBlobService(account_name=storage_account, account_key=storage_key)

    # Create container if not exists
    if not any(container in c.name for c in block_blob_service.list_containers()):
        block_blob_service.create_container(container, fail_on_exist=False)

    # Upload the CSV file to Azure cloud
    for root, dirs, filenames in os.walk(in_dir):
        for filename in filenames:
            if os.path.splitext(filename)[-1] == ".csv":
                blob_name = blob_out_dir + '/' + filename
                file_path = os.path.join(root, filename)
                # Upload the CSV file to Azure cloud
                block_blob_service.create_blob_from_path(
                    container, blob_name, file_path,
                    content_settings=ContentSettings(content_type='text/csv'))
Example 40
def make_public_container(STORAGE_NAME, STORAGE_KEY, NEW_CONTAINER_NAME):
    """Create a blob service and a blob container, set the container to public access, and return the blob service."""

    blob_service = BlockBlobService(account_name=STORAGE_NAME, account_key=STORAGE_KEY)
    new_container_status = blob_service.create_container(NEW_CONTAINER_NAME)
    blob_service.set_container_acl(NEW_CONTAINER_NAME, public_access=PublicAccess.Container)

    if new_container_status == True:
        print('{} BLOB container has been successfully created: {}'.format(NEW_CONTAINER_NAME, new_container_status))
    else:
        print('{} something went wrong: check parameters and subscription'.format(NEW_CONTAINER_NAME))

    return blob_service
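A minimal usage sketch (storage name, key, container, and file names are placeholders):

blob_service = make_public_container('<storage name>', '<storage key>', 'public-assets')
blob_service.create_blob_from_path('public-assets', 'logo.png', './logo.png')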
Example 41
def setup_file_share(container_name: str = os.getenv(
    'AZURE_CONTAINER_NAME')) -> 'BlockBlobService':
    '''
  Create & setup Azure blob as share
  
  Args:
    container_name (str, optional): Name of container, will 
      default to using environment variable if param is not 
      provided.

  Returns:
    BlockBlobService: Instance of the Block Blob Service.

  '''
    blob_service = BlockBlobService(
        account_name=os.getenv('STORAGE_ACCOUNT_NAME'),
        account_key=os.getenv('STORAGE_ACCOUNT_KEY'))
    blob_service.create_container(container_name, fail_on_exist=False)

    return blob_service
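A minimal usage sketch; the environment variable names match the ones read above, and the account values and container name are placeholders:

import os

os.environ.setdefault('STORAGE_ACCOUNT_NAME', '<account name>')
os.environ.setdefault('STORAGE_ACCOUNT_KEY', '<account key>')
share = setup_file_share(container_name='experiment-share')
share.create_blob_from_text('experiment-share', 'status.txt', 'ready')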
Example 42
    def save_model_file(self, full_path_to_file, local_file_name):
        """Uploads a local file containg a model to the blob container specifed by self.config.models_container_name
            If the container name does not exists it will be created.
        Arguments:
            full_path_to_file {[type]} -- full path to t the local file
            local_file_name {[type]} -- the file name.
        """

        assert (self.config.storage_account)
        assert (self.config.storage_account_key)
        assert (self.config.models_container_name)
        block_blob_service = BlockBlobService(
            account_name=self.config.storage_account,
            account_key=self.config.storage_account_key)
        block_blob_service.create_container(self.config.models_container_name,
                                            fail_on_exist=False,
                                            public_access=None)
        block_blob_service.create_blob_from_path(
            self.config.models_container_name, local_file_name,
            full_path_to_file)
Example 43
    def _client(self):
        """Return the Azure Storage Block Blob service.

        If this is the first call to the property, the client is created and
        the container is created if it doesn't yet exist.

        """
        client = BlockBlobService(connection_string=self._connection_string)

        created = client.create_container(
            container_name=self._container_name, fail_on_exist=False)

        if created:
            LOGGER.info("Created Azure Blob Storage container %s",
                        self._container_name)

        client.retry = ExponentialRetry(
            initial_backoff=self._retry_initial_backoff_sec,
            increment_base=self._retry_increment_base,
            max_attempts=self._retry_max_attempts).retry

        return client
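For reference, the legacy SDK also ships a LinearRetry policy in azure.storage.common.retry; a minimal sketch of swapping it in, assuming client is the BlockBlobService created above (the backoff values are assumptions):

from azure.storage.common.retry import LinearRetry

client.retry = LinearRetry(backoff=5, max_attempts=3).retry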
Example 44
class InstanceStorage(object):

    def __init__(self, group_name, location=None, create_if_not_exist=True):

        client = arm.instance()
        sm = client.storage.storage_accounts

        # Check existence of a storage account in the resource group
        # TODO: better with rm.list_resources for direct filtering,
        # but documentation on the generic filtering format is missing.
        # So, taking the first result of the iterator: ouch!
        new=True
        for sa in sm.list_by_resource_group(group_name):
            new=False
            self.name = sa.name
            self.location=sa.location
            logger.debug("Found SA %s" % self.name)
            break

        if new:
            logger.info("Creating storage account...")
            #Generating unique name for Azure
            unique_name = "%s%s" % (
                    str(group_name).translate(None, '-_.').lower(),
                    arm.id_generator()
                )
            # TODO : Check how to deal with account type...
            # Warning : the name of the storageaccount cannot be > 24 chars
            self.location=location
            result = sm.create(
                group_name,
                unique_name[:24],
                StorageAccountCreateParameters(
                    location=self.location,
                    account_type=AccountType.standard_lrs
                )
            )

            # Asynchronous operation, so wait...
            res = result.result()
            self.name = res.name

        # retrieve the keys and store them in the instance
        self.keys = sm.list_keys(group_name, self.name)
        logger.debug("Key1 : %s " % repr(self.keys.key1))
        logger.debug("Key2 : %s " % repr(self.keys.key2))

        # retrieve the blob service
        self.blob = BlockBlobService(
            account_name=self.name,
            account_key=self.keys.key1
        )
        # Define the storage tree :
        # sources for the images imported to create the VM
        # vhds for the VM images
        self.sources_container= "sources"
        self.vhds_container= "vhds"
        self.blob.create_container(self.sources_container)
        self.blob.create_container(self.vhds_container)

    def list_blobs(self):
        for blob in self.blob.list_blobs('system'):
            print(blob.name)

    def copy_source_images_from(self, source_storage, container, filepath):
        # Generate a token for 10 minutes read access
        token = source_storage.blob.generate_blob_shared_access_signature(
            container,
            filepath,
            BlobPermissions.READ,
            datetime.utcnow() + timedelta(minutes=10),
        )
        # Generate the source URL of the blob
        source_url = source_storage.blob.make_blob_url(
            container,
            filepath,
            sas_token=token
        )
        # Launch the copy from the distant storage to the current one
        self.blob.copy_blob(
            self.sources_container,
            os.path.basename(filepath),
            source_url
        )
    def file_uploaded(self, botengine, device_object, file_id, filesize_bytes, content_type, file_extension):
        """
        A device file has been uploaded
        :param botengine: BotEngine environment
        :param device_object: Device object that uploaded the file
        :param file_id: File ID to reference this file at the server
        :param filesize_bytes: The file size in bytes
        :param content_type: The content type, for example 'video/mp4'
        :param file_extension: The file extension, for example 'mp4'
        """
        # We are demonstrating video processing here, so avoid video processing on files that are not videos.
        if "video" not in content_type:
            botengine.get_logger().info("The uploaded file is not a video, skipping processing ...")
            return
            
        # Create full file path
        FILE_NAME = "video." + file_extension
        FILE_PATH= os.path.dirname(os.path.abspath(__file__))
        FULL_FILE_PATH =os.path.join(FILE_PATH, FILE_NAME)

        # Download file to local device
        botengine.download_file(file_id, FILE_NAME)

        # Create Blob service and upload file to Microsoft Blob Storage
        block_blob_service = BlockBlobService(account_name='andresdemo', account_key='<account_key>')
        block_blob_service.create_container(CONTAINER_NAME)

        # Set Public Access to container
        # A try and except block is used due to an occasional logger exception (doesn't impact function)
        try:
            block_blob_service.set_container_acl(CONTAINER_NAME, public_access=PublicAccess.Container)
        except:
            pass
        
        # Convert the file into a blob and store it in Microsoft Azure Blob Storage
        # A try/except block is used due to an occasional logger exception (doesn't impact function)
        try:
            block_blob_service.create_blob_from_path(CONTAINER_NAME, FILE_NAME, FULL_FILE_PATH)
        except Exception:
            pass
        
        # Get Video URL
        url = "https://" + ACCOUNT_NAME + ".blob.core.windows.net/" + CONTAINER_NAME + "/" + FILE_NAME

        # Get Access Token
        token = ""
        headers = {
            'Ocp-Apim-Subscription-Key': API_KEY,
        }
        params = urllib.urlencode({
            'allowEdit': 'True',
        })

        # HTTP GET request to Video Indexer API to acquire access token
        try:
            conn = httplib.HTTPSConnection('api.videoindexer.ai')
            conn.request("GET", "/auth/" + ACCOUNT_LOCATION + "/Accounts/" + ACCOUNT_ID + "/AccessToken?%s" % params, headers=headers)
            response = conn.getresponse()
            token = response.read()
            token = token[1:len(token)-1]
            conn.close()
        except Exception as e:
            print("[Errno {0}] {1}".format(e.errno, e.strerror))
        
        # Use Access Token to upload Video file 
        headers = {
            'Content-Type': 'multipart/form-data',
        }
        params = urllib.urlencode({
            'Content-Type': 'multipart/form-data',
            'videoUrl': url,
            'streamingPreset': 'Default',
            'privacy': "Public"
        })
        try:
            conn = httplib.HTTPSConnection('api.videoindexer.ai')
            conn.request("POST", "/" + ACCOUNT_LOCATION + "/Accounts/" + ACCOUNT_ID + "/Videos?accessToken=" +token + "&name=Sample&%s" % params, headers=headers)
            response = conn.getresponse()
            data = response.read()
            d = json.loads(data)
            conn.close()
        except Exception as e:
            print("[Errno {0}] {1}".format(e.errno, e.strerror))

        botengine.get_logger().info('Video Processing..')

        # Get Video Index
        params = urllib.urlencode({
            'accessToken': token,
            'language': 'English',
        })
        conn = httplib.HTTPSConnection('api.videoindexer.ai')
        conn.request("GET", "/" + ACCOUNT_LOCATION + "/Accounts/" + ACCOUNT_ID + "/Videos/"+d["id"]+"/Index?%s" % params)
        response = conn.getresponse()
        data = response.read()
        result = json.loads(data)
        # Set a timer to queue for results
        # Passing in Video ID and Access Token
        self.start_timer_s(botengine, 5, [d["id"], token])
        return
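
    # --- Hedged sketch (not part of the original): a timer handler that re-queries the
    # Video Indexer index until processing finishes. The method name timer_fired() and its
    # signature are assumptions about the bot framework, not confirmed by this file.
    #
    # def timer_fired(self, botengine, argument):
    #     video_id, token = argument
    #     params = urllib.urlencode({'accessToken': token, 'language': 'English'})
    #     conn = httplib.HTTPSConnection('api.videoindexer.ai')
    #     conn.request("GET", "/" + ACCOUNT_LOCATION + "/Accounts/" + ACCOUNT_ID +
    #                  "/Videos/" + video_id + "/Index?%s" % params)
    #     result = json.loads(conn.getresponse().read())
    #     conn.close()
    #     if result.get("state") == "Processed":
    #         botengine.get_logger().info("Video %s processed" % video_id)
    #     else:
    #         self.start_timer_s(botengine, 5, [video_id, token])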
from datetime import datetime, timedelta

# Imports needed by this snippet (module paths assume the legacy azure-storage 1.x/2.x SDK)
from azure.storage.blob import BlockBlobService, ContainerPermissions
from azure.storage.common.models import AccessPolicy

# The name of the new Shared Access policy
policy_name = "readandlistonly"
# The Storage Account Name
storage_account_name = "mystore"
storage_account_key = "mykey"
storage_container_name = "mycontainer"
example_file_path = "..\\sampledata\\sample.log"

# Create the blob service, using the name and key for your Azure Storage account
blob_service = BlockBlobService(storage_account_name, storage_account_key)

# Create the container, if it does not already exist
blob_service.create_container(storage_container_name)

# Upload an example file to the container
blob_service.create_blob_from_path(storage_container_name, "sample.log", example_file_path)

# Create a new policy that expires after a week
access_policy = AccessPolicy(
    permission=ContainerPermissions.READ + ContainerPermissions.LIST, expiry=datetime.utcnow() + timedelta(weeks=1)
)


# Get the existing identifiers (policies) for the container
identifiers = blob_service.get_container_acl(storage_container_name)
# And add the new one to the list
identifiers[policy_name] = access_policy
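
# --- A hedged continuation (not part of the original snippet): push the updated policy set
# back to the container and generate a SAS token that references the stored policy.
# Call names follow the legacy BlockBlobService API used above.
blob_service.set_container_acl(storage_container_name, signed_identifiers=identifiers)

# Generate a blob-level SAS bound to the stored policy and build a URL for the sample blob
sas_token = blob_service.generate_blob_shared_access_signature(
    storage_container_name, "sample.log", id=policy_name
)
sas_url = blob_service.make_blob_url(storage_container_name, "sample.log", sas_token=sas_token)
print(sas_url)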
Esempio n. 47
0
print(' - Pushing selected files to Azure for ' + Ldir['date_string'])
f_string = 'f' + Ldir['date_string']

# Azure commands
from azure.storage.blob import BlockBlobService
from azure.storage.blob import PublicAccess
ff_string = f_string.replace('.','') # azure does not like dots in container names
# account name and key
azu_dict = Lfun.csv_to_dict(Ldir['data'] + 'accounts/azure_pm_2015.05.25.csv')
account = azu_dict['account']
key = azu_dict['key']
containername = ff_string
# get a handle to the account
blob_service = BlockBlobService(account_name=account, account_key=key)
blob_service.create_container(containername)
blob_service.set_container_acl(containername, public_access=PublicAccess.Container)

# input directory
in_dir = Ldir['roms'] + 'output/' + Ldir['gtagex'] + '/' + f_string + '/'
# output files
out_list = ['ocean_surface.nc', 'low_passed_UBC.nc']

def write_to_azure(out_fn, blob_service, containername, out_name):
    # write it to Azure
    try:
        bname = open(out_fn, 'rb')
        blob_service.create_blob_from_stream(containername, out_name, bname)
        print('done putting ' + out_name)
        bname.close()
        result = 'success'
Esempio n. 48
0
class AzureBlobWriter(BaseWriter):
    """
    Writes items to azure blob containers.

        - account_name (str)
            Name of the Azure storage account.

        - account_key (str)
            Access key for the Azure storage account.

        - container (str)
            Blob container name.
    """
    supported_options = {
        'account_name': {'type': six.string_types, 'env_fallback': 'EXPORTERS_AZUREWRITER_NAME'},
        'account_key': {'type': six.string_types, 'env_fallback': 'EXPORTERS_AZUREWRITER_KEY'},
        'container': {'type': six.string_types}
    }
    hash_algorithm = 'md5'
    VALID_CONTAINER_NAME_RE = r'[a-zA-Z0-9-]{3,63}'

    def __init__(self, *args, **kw):
        from azure.storage.blob import BlockBlobService
        super(AzureBlobWriter, self).__init__(*args, **kw)
        account_name = self.read_option('account_name')
        account_key = self.read_option('account_key')

        self.container = self.read_option('container')
        if '--' in self.container or not re.match(self.VALID_CONTAINER_NAME_RE, self.container):
            help_url = ('https://azure.microsoft.com/en-us/documentation'
                        '/articles/storage-python-how-to-use-blob-storage/')
            warnings.warn("Container name %s doesn't conform with naming rules (see: %s)"
                          % (self.container, help_url))

        self.azure_service = BlockBlobService(account_name, account_key)
        self.azure_service.create_container(self.container)
        self.logger.info('AzureBlobWriter has been initiated. '
                         'Writing to container {}'.format(self.container))
        self.set_metadata('files_counter', 0)
        self.set_metadata('blobs_written', [])

    def write(self, dump_path, group_key=None):
        self.logger.info('Start uploading {} to {}'.format(dump_path, self.container))
        self._write_blob(dump_path)
        self.set_metadata('files_counter', self.get_metadata('files_counter') + 1)

    @retry_long
    def _write_blob(self, dump_path):
        blob_name = dump_path.split('/')[-1]
        self.azure_service.create_blob_from_path(
            self.read_option('container'),
            blob_name,
            dump_path,
            max_connections=5,
        )
        self.logger.info('Saved {}'.format(blob_name))
        self._update_metadata(dump_path, blob_name)

    def _update_metadata(self, dump_path, blob_name):
        buffer_info = self.write_buffer.metadata[dump_path]
        file_info = {
            'blob_name': blob_name,
            'size': buffer_info['size'],
            'hash': b64encode(unhexlify(buffer_info['file_hash'])),
            'number_of_records': buffer_info['number_of_records']
        }
        self.get_metadata('blobs_written').append(file_info)

    def _check_write_consistency(self):
        from azure.common import AzureMissingResourceHttpError
        for blob_info in self.get_metadata('blobs_written'):
            try:
                blob = self.azure_service.get_blob_properties(
                    self.read_option('container'), blob_info['blob_name'])
                blob_size = blob.properties.content_length
                blob_md5 = blob.properties.content_settings.content_md5
                if str(blob_size) != str(blob_info['size']):
                    raise InconsistentWriteState(
                        'File {} has unexpected size. (expected {} - got {})'.format(
                            blob_info['blob_name'], blob_info['size'], blob_size
                        )
                    )
                if str(blob_md5) != str(blob_info['hash']):
                    raise InconsistentWriteState(
                        'File {} has unexpected hash. (expected {} - got {})'.format(
                            blob_info['blob_name'], blob_info['hash'], blob_md5
                        )
                    )

            except AzureMissingResourceHttpError:
                raise InconsistentWriteState('Missing blob {}'.format(blob_info['blob_name']))
        self.logger.info('Consistency check passed')
Esempio n. 49
0
from azure.storage.blob import BlockBlobService
from threading import Thread
import csv
import json
import sys
import time
import smtplib
import string
from sense_hat import SenseHat


print 'Connecting to Azure...'
# Azure 
name='perfectndsa'
key = '41eoKtqzIXvovIzQhWYi5ZK+JY7PU1/6Tp5fHpnFm9v37z1OPRTCWaJ1hhhQh6rqeF+u3BKXjaUAJ8SgXnhzWQ=='
block_blob = BlockBlobService(account_name=name, account_key=key)
block_blob.create_container('example')

print 'Initializing sensor...'
# Sensor
hat = SenseHat()

# Send data to azure method
def send_data(data):
    json_data = json.dumps(data)
    block_blob.create_blob_from_text("example", 'data.json', json_data)

# Prepare output csv
with open('data.csv', 'w') as data_file:
    writer = csv.writer(data_file, delimiter=',')
    writer.writerow(["Timestamp", "Temperature_hum (Cº)", "Temperature_press (Cº)", "Humidity (%rH)", "Pressure (mBars)"])
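
# --- A hedged sketch of the sampling loop this snippet appears to lead into (the original
# example is truncated here). Field order follows the CSV header above; the 60-second
# interval and the JSON field names are assumptions.
while True:
    row = [time.time(),
           hat.get_temperature(),                # temperature from the humidity sensor
           hat.get_temperature_from_pressure(),  # temperature from the pressure sensor
           hat.get_humidity(),
           hat.get_pressure()]
    # Append the reading to the local CSV log
    with open('data.csv', 'a') as data_file:
        csv.writer(data_file, delimiter=',').writerow(row)
    # Push the latest reading to the Azure blob defined in send_data() above
    send_data({'timestamp': row[0],
               'temperature_hum': row[1],
               'temperature_press': row[2],
               'humidity': row[3],
               'pressure': row[4]})
    time.sleep(60)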
Esempio n. 50
0
    print("The storage account was taken of a EXISTING storage account.")


'''
Get the keys to create a container that will hold the virtual hard disks
'''
keys = sms.get_storage_account_keys(name_account)
primary_key = keys.storage_service_keys.primary
'''
Create a container
'''
block_blob_service = BlockBlobService(account_name = name_account, account_key = primary_key )
name_container = "azbrcontainer" + str(hash_number)
list_containers = list(block_blob_service.list_containers())
if len(list_containers)==0:
    container = block_blob_service.create_container(name_container)
    #wait_for_async(container.request_id, 'Creating storage account', 600)


list_containers = list(block_blob_service.list_containers())
container = list_containers[0]
name_container = container.name

block_blob_service.set_container_acl(name_container, public_access=PublicAccess.Container)


''' FINISHING CREATION OF ACCOUNTS OR GETTING EXISTING ACCOUNTS '''


'''Create the deployments and, in each deployment, a machine'''
 
Esempio n. 51
0
class AzureStorage(object):
    """
        Storage object representing files on an Azure Storage container.

        Usage::

            from flask_admin.contrib.fileadmin import BaseFileAdmin
            from flask_admin.contrib.fileadmin.azure import AzureStorage

            class MyAzureAdmin(BaseFileAdmin):
                # Configure your class however you like
                pass

            fileadmin_view = MyAzureAdmin(storage=AzureStorage(...))

    """
    _fakedir = '.dir'
    _copy_poll_interval_seconds = 1
    _send_file_lookback = timedelta(minutes=15)
    _send_file_validity = timedelta(hours=1)
    separator = '/'

    def __init__(self, container_name, connection_string):
        """
            Constructor

            :param container_name:
                Name of the container that the files are on.

            :param connection_string:
                Azure Blob Storage Connection String
        """

        if not BlockBlobService:
            raise ValueError('Could not import Azure Blob Storage SDK. '
                             'You can install the SDK using '
                             'pip install azure-storage-blob')

        self._container_name = container_name
        self._connection_string = connection_string
        self.__client = None

    @property
    def _client(self):
        if not self.__client:
            self.__client = BlockBlobService(
                connection_string=self._connection_string)
            self.__client.create_container(
                self._container_name, fail_on_exist=False)
        return self.__client

    @classmethod
    def _get_blob_last_modified(cls, blob):
        last_modified = blob.properties.last_modified
        tzinfo = last_modified.tzinfo
        epoch = last_modified - datetime(1970, 1, 1, tzinfo=tzinfo)
        return epoch.total_seconds()

    @classmethod
    def _ensure_blob_path(cls, path):
        if path is None:
            return None

        path_parts = path.split(op.sep)
        return cls.separator.join(path_parts).lstrip(cls.separator)

    def get_files(self, path, directory):
        if directory and path != directory:
            path = op.join(path, directory)

        path = self._ensure_blob_path(path)
        directory = self._ensure_blob_path(directory)

        path_parts = path.split(self.separator) if path else []
        num_path_parts = len(path_parts)
        folders = set()
        files = []

        for blob in self._client.list_blobs(self._container_name, path):
            blob_path_parts = blob.name.split(self.separator)
            name = blob_path_parts.pop()

            blob_is_file_at_current_level = blob_path_parts == path_parts
            blob_is_directory_file = name == self._fakedir

            if blob_is_file_at_current_level and not blob_is_directory_file:
                rel_path = blob.name
                is_dir = False
                size = blob.properties.content_length
                last_modified = self._get_blob_last_modified(blob)
                files.append((name, rel_path, is_dir, size, last_modified))
            else:
                next_level_folder = blob_path_parts[:num_path_parts + 1]
                folder_name = self.separator.join(next_level_folder)
                folders.add(folder_name)

        folders.discard(directory)
        for folder in folders:
            name = folder.split(self.separator)[-1]
            rel_path = folder
            is_dir = True
            size = 0
            last_modified = 0
            files.append((name, rel_path, is_dir, size, last_modified))

        return files

    def is_dir(self, path):
        path = self._ensure_blob_path(path)

        num_blobs = 0
        for blob in self._client.list_blobs(self._container_name, path):
            blob_path_parts = blob.name.split(self.separator)
            is_explicit_directory = blob_path_parts[-1] == self._fakedir
            if is_explicit_directory:
                return True

            num_blobs += 1
            path_cannot_be_leaf = num_blobs >= 2
            if path_cannot_be_leaf:
                return True

        return False

    def path_exists(self, path):
        path = self._ensure_blob_path(path)

        if path == self.get_base_path():
            return True

        try:
            next(iter(self._client.list_blobs(self._container_name, path)))
        except StopIteration:
            return False
        else:
            return True

    def get_base_path(self):
        return ''

    def get_breadcrumbs(self, path):
        path = self._ensure_blob_path(path)

        accumulator = []
        breadcrumbs = []
        for folder in path.split(self.separator):
            accumulator.append(folder)
            breadcrumbs.append((folder, self.separator.join(accumulator)))
        return breadcrumbs

    def send_file(self, file_path):
        file_path = self._ensure_blob_path(file_path)

        if not self._client.exists(self._container_name, file_path):
            raise ValueError()

        now = datetime.utcnow()
        url = self._client.make_blob_url(self._container_name, file_path)
        sas = self._client.generate_blob_shared_access_signature(
            self._container_name, file_path,
            BlobPermissions.READ,
            expiry=now + self._send_file_validity,
            start=now - self._send_file_lookback)
        return redirect('%s?%s' % (url, sas))

    def read_file(self, path):
        path = self._ensure_blob_path(path)

        blob = self._client.get_blob_to_bytes(self._container_name, path)
        return blob.content

    def write_file(self, path, content):
        path = self._ensure_blob_path(path)

        self._client.create_blob_from_text(self._container_name, path, content)

    def save_file(self, path, file_data):
        path = self._ensure_blob_path(path)

        self._client.create_blob_from_stream(self._container_name, path,
                                             file_data.stream)

    def delete_tree(self, directory):
        directory = self._ensure_blob_path(directory)

        for blob in self._client.list_blobs(self._container_name, directory):
            self._client.delete_blob(self._container_name, blob.name)

    def delete_file(self, file_path):
        file_path = self._ensure_blob_path(file_path)

        self._client.delete_blob(self._container_name, file_path)

    def make_dir(self, path, directory):
        path = self._ensure_blob_path(path)
        directory = self._ensure_blob_path(directory)

        blob = self.separator.join([path, directory, self._fakedir])
        blob = blob.lstrip(self.separator)
        self._client.create_blob_from_text(self._container_name, blob, '')

    def _copy_blob(self, src, dst):
        src_url = self._client.make_blob_url(self._container_name, src)
        copy = self._client.copy_blob(self._container_name, dst, src_url)
        while copy.status != 'success':
            sleep(self._copy_poll_interval_seconds)
            copy = self._client.get_blob_properties(
                self._container_name, dst).properties.copy

    def _rename_file(self, src, dst):
        self._copy_blob(src, dst)
        self.delete_file(src)

    def _rename_directory(self, src, dst):
        for blob in self._client.list_blobs(self._container_name, src):
            self._rename_file(blob.name, blob.name.replace(src, dst, 1))

    def rename_path(self, src, dst):
        src = self._ensure_blob_path(src)
        dst = self._ensure_blob_path(dst)

        if self.is_dir(src):
            self._rename_directory(src, dst)
        else:
            self._rename_file(src, dst)
class AzureIOStore(IOStore):
    """
    A class that lets you get input from and send output to Azure Storage.
    
    """
    
    def __init__(self, account_name, container_name, name_prefix=""):
        """
        Make a new AzureIOStore that reads from and writes to the given
        container in the given account, adding the given prefix to keys. All
        paths will be interpreted as keys or key prefixes.
        
        If the name prefix does not end with a trailing slash, and is not empty,
        one will be added automatically.
        
        Account keys are retrieved from the AZURE_ACCOUNT_KEY environment
        variable or from the ~/.toilAzureCredentials file, as in Toil itself.
        
        """
        
        # Make sure azure libraries actually loaded
        assert(have_azure)
        
        self.account_name = account_name
        self.container_name = container_name
        self.name_prefix = name_prefix
        
        if self.name_prefix != "" and not self.name_prefix.endswith("/"):
            # Make sure it has the trailing slash required.
            self.name_prefix += "/"
        
        # Sneak into Toil and use the same keys it uses
        self.account_key = toil.jobStores.azureJobStore._fetchAzureAccountKey(
            self.account_name)
            
        # This will hold our Azure blob store connection
        self.connection = None
        
    def __getstate__(self):
        """
        Return the state to use for pickling. We don't want to try and pickle
        an open Azure connection.
        """
     
        return (self.account_name, self.account_key, self.container_name, 
            self.name_prefix)
        
    def __setstate__(self, state):
        """
        Set up after unpickling.
        """
        
        self.account_name = state[0]
        self.account_key = state[1]
        self.container_name = state[2]
        self.name_prefix = state[3]
        
        self.connection = None
        
    def __connect(self):
        """
        Make sure we have an Azure connection, and set one up if we don't.
        """
        
        if self.connection is None:
            RealTimeLogger.get().debug("Connecting to account {}, using "
                "container {} and prefix {}".format(self.account_name,
                self.container_name, self.name_prefix))
        
            # Connect to the blob service where we keep everything
            self.connection = BlockBlobService(
                account_name=self.account_name, account_key=self.account_key)
            
    @backoff        
    def read_input_file(self, input_path, local_path):
        """
        Get input from Azure.
        """
        
        self.__connect()
        
        
        RealTimeLogger.get().debug("Loading {} from AzureIOStore".format(
            input_path))
        
        # Download the blob. This is known to be synchronous, although it can
        # call a callback during the process.
        self.connection.get_blob_to_path(self.container_name,
            self.name_prefix + input_path, local_path)
            
    def list_input_directory(self, input_path, recursive=False,
        with_times=False):
        """
        Loop over fake /-delimited directories on Azure. The prefix may or may
        not have a trailing slash; if not, one will be added automatically.
        
        Returns the names of files and fake directories in the given input fake
        directory, non-recursively.
        
        If with_times is specified, will yield (name, time) pairs including
        modification times as datetime objects. Times on directories are None.
        
        """
        
        self.__connect()
        
        RealTimeLogger.get().info("Enumerating {} from AzureIOStore".format(
            input_path))
        
        # Work out what the directory name to list is
        fake_directory = self.name_prefix + input_path
        
        if fake_directory != "" and not fake_directory.endswith("/"):
            # We have a nonempty prefix, and we need to end it with a slash
            fake_directory += "/"
        
        # This will hold the marker that we need to send back to get the next
        # page, if there is one. See <http://stackoverflow.com/a/24303682>
        marker = None
        
        # This holds the subdirectories we found; we yield each exactly once if
        # we aren't recursing.
        subdirectories = set()
        
        while True:
        
            # Get the results from Azure. We don't use delimiter since Azure
            # doesn't seem to provide the placeholder entries it's supposed to.
            result = self.connection.list_blobs(self.container_name, 
                prefix=fake_directory, marker=marker)
                
            RealTimeLogger.get().info("Found {} files".format(len(result)))
                
            for blob in result:
                # Yield each result's blob name, but directory names only once
                
                # Drop the common prefix
                relative_path = blob.name[len(fake_directory):]
                
                if (not recursive) and "/" in relative_path:
                    # We found a file in a subdirectory, and we aren't supposed
                    # to be recursing.
                    subdirectory, _ = relative_path.split("/", 1)
                    
                    if subdirectory not in subdirectories:
                        # It's a new subdirectory. Yield and remember it
                        subdirectories.add(subdirectory)
                        
                        if with_times:
                            yield subdirectory, None
                        else:
                            yield subdirectory
                else:
                    # We found an actual file 
                    if with_times:
                        mtime = blob.properties.last_modified
                        
                        if isinstance(mtime, datetime.datetime):
                            # Make sure we're getting proper localized datetimes
                            # from the new Azure Storage API.
                            assert(mtime.tzinfo is not None and
                                mtime.tzinfo.utcoffset(mtime) is not None)
                        else:
                            # Convert mtime from a string as in the old API.
                            mtime = dateutil.parser.parse(mtime).replace(
                                tzinfo=dateutil.tz.tzutc())
                            
                        yield relative_path, mtime
                            
                    else:
                        yield relative_path
                
            # Save the marker
            marker = result.next_marker
                
            if not marker:
                break
                
    @backoff
    def write_output_file(self, local_path, output_path):
        """
        Write output to Azure. Will create the container if necessary.
        """
        
        self.__connect()
        
        RealTimeLogger.get().debug("Saving {} to AzureIOStore".format(
            output_path))
        
        try:
            # Make the container
            self.connection.create_container(self.container_name)
        except azure.WindowsAzureConflictError:
            # The container probably already exists
            pass
        
        # Upload the blob (synchronously)
        # TODO: catch no container error here, make the container, and retry
        # BlockBlobService exposes create_blob_from_path (the legacy put_block_blob_from_path
        # only existed on the old BlobService client)
        self.connection.create_blob_from_path(self.container_name,
            self.name_prefix + output_path, local_path)
    
    @backoff        
    def exists(self, path):
        """
        Returns true if the given input or output file exists in Azure already.
        
        """
        
        self.__connect()
        
        marker = None
        
        while True:
        
            # Get the results from Azure.
            result = self.connection.list_blobs(self.container_name, 
                prefix=self.name_prefix + path, marker=marker)
                
            for blob in result:
                # Look at each blob
                
                if blob.name == self.name_prefix + path:
                    # Found it
                    return True
                
            # Save the marker
            marker = result.next_marker
                
            if not marker:
                break 
        
        return False
        
        
    @backoff        
    def get_mtime(self, path):
        """
        Returns the modification time of the given blob if it exists, or None
        otherwise.
        
        """
        
        self.__connect()
        
        marker = None
        
        while True:
        
            # Get the results from Azure.
            result = self.connection.list_blobs(self.container_name, 
                prefix=self.name_prefix + path, marker=marker)
                
            for blob in result:
                # Look at each blob
                
                if blob.name == self.name_prefix + path:
                    # Found it
                    mtime = blob.properties.last_modified
                        
                    if isinstance(mtime, datetime.datetime):
                        # Make sure we're getting proper localized datetimes
                        # from the new Azure Storage API.
                        assert(mtime.tzinfo is not None and
                            mtime.tzinfo.utcoffset(mtime) is not None)
                    else:
                        # Convert mtime from a string as in the old API.
                        mtime = dateutil.parser.parse(mtime).replace(
                            tzinfo=dateutil.tz.tzutc())
                            
                    return mtime
                
            # Save the marker
            marker = result.next_marker
                
            if not marker:
                break 
        
        return None
        
    @backoff        
    def get_size(self, path):
        """
        Returns the size in bytes of the given blob if it exists, or None
        otherwise.
        
        """
        
        self.__connect()
        
        marker = None
        
        while True:
        
            # Get the results from Azure.
            result = self.connection.list_blobs(self.container_name, 
                prefix=self.name_prefix + path, marker=marker)
                
            for blob in result:
                # Look at each blob
                
                if blob.name == self.name_prefix + path:
                    # Found it
                    size = blob.properties.content_length
                    
                    return size
                
            # Save the marker
            marker = result.next_marker
                
            if not marker:
                break 
        
        return None
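# --- Hedged usage sketch for AzureIOStore (the account, container and key names below are
# illustrative placeholders, not taken from the original):
#
#     store = AzureIOStore("myaccount", "mycontainer", name_prefix="run1")
#     store.write_output_file("/tmp/local_result.txt", "results/result.txt")
#     if store.exists("results/result.txt"):
#         print(store.get_size("results/result.txt"), store.get_mtime("results/result.txt"))
#     for name in store.list_input_directory("results"):
#         print(name)
#     store.read_input_file("results/result.txt", "/tmp/downloaded.txt")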
Esempio n. 53
0
class AzureStorage(plugins.IStoragePlugin):

    def configure(self, config):
        self.storage = BlockBlobService(account_name=config['account_name'], account_key=config['account_key'])
        self.container = config['container']
        try:
            container = self.storage.get_container_properties(self.container)
            log.info("Configuring Azure blob storage %s/%s", self.storage.account_name, self.container)
        except AzureMissingResourceHttpError as e:
            log.warning("Container '%s' is missing in account '%s', trying to create new", self.container, self.storage.account_name)
            try:
                self.storage.create_container(self.container)
                self.storage.set_container_acl(self.container, public_access=PublicAccess.Container)
            except Exception as e:
                log.critical("Cannot create new container: %s", e)
                raise plugins.PluginInitException("Cannot create new container")
        except AzureHttpError as e:
            log.critical("Cannot access container '%s' in account '%s': %s", self.container, self.storage.account_name, e)
            raise plugins.PluginInitException("Cannot access container")
        except Exception as e:
            log.critical("Cannot access container '%s' in account '%s': %s", self.container, self.storage.account_name, e)
            raise plugins.PluginInitException("Cannot access container")

    def delete(self, key):
        log.info("Deleting file '%s' from %s/%s", key, self.storage.account_name, self.container)
        try:
            self.storage.delete_blob(self.container, key)
        except AzureMissingResourceHttpError:
            log.error("File '%s' was not found in %s/%s", key, self.storage.account_name, self.container)
            raise common.NotFound('File not found')
        except Exception as e:
            log.error("Cannot delete '%s' from %s/%s: %s", key, self.storage.account_name, self.container, e)
            raise common.FatalError(e)

    def put(self, key, filename=None, file=None):
        storage_key = key
        try:
            if filename:
                log.debug("Uploading %s to %s", filename, self.storage.make_blob_url(self.container, storage_key))
                self.storage.create_blob_from_path(self.container, storage_key, filename, content_settings=ContentSettings(content_type='application/octet-stream'))
            elif file:
                old_pos = file.tell()
                file.seek(0)
                log.debug("Uploading from stream to %s", self.storage.make_blob_url(self.container, storage_key))
                self.storage.create_blob_from_stream(self.container, storage_key, file, content_settings=ContentSettings(content_type='application/octet-stream'))
                file.seek(old_pos)
        except Exception as e:
            # TODO: more detailed error inspection
            log.critical("Error uploading to %s/%s: %s", self.storage.account_name, self.container, e)
            raise common.FatalError(e)
        return storage_key


    def get(self, key, stream):
        # the current Azure Python SDK can barely work with non-seekable streams,
        # so we have to implement chunking on our own
        # TODO: proper ranging? The RFC says the server SHOULD return 416 once a range is
        # unsatisfiable, but Azure is OK with an end pos > blob length as long as the blob is not empty
        chunk_size = 4*1024*1024
        chunk_start = 0
        chunk_end = chunk_size - 1
        while True:
            try:
                chunk = self.storage._get_blob(self.container, key, start_range=chunk_start, end_range=chunk_end)
                log.debug("Writing %s bytes from %s", len(chunk.content), chunk_start)
                stream.write(chunk.content)
            except IOError:
                # remote side closed connection
                return
            except AzureMissingResourceHttpError as e:
                raise common.NotFound(e)
            except (AzureHttpError, AzureException) as e:
                raise common.TemporaryError('Error while downloading {}: {}'.format(key, e))

            chunk_start, chunk_end, blob_size = map(int, re.match(r'^bytes\s+(\d+)-(\d+)/(\d+)$', chunk.properties.content_range).groups())
            if chunk_end == blob_size - 1:
                # no more data to stream
                break
            else:
                chunk_start = chunk_end + 1
                chunk_end += chunk_size
        return 0
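
    # --- Hedged usage sketch (not part of the original plugin). The configuration keys match
    # configure() above; the values and the surrounding call sequence are assumptions:
    #
    #     plugin = AzureStorage()
    #     plugin.configure({'account_name': '<account>',
    #                       'account_key': '<key>',
    #                       'container': '<container>'})
    #     plugin.put('artifacts/build.tar.gz', filename='/tmp/build.tar.gz')
    #     with open('/tmp/downloaded.tar.gz', 'wb') as fh:
    #         plugin.get('artifacts/build.tar.gz', fh)
    #     plugin.delete('artifacts/build.tar.gz')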
    def test_job_level_mounting(self, resource_group, location, cluster, storage_account, storage_account_key):
        """Tests if it's possible to mount external file systems for a job."""
        job_name = 'job'

        # Create file share and container to mount on the job level
        if storage_account.name != helpers.FAKE_STORAGE.name:
            files = FileService(storage_account.name, storage_account_key)
            files.create_share('jobshare', fail_on_exist=False)
            blobs = BlockBlobService(storage_account.name, storage_account_key)
            blobs.create_container('jobcontainer', fail_on_exist=False)

        job = self.client.jobs.create(
            resource_group.name,
            job_name,
            parameters=models.JobCreateParameters(
                location=location,
                cluster=models.ResourceId(id=cluster.id),
                node_count=1,
                mount_volumes=models.MountVolumes(
                    azure_file_shares=[
                        models.AzureFileShareReference(
                            account_name=storage_account.name,
                            azure_file_url='https://{0}.file.core.windows.net/{1}'.format(
                                storage_account.name, 'jobshare'),
                            relative_mount_path='job_afs',
                            credentials=models.AzureStorageCredentialsInfo(
                                account_key=storage_account_key
                            ),
                        )
                    ],
                    azure_blob_file_systems=[
                        models.AzureBlobFileSystemReference(
                            account_name=storage_account.name,
                            container_name='jobcontainer',
                            relative_mount_path='job_bfs',
                            credentials=models.AzureStorageCredentialsInfo(
                                account_key=storage_account_key
                            ),
                        )
                    ]
                ),
                # Put standard output on cluster level AFS to check that the job has access to it.
                std_out_err_path_prefix='$AZ_BATCHAI_MOUNT_ROOT/{0}'.format(helpers.AZURE_FILES_MOUNTING_PATH),
                # Create two output directories on job level AFS and blobfuse.
                output_directories=[
                    models.OutputDirectory(id='OUTPUT1', path_prefix='$AZ_BATCHAI_JOB_MOUNT_ROOT/job_afs'),
                    models.OutputDirectory(id='OUTPUT2', path_prefix='$AZ_BATCHAI_JOB_MOUNT_ROOT/job_bfs')
                ],
                # Check that the job preparation has access to job level file systems.
                job_preparation=models.JobPreparation(
                    command_line='echo afs > $AZ_BATCHAI_OUTPUT_OUTPUT1/prep_afs.txt; '
                                 'echo bfs > $AZ_BATCHAI_OUTPUT_OUTPUT2/prep_bfs.txt; '
                                 'echo done'
                ),
                # Check that the job has access to job
                custom_toolkit_settings=models.CustomToolkitSettings(
                    command_line='echo afs > $AZ_BATCHAI_OUTPUT_OUTPUT1/job_afs.txt; '
                                 'echo bfs > $AZ_BATCHAI_OUTPUT_OUTPUT2/job_bfs.txt; '
                                 'mkdir $AZ_BATCHAI_OUTPUT_OUTPUT1/afs; '
                                 'echo afs > $AZ_BATCHAI_OUTPUT_OUTPUT1/afs/job_afs.txt; '
                                 'mkdir $AZ_BATCHAI_OUTPUT_OUTPUT2/bfs; '
                                 'echo bfs > $AZ_BATCHAI_OUTPUT_OUTPUT2/bfs/job_bfs.txt; '
                                 'echo done'
                )
            )
        ).result()
        self.assertEqual(
            helpers.wait_for_job_completion(self.is_live, self.client, resource_group.name, job.name,
                                            helpers.MINUTE),
            models.ExecutionState.succeeded)

        job = self.client.jobs.get(resource_group.name, job.name)
        # Assert job and job prep standard output is populated on cluster level filesystem
        helpers.assert_job_files_are(self, self.client, resource_group.name, job.name,
                                     helpers.STANDARD_OUTPUT_DIRECTORY_ID,
                                     {u'stdout.txt': u'done\n', u'stderr.txt': u'',
                                      u'stdout-job_prep.txt': u'done\n', u'stderr-job_prep.txt': u''})
        # Assert files are generated on job level AFS
        helpers.assert_job_files_are(self, self.client, resource_group.name, job.name, 'OUTPUT1',
                                     {u'job_afs.txt': u'afs\n', u'prep_afs.txt': u'afs\n', u'afs': None})
        # Assert files are generated on job level blobfuse
        helpers.assert_job_files_are(self, self.client, resource_group.name, job.name, 'OUTPUT2',
                                     {u'job_bfs.txt': u'bfs\n', u'prep_bfs.txt': u'bfs\n', u'bfs': None})
        # Assert subfolders are available via API
        helpers.assert_job_files_in_path_are(self, self.client, resource_group.name, job.name, 'OUTPUT1',
                                             'afs', {u'job_afs.txt': u'afs\n'})
        helpers.assert_job_files_in_path_are(self, self.client, resource_group.name, job.name, 'OUTPUT2',
                                             'bfs', {u'job_bfs.txt': u'bfs\n'})

        # Assert that we can access the output files created on job level mount volumes directly in storage using path
        # segment returned by the server.
        if storage_account.name != helpers.FAKE_STORAGE.name:
            files = FileService(storage_account.name, storage_account_key)
            self.assertTrue(
                files.exists('jobshare', job.job_output_directory_path_segment +
                             '/' + helpers.OUTPUT_DIRECTORIES_FOLDER_NAME, 'job_afs.txt'))
            blobs = BlockBlobService(storage_account.name, storage_account_key)
            self.assertTrue(
                blobs.exists('jobcontainer', job.job_output_directory_path_segment +
                             '/' + helpers.OUTPUT_DIRECTORIES_FOLDER_NAME + '/job_bfs.txt'))
        # After the job is done the filesystems should be unmounted automatically, check this by submitting a new job.
        checker = self.client.jobs.create(
            resource_group.name,
            'checker',
            parameters=models.JobCreateParameters(
                location=location,
                cluster=models.ResourceId(id=cluster.id),
                node_count=1,
                std_out_err_path_prefix='$AZ_BATCHAI_MOUNT_ROOT/{0}'.format(helpers.AZURE_FILES_MOUNTING_PATH),
                custom_toolkit_settings=models.CustomToolkitSettings(
                    command_line='echo job; df | grep -E "job_bfs|job_afs"'
                )
            )
        ).result()
        # Check the job failed because there are not job level mount volumes anymore
        self.assertEqual(
            helpers.wait_for_job_completion(self.is_live, self.client, resource_group.name, checker.name,
                                            helpers.MINUTE),
            models.ExecutionState.failed)
        # Check that the cluster level AFS was still mounted
        helpers.assert_job_files_are(self, self.client, resource_group.name, checker.name,
                                     helpers.STANDARD_OUTPUT_DIRECTORY_ID,
                                     {u'stdout.txt': u'job\n', u'stderr.txt': u''})
Esempio n. 55
0
# -*- coding: utf-8 -*-
import os
from azure.storage.blob import BlockBlobService, ContentSettings

ACCOUNT = os.environ.get('BLOB_ACCOUNT')
KEY = os.environ.get('BLOB_KEY')
CONTAINER = 'beercartography'

block_blob_service = BlockBlobService(account_name=ACCOUNT, account_key=KEY)
block_blob_service.create_container(CONTAINER)


def save_file(stream, filename, mimetype):
    block_blob_service.create_blob_from_stream(
            CONTAINER,
            filename,
            stream,
            content_settings=ContentSettings(content_type=mimetype)
        )


def get_images():
    generator = block_blob_service.list_blobs(CONTAINER)
    for blob in generator:
        yield blob.name


def get_image(filename):
    blob = block_blob_service.get_blob_to_bytes(CONTAINER, filename)
    content = blob.content
    return content
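
# --- Hedged usage sketch (not part of the original module). A Flask-style upload view is
# shown only as an illustration; the route, view name and request handling are assumptions:
#
#     from flask import request
#
#     @app.route('/upload', methods=['POST'])
#     def upload():
#         f = request.files['image']
#         save_file(f.stream, f.filename, f.mimetype)
#         return 'uploaded'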
Esempio n. 56
0
class AzureTransfer(BaseTransfer):
    def __init__(self, account_name, account_key, container_name, prefix=None):
        # NOTE: Azure wants all paths to start with a slash
        prefix = "/{}".format(prefix.lstrip("/") if prefix else "")
        super().__init__(prefix=prefix)
        self.account_name = account_name
        self.account_key = account_key
        self.container_name = container_name
        self.conn = BlockBlobService(account_name=self.account_name, account_key=self.account_key)
        self.container = self.get_or_create_container(self.container_name)
        self.log.debug("AzureTransfer initialized")
        # XXX: AzureTransfer isn't actively tested and its error handling is probably lacking
        #self.log.warning("AzureTransfer is experimental and has not been thoroughly tested")

    def get_metadata_for_key(self, key):
        key = self.format_key_for_backend(key)
        return self._list_blobs(key)[0]["metadata"]

    def _metadata_for_key(self, key):
        return self._list_blobs(key)[0]["metadata"]

    def list_path(self, key):
        path = self.format_key_for_backend(key, trailing_slash=True)
        return self._list_blobs(path)

    def _list_blobs(self, path):
        self.log.debug("Listing path %r", path)
        items = self.conn.list_blobs(self.container_name, prefix=path, delimiter="/", include="metadata")
        result = []
        for item in items:
            result.append({
                "last_modified": item.properties.last_modified.isoformat(),
                "metadata": item.metadata,
                "name": self.format_key_from_backend(item.name),
                "size": item.properties.content_length,
            })
        return result

    def delete_key(self, key):
        key = self.format_key_for_backend(key)
        self.log.debug("Deleting key: %r", key)
        return self.conn.delete_blob(self.container_name, key)

    def get_contents_to_file(self, key, filepath_to_store_to, *, progress_callback=None):
        key = self.format_key_for_backend(key)
        self.log.debug("Starting to fetch the contents of: %r to: %r", key, filepath_to_store_to)
        meta = self.conn.get_blob_to_path(self.container_name, key, filepath_to_store_to)
        if progress_callback:
            progress_callback(1, 1)
        return meta

    def get_contents_to_fileobj(self, key, fileobj_to_store_to, *, progress_callback=None):
        key = self.format_key_for_backend(key)
        self.log.debug("Starting to fetch the contents of: %r", key)
        meta = self.conn.get_blob_to_file(self.container_name, key, fileobj_to_store_to)
        if progress_callback:
            progress_callback(1, 1)
        return meta

    def get_contents_to_string(self, key):
        key = self.format_key_for_backend(key)
        self.log.debug("Starting to fetch the contents of: %r", key)
        return self.conn.get_blob_to_bytes(self.container_name, key), self._metadata_for_key(key)

    def store_file_from_memory(self, key, memstring, metadata=None):
        key = self.format_key_for_backend(key)
        self.conn.create_blob_from_bytes(self.container_name, key, memstring,
                                            x_ms_meta_name_values=self.sanitize_metadata(metadata))

    def store_file_from_disk(self, key, filepath, metadata=None, multipart=None):
        key = self.format_key_for_backend(key)
        self.conn.create_blob_from_path(self.container_name, key, filepath, None)
        #                               x_ms_meta_name_values=self.sanitize_metadata(metadata))

    def get_or_create_container(self, container_name):
        start_time = time.time()
        self.conn.create_container(container_name)
        self.log.debug("Got/Created container: %r successfully, took: %.3fs", container_name, time.time() - start_time)
        return container_name
Esempio n. 57
0
class AzureTransfer(BaseTransfer):
    def __init__(self, account_name, account_key, bucket_name, prefix=None):
        prefix = "{}".format(prefix.lstrip("/") if prefix else "")
        super().__init__(prefix=prefix)
        self.account_name = account_name
        self.account_key = account_key
        self.container_name = bucket_name
        self.conn = BlockBlobService(account_name=self.account_name, account_key=self.account_key)
        self.container = self.get_or_create_container(self.container_name)
        self.log.debug("AzureTransfer initialized, %r", self.container_name)

    def get_metadata_for_key(self, key):
        key = self.format_key_for_backend(key, remove_slash_prefix=True, trailing_slash=False)
        results = self._list_blobs(key)
        if not results:
            raise FileNotFoundFromStorageError(key)
        return results[0]["metadata"]

    def _metadata_for_key(self, key):
        return self._list_blobs(key)[0]["metadata"]

    def list_path(self, key, trailing_slash=True):  # pylint: disable=arguments-differ
        # Trailing slash needed when listing directories, without when listing individual files
        path = self.format_key_for_backend(key, remove_slash_prefix=True, trailing_slash=trailing_slash)
        return self._list_blobs(path)

    def _list_blobs(self, path):
        self.log.debug("Listing path %r", path)
        if path:
            items = self.conn.list_blobs(self.container_name, prefix=path, delimiter="/", include="metadata")
        else:  # If you give Azure an empty path, it gives you an authentication error
            items = self.conn.list_blobs(self.container_name, delimiter="/", include="metadata")
        results = []
        for item in items:
            if not isinstance(item, BlobPrefix):
                results.append({
                    "last_modified": item.properties.last_modified,
                    # Azure Storage cannot handle '-' so we turn them into underscores and back again
                    "metadata": dict((k.replace("_", "-"), v) for k, v in item.metadata.items()),
                    "name": self.format_key_from_backend(item.name),
                    "size": item.properties.content_length,
                })
        return results

    def delete_key(self, key):
        key = self.format_key_for_backend(key, remove_slash_prefix=True)
        self.log.debug("Deleting key: %r", key)
        try:
            return self.conn.delete_blob(self.container_name, key)
        except azure.common.AzureMissingResourceHttpError as ex:
            raise FileNotFoundFromStorageError(key) from ex

    def get_contents_to_file(self, key, filepath_to_store_to, *, progress_callback=None):
        key = self.format_key_for_backend(key, remove_slash_prefix=True)

        self.log.debug("Starting to fetch the contents of: %r to: %r", key, filepath_to_store_to)
        try:
            self.conn.get_blob_to_path(self.container_name, key, filepath_to_store_to)
        except azure.common.AzureMissingResourceHttpError as ex:
            raise FileNotFoundFromStorageError(key) from ex

        if progress_callback:
            progress_callback(1, 1)
        return self._metadata_for_key(key)

    def get_contents_to_fileobj(self, key, fileobj_to_store_to, *, progress_callback=None):
        key = self.format_key_for_backend(key, remove_slash_prefix=True)

        self.log.debug("Starting to fetch the contents of: %r", key)
        try:
            self.conn.get_blob_to_stream(self.container_name, key, fileobj_to_store_to)
        except azure.common.AzureMissingResourceHttpError as ex:
            raise FileNotFoundFromStorageError(key) from ex

        if progress_callback:
            progress_callback(1, 1)

        return self._metadata_for_key(key)

    def get_contents_to_string(self, key):
        key = self.format_key_for_backend(key, remove_slash_prefix=True)
        self.log.debug("Starting to fetch the contents of: %r", key)
        try:
            blob = self.conn.get_blob_to_bytes(self.container_name, key)
            return blob.content, self._metadata_for_key(key)
        except azure.common.AzureMissingResourceHttpError as ex:
            raise FileNotFoundFromStorageError(key) from ex

    def store_file_from_memory(self, key, memstring, metadata=None):
        key = self.format_key_for_backend(key, remove_slash_prefix=True)
        self.conn.create_blob_from_bytes(self.container_name, key, memstring,
                                         metadata=self.sanitize_metadata(metadata, replace_hyphen_with="_"))

    def store_file_from_disk(self, key, filepath, metadata=None, multipart=None):
        key = self.format_key_for_backend(key, remove_slash_prefix=True)
        self.conn.create_blob_from_path(self.container_name, key, filepath,
                                        metadata=self.sanitize_metadata(metadata, replace_hyphen_with="_"))

    def get_or_create_container(self, container_name):
        start_time = time.monotonic()
        self.conn.create_container(container_name)
        self.log.debug("Got/Created container: %r successfully, took: %.3fs",
                       container_name, time.monotonic() - start_time)
        return container_name
#!/usr/bin/python

import sys,os
from azure.storage.blob import BlockBlobService
from azure.storage.blob import ContentSettings


block_blob_service = BlockBlobService(account_name=str(sys.argv[1]), account_key=str(sys.argv[2]))
block_blob_service.create_container('keys')

block_blob_service.create_blob_from_path(
    'keys',
    str(sys.argv[3]),
    os.path.join(os.getcwd(),str(sys.argv[3])),
    content_settings=ContentSettings(content_type='text/plain')
)