Esempio n. 1
0
def delete_container(azure_storage_account_name, azure_storage_account_key,
                     container_name):
    """Remove *container_name* from the storage account, if it exists."""
    service = BlockBlobService(account_name=azure_storage_account_name,
                               account_key=azure_storage_account_key)
    # Only issue the delete when the container is actually present.
    if service.exists(container_name):
        service.delete_container(container_name)
Esempio n. 2
0
def main(mytimer: func.TimerRequest) -> None:
    """Timer-triggered function that deletes stale storage containers.

    Loads storage credentials from the adjacent ``config.json`` and deletes
    every container (outside a fixed ignore list) whose last-modified time
    is more than 10080 minutes (7 days) in the past.

    :param mytimer: Azure Functions timer binding for this invocation.
    """
    utc_timestamp = datetime.datetime.utcnow().replace(
        tzinfo=datetime.timezone.utc).isoformat()

    if mytimer.past_due:
        logging.info('The timer is past due!')

    logging.info('Python timer trigger function ran at %s', utc_timestamp)

    # Connect to the storage account using credentials from config.json.
    cwd = os.path.dirname(os.path.realpath(__file__))
    print(cwd)
    # Fixed: the original left the config file handle open until garbage
    # collection; a context manager guarantees it is closed.
    with open(os.path.join(cwd, "config.json")) as config_file:
        data = json.load(config_file)
    print(data)

    account = data["Storage"][0]["Account"]
    accountKey = data["Storage"][0]["Key"]
    storage = BlockBlobService(account, accountKey)

    # Infrastructure containers that must never be deleted.
    ignore = [
        'azure-webjobs-hosts', 'azure-webjobs-secrets',
        'azureappservice-run-from-package', 'function-releases', 'text'
    ]

    for container in storage.list_containers():
        if container.name not in ignore:
            properties = storage.get_container_properties(container.name)
            # Compare as naive datetimes: strip tzinfo from last_modified
            # so it matches the naive datetime.now() on the left.
            difference = datetime.datetime.now(
            ) - properties.properties.last_modified.replace(tzinfo=None)

            # 10080 minutes == 7 days.
            if difference > datetime.timedelta(minutes=10080):
                print('delete')
                print(difference)
                storage.delete_container(container.name)
def run_sample():
    """Quickstart walkthrough: upload a user-chosen file to a public demo
    container, list and re-download it, then clean everything up."""
    try:
        # Service client for the storage account.
        service = BlockBlobService(
            account_name='meetpythonstorage',
            account_key='abcabcabcabcabcabcabcabcabcabcabcabcabac')

        # Demo container; its blobs are made publicly readable.
        container_name = 'quickstartblobs'
        service.create_container(container_name)
        service.set_container_acl(
            container_name, public_access=PublicAccess.Container)

        # The file to upload is resolved from the current directory.
        local_path = os.path.abspath(os.path.curdir)
        local_file_name = input("Enter file name to upload : ")
        full_path_to_file = os.path.join(local_path, local_file_name)

        print("Temp file = " + full_path_to_file)
        print("\nUploading to Blob storage as blob" + local_file_name)

        # Blob name mirrors the local file name.
        service.create_blob_from_path(container_name,
                                      local_file_name,
                                      full_path_to_file)

        # Show what is now in the container.
        print("\nList blobs in the container")
        for blob in service.list_blobs(container_name):
            print("\t Blob name: " + blob.name)

        # Download next to the original, tagging the copy '_DOWNLOADED'.
        full_path_to_file2 = os.path.join(
            local_path, str.replace(local_file_name, '.txt',
                                    '_DOWNLOADED.txt'))
        print("\nDownloading blob to " + full_path_to_file2)
        service.get_blob_to_path(container_name, local_file_name,
                                 full_path_to_file2)

        sys.stdout.write(
            "Sample finished running. When you hit <any key>, the sample will be deleted and the sample "
            "application will exit.")
        sys.stdout.flush()
        input()

        # Remove the demo container and both local files.
        service.delete_container(container_name)
        os.remove(full_path_to_file)
        os.remove(full_path_to_file2)
    except Exception as e:
        print(e)
Esempio n. 4
0
def cleanup_old_resources(blob_client: azureblob.BlockBlobService,
                          days: int = 7):
    """
    Delete any 'fgrp' storage container older than *days* (default 7).

    :param blob_client: A blob service client.
    :type blob_client: `azure.storage.blob.BlockBlobService`
    :param days: Delete containers whose last-modified time is older than
        this many days.
    :type days: int
    """
    # UTC-aware cutoff: "now" minus `days` days.
    timeout = utc.localize(
        datetime.datetime.now()) + datetime.timedelta(days=-days)

    try:
        for container in blob_client.list_containers():
            if container.properties.last_modified < timeout:
                if 'fgrp' in container.name:
                    # Fixed: the message previously said "7 days" even when
                    # a different `days` value was passed in.
                    logger.info(
                        "Deleting container {}, it is older than {} days.".
                        format(container.name, days))
                    blob_client.delete_container(container.name)
    except Exception as e:
        logger.error(
            "Failed to clean up resources due to the error: {}".format(e))
        raise e
Esempio n. 5
0
    def test_account_sas_with_question_mark_prefix(self):
        """A SAS token with a leading '?' must be accepted by the service."""
        # SAS URL is calculated from the storage key, so this runs live only.
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange: build a token deliberately prefixed with '?'.
        token = '?' + self.account.generate_shared_access_signature(
            Services.BLOB,
            ResourceTypes.OBJECT + ResourceTypes.CONTAINER,
            AccountPermissions.READ + AccountPermissions.WRITE +
            AccountPermissions.DELETE + AccountPermissions.CREATE,
            datetime.utcnow() + timedelta(hours=1),
        )

        service = BlockBlobService(self.account_name,
                                   sas_token=token,
                                   is_emulated=self.settings.IS_EMULATED)
        payload = b'shared access signature with read/write permission on blob'
        container_name = self.get_resource_name("container")
        blob_name = 'blob1.txt'

        try:
            # Act: round-trip the payload through a fresh container.
            service.create_container(container_name)
            service.create_blob_from_bytes(container_name, blob_name, payload)
            fetched = service.get_blob_to_bytes(container_name, blob_name)

            # Assert
            self.assertIsNotNone(fetched)
            self.assertEqual(payload, fetched.content)
        finally:
            service.delete_container(container_name)
Esempio n. 6
0
def delete_all():
    """Delete every Azure blob container and database record owned by the
    currently logged-in user.

    The user is resolved from Flask-Login's ``current_user`` object, so no
    argument is required.
    """
    # NOTE(review): account key is hard-coded here — it should live in
    # configuration / a secrets store, not in source control.
    block_blob_service = BlockBlobService(account_name='mednlpstorage', account_key='v+IgtNIIRhZjqMZx+e886rhJMVAhIUoUfG252SVIftBCyx8bG+NE0apP20xakOsMRQfNZFbUggUUULN2JER8lg==')

    user = User.query.filter_by(username=current_user.username).first()
    single_files = Single_Upload.query.filter_by(user_id=user.id)
    group_files = Group_Upload.query.filter_by(user_id=user.id)

    # Remove each upload's backing container, then its metadata row.
    for upload in single_files:
        block_blob_service.delete_container(upload.container_name)
        db.session.delete(upload)

    for upload in group_files:
        block_blob_service.delete_container(upload.container_name)
        db.session.delete(upload)

    db.session.commit()
Esempio n. 7
0
def clean_account():
    """Delete every container in the account except 'allstaticfiles'."""
    # NOTE(review): hard-coded account key — should come from configuration.
    block_blob_service = BlockBlobService(account_name='forampics', account_key='4nwt5cexYaNCgmsk5NrLLm5lmRprYobFVepz+hhb6b7hv2f6zifM1EPmoqT7SMTsUYvWSe3nREd/dS6g8Thjmg==')
    for container in block_blob_service.list_containers():
        # Keep the shared static-files container; drop everything else.
        if container.name != 'allstaticfiles':
            block_blob_service.delete_container(container.name)
Esempio n. 8
0
    def store(self):
        """Yield a fresh AzureBlockBlobStore, then drop its container."""
        from azure.storage.blob import BlockBlobService

        container = uuid()
        conn_string = create_azure_conn_string(load_azure_credentials())
        cleanup_client = BlockBlobService(connection_string=conn_string)

        yield AzureBlockBlobStore(conn_string=conn_string, container=container,
                                  public=False)
        # Teardown: remove the container created for this fixture.
        cleanup_client.delete_container(container)
Esempio n. 9
0
 def create_store():
     """Build an ABSStore on the storage emulator with an empty 'test'
     container, returning ``(store, None)``."""
     # Recreate the container from scratch so every run starts clean.
     client = BlockBlobService(is_emulated=True)
     client.delete_container('test')
     client.create_container('test')
     store = ABSStore(container='test',
                      prefix='zarrtesting/',
                      account_name='foo',
                      account_key='bar',
                      blob_service_kwargs={'is_emulated': True})
     # Clear any leftover keys under the prefix as well.
     store.rmdir()
     return store, None
Esempio n. 10
0
    def store(self):
        """Fixture: provide an AzureBlockBlobStore and clean up afterwards."""
        from azure.storage.blob import BlockBlobService

        container = uuid()
        conn_string = create_azure_conn_string(load_azure_credentials())
        janitor = BlockBlobService(connection_string=conn_string)

        yield AzureBlockBlobStore(conn_string=conn_string,
                                  container=container,
                                  public=False)
        # Remove the throwaway container once the test is done with it.
        janitor.delete_container(container)
Esempio n. 11
0
def azure_blob_storage():
    """End-to-end demo of block- and append-blob operations: create a
    container, upload/list/download/delete a blob, append to an append
    blob, then delete the container.
    """
    try:
        # Connect to azure blob storage.
        block_blob_service = BlockBlobService(account_name='account_name', account_key='account_key')

        # Create a container.
        container_name = 'container_name'
        block_blob_service.create_container(container_name)

        # Make the container's blobs publicly readable.
        block_blob_service.set_container_acl(container_name, public_access=PublicAccess.Container)

        # Create a local file to upload.
        local_file_name = "Test.txt"
        # Fixed: the original assigned `path_to_file` but then referenced an
        # undefined `full_path_to_file` (NameError).
        full_path_to_file = os.path.join('local_path', local_file_name)

        # Fixed: use a context manager so the handle is always closed.
        with open(full_path_to_file, 'w') as local_file:
            local_file.write("hi peoplee")

        # Upload; the blob name mirrors the local file name.
        block_blob_service.create_blob_from_path(container_name, local_file_name, full_path_to_file)

        # List all the blobs in the container.
        for blob in block_blob_service.list_blobs(container_name):
            # Fixed: the original concatenated a str with a set literal
            # ({file.name}), which raises TypeError.
            print("Blob name: " + blob.name)

        # Download the blob.
        download_file_path = os.path.join('local_path', 'local_file_name')
        block_blob_service.get_blob_to_path(container_name, local_file_name, download_file_path)

        # Delete a blob.
        block_blob_service.delete_blob('container_name', 'blob_name')

        # Append-blob demo: the blob must exist before appending to it.
        append_blob_service = AppendBlobService(account_name='myaccount', account_key='mykey')
        append_blob_service.create_container('container_name')
        append_blob_service.create_blob('container_name', 'append_blob')
        append_blob_service.append_blob_from_text('container_name', 'append_blob', 'Hello, world!')
        append_blob = append_blob_service.get_blob_to_text('container_name', 'append_blob')

        # Clean up resources. This includes the container and the temp files.
        block_blob_service.delete_container(container_name)
    except Exception as e:
        # Fixed: the original `except` clause had no body (SyntaxError).
        print(e)
Esempio n. 12
0
    def store(self):
        """Fixture: extended-keyspace store backed by a throwaway container."""
        from azure.storage.blob import BlockBlobService

        class ExtendedKeysStore(ExtendedKeyspaceMixin, AzureBlockBlobStore):
            pass

        container = uuid()
        conn_string = create_azure_conn_string(load_azure_credentials())
        teardown_client = BlockBlobService(connection_string=conn_string)

        yield ExtendedKeysStore(conn_string=conn_string,
                                container=container, public=False)
        teardown_client.delete_container(container)
Esempio n. 13
0
def test_azure_setgetstate():
    """A pickled/unpickled store must still serve previously stored keys."""
    from azure.storage.blob import BlockBlobService
    container = uuid()
    conn_string = create_azure_conn_string(load_azure_credentials())
    cleanup = BlockBlobService(connection_string=conn_string)
    store = AzureBlockBlobStore(conn_string=conn_string, container=container)
    store.put(u'key1', b'value1')

    # Round-trip the store object through pickle (protocol 2).
    restored = pickle.loads(pickle.dumps(store, protocol=2))

    assert restored.get(u'key1') == b'value1'
    cleanup.delete_container(container)
Esempio n. 14
0
def test_azure_setgetstate():
    """Pickling a store must preserve access to its stored values."""
    from azure.storage.blob import BlockBlobService
    container = uuid()
    conn_string = create_azure_conn_string(load_azure_credentials())
    service = BlockBlobService(connection_string=conn_string)
    store = AzureBlockBlobStore(conn_string=conn_string, container=container)
    store.put(u'key1', b'value1')

    # Serialize and revive the store; protocol 2 keeps py2 compatibility.
    payload = pickle.dumps(store, protocol=2)
    revived = pickle.loads(payload)

    assert revived.get(u'key1') == b'value1'
    service.delete_container(container)
def run_sample():
    """Upload a user-chosen file to a public demo container, list and
    re-download it, then remove the container and the local files."""
    try:
        # Client for the storage account.
        # NOTE(review): account key is hard-coded; move it to configuration.
        block_blob_service = BlockBlobService(account_name='meetpythonstorage', account_key='duOguiKnYb6ZEbJC6BftWqA2lcH67dWkmCSEJj+KxOTOHCNPeV7r4oO6feTw7gSSoFGKHryL4yqSVWlEkm6jWg==')

        # Demo container; its blobs are made publicly readable.
        container_name = 'quickstartblobs'
        block_blob_service.create_container(container_name)
        block_blob_service.set_container_acl(container_name, public_access=PublicAccess.Container)

        # Upload target, resolved relative to the current directory.
        local_path = os.path.abspath(os.path.curdir)
        local_file_name = input("Enter file name to upload : ")
        full_path_to_file = os.path.join(local_path, local_file_name)

        print("Temp file = " + full_path_to_file)
        print("\nUploading to Blob storage as blob" + local_file_name)

        block_blob_service.create_blob_from_path(container_name, local_file_name, full_path_to_file)

        # Show what is now in the container.
        print("\nList blobs in the container")
        for blob in block_blob_service.list_blobs(container_name):
            print("\t Blob name: " + blob.name)

        # Download alongside the original with a '_DOWNLOADED' suffix.
        full_path_to_file2 = os.path.join(local_path, str.replace(local_file_name, '.txt', '_DOWNLOADED.txt'))
        print("\nDownloading blob to " + full_path_to_file2)
        block_blob_service.get_blob_to_path(container_name, local_file_name, full_path_to_file2)

        sys.stdout.write("Sample finished running. When you hit <any key>, the sample will be deleted and the sample "
                         "application will exit.")
        sys.stdout.flush()
        input()

        # Clean up: the container plus both local files.
        block_blob_service.delete_container(container_name)
        os.remove(full_path_to_file)
        os.remove(full_path_to_file2)
    except Exception as e:
        print(e)
Esempio n. 16
0
def run_sample():
    """Create a temp file in ~/Documents, round-trip it through a public
    blob container, then delete the container and the temp files."""
    try:
        # Client for the storage account.
        block_blob_service = BlockBlobService(account_name='hackgt19', account_key='24wGa1RHd0BnemSDBbqRzvvTAB7Qy4IAN28E9de6OLR98wxnFljJXnKaBtzqJd2F53SmtNZP2NnZCPZkeL6wlQ==')

        # Demo container with publicly readable blobs.
        container_name = 'quickstartblobs'
        block_blob_service.create_container(container_name)
        block_blob_service.set_container_acl(container_name, public_access=PublicAccess.Container)

        # Create a uniquely-named file in Documents to exercise the upload.
        local_path = os.path.expanduser("~/Documents")
        local_file_name = "QuickStart_" + str(uuid.uuid4()) + ".txt"
        full_path_to_file = os.path.join(local_path, local_file_name)

        with open(full_path_to_file, 'w') as temp_file:
            temp_file.write("Hello, World!")

        print("Temp file = " + full_path_to_file)
        print("\nUploading to Blob storage as blob" + local_file_name)

        block_blob_service.create_blob_from_path(container_name, local_file_name, full_path_to_file)

        # List what landed in the container.
        print("\nList blobs in the container")
        for blob in block_blob_service.list_blobs(container_name):
            print("\t Blob name: " + blob.name)

        # Download a '_DOWNLOADED' copy so both files are visible.
        full_path_to_file2 = os.path.join(local_path, str.replace(local_file_name, '.txt', '_DOWNLOADED.txt'))
        print("\nDownloading blob to " + full_path_to_file2)
        block_blob_service.get_blob_to_path(container_name, local_file_name, full_path_to_file2)

        sys.stdout.write("Sample finished running. When you hit <any key>, the sample will be deleted and the sample "
                         "application will exit.")
        sys.stdout.flush()
        input()

        # Clean up the container and both temp files.
        block_blob_service.delete_container(container_name)
        os.remove(full_path_to_file)
        os.remove(full_path_to_file2)
    except Exception as e:
        print(e)
Esempio n. 17
0
def clean_old_containers():
    """Delete analysis-result containers that are more than 7 days old.

    Only containers matching known analysis naming patterns
    (``data-request-<digits>``, ``geneseekr-<digits>``) are considered.
    """
    blob_client = BlockBlobService(account_name=settings.AZURE_ACCOUNT_NAME,
                                   account_key=settings.AZURE_ACCOUNT_KEY)
    # Patterns we have to worry about - data-request-digits, geneseekr-digits
    # TODO: Add more of these as more analysis types get created.
    # Fixed: raw strings — '\d' in a normal string is an invalid escape
    # sequence (DeprecationWarning now, an error in future Python versions).
    patterns_to_search = [r'^data-request-\d+$', r'^geneseekr-\d+$']
    # Hoisted out of the loop: "now" only needs to be computed once.
    today = datetime.datetime.now(timezone.utc)
    generator = blob_client.list_containers(include_metadata=True)
    for container in generator:
        for pattern in patterns_to_search:
            if re.match(pattern, container.name):
                container_age = abs(container.properties.last_modified - today).days
                if container_age > 7:
                    blob_client.delete_container(container.name)
Esempio n. 18
0
    def store(self):
        """Fixture: extended-keyspace store; container removed on teardown."""

        class ExtendedKeysStore(ExtendedKeyspaceMixin, AzureBlockBlobStore):
            pass

        from azure.storage.blob import BlockBlobService

        container = uuid()
        conn_string = create_azure_conn_string(load_azure_credentials())
        cleanup_service = BlockBlobService(connection_string=conn_string)

        yield ExtendedKeysStore(conn_string=conn_string,
                                container=container,
                                public=False)
        # Drop the container created for this test run.
        cleanup_service.delete_container(container)
Esempio n. 19
0
 def setUpClass(cls):
     """One-time setup: clear the test container and create a test user."""
     # setUpClass runs only once, instead of every test case. This means we
     # get to upload a bunch of files, and then check that they're really
     # there. These tests have to get run in top to bottom order, so they're
     # not (strictly speaking) unit tests, but they get run in order by
     # default, so my worries are fairly minimal.
     super(TestAPI, cls).setUpClass()
     blob_client = BlockBlobService(account_name=settings.AZURE_ACCOUNT_NAME,
                                    account_key=settings.AZURE_ACCOUNT_KEY)
     # The API autoconverts '_' to '-' and lowercases container names, so
     # delete the converted name in case it already exists.
     blob_client.delete_container('111111-fake')
     # Give the (sometimes slow) delete operation time to finish before any
     # test runs. Painfully long for a unit test, but it only happens once!
     time.sleep(40)
     test_user = User.objects.create(username='******')
     test_user.set_password('password')
     test_user.save()
Esempio n. 20
0
 def delete_sto_container(self):
     """Delete this resource's storage container.

     :returns: False when the blob service client cannot be created,
         otherwise the result of the delete call.
     """
     key = self.get_sto_account_key()
     try:
         blob = BlockBlobService(self.sto_account, key)
     except Exception:
         # Fixed: narrowed from a bare `except:`, which would also swallow
         # KeyboardInterrupt and SystemExit.
         return False
     return blob.delete_container(self.sto_cont_name)
Esempio n. 21
0
def run(resource, *args, **kwargs):
    """Delete the resource's Azure container and report a status triple
    of (status, message, detail)."""
    container_name = resource.azure_container_name

    azure_account_name = resource.azure_account_name

    block_blob_service = BlockBlobService(account_name=azure_account_name, account_key=resource.azure_account_key)

    # Guard clause: a missing container is reported as a failure.
    if not block_blob_service.exists(container_name):
        return "FAILURE", f"Failed to delete container '{container_name}'", f"Container don't exist."

    try:
        block_blob_service.delete_container(container_name)
    except Exception as error:
        return "FAILURE", f"Failed to delete container '{container_name}'", f"{error}"

    return "SUCCESS", f"Successfully deleted container -> '{container_name}'", ""
class BlobManager(object):
    """Thin convenience wrapper around BlockBlobService for one account."""

    def __init__(self, account_name, account_key):
        self.block_blob_service = BlockBlobService(account_name=account_name,
                                                   account_key=account_key)

    def create_container(self, container_name):
        """Create a public container, unless it already exists."""
        existing = self.block_blob_service.list_containers()
        if any(c.name == container_name for c in existing):
            return
        self.block_blob_service.create_container(container_name)
        # Set permission as public
        self.block_blob_service.set_container_acl(
            container_name, public_access=PublicAccess.Container)

    def upload(self, filename, container_name):
        """Upload `filename` from the CWD; blob name matches the file name."""
        source = os.path.join(os.getcwd(), filename)
        self.block_blob_service.create_blob_from_path(container_name, filename,
                                                      source)

    def download(self, path, filename, container_name):
        """Download a blob into `path`, tagging the copy '_Downloaded'."""
        destination = os.path.join(
            path, str.replace(filename, '.', '_Downloaded.'))
        self.block_blob_service.get_blob_to_path(container_name, filename,
                                                 destination)

    def list_blobs(self, container_name):
        """Return the container's blobs."""
        return self.block_blob_service.list_blobs(container_name)

    def get_blob_url(self, container_name, blob_name):
        """Return the public URL for a blob."""
        return self.block_blob_service.make_blob_url(container_name, blob_name)

    def delete_container(self, container_name):
        """Delete an entire container."""
        self.block_blob_service.delete_container(container_name)

    def clear_container(self, container_name):
        """Delete every blob inside a container, keeping the container."""
        for blob in self.list_blobs(container_name):
            self.delete_blob(container_name, blob.name)

    def delete_blob(self, container_name, blob_name):
        """Delete a single blob."""
        self.block_blob_service.delete_blob(container_name, blob_name)

    def clear(self):
        """Empty every container in the account."""
        for container in self.block_blob_service.list_containers():
            self.clear_container(container.name)
Esempio n. 23
0
def cleanup_old_resources(blob_client: azureblob.BlockBlobService,
                          batch_client: batch.BatchExtensionsClient,
                          hours: int = 1):
    """
    Delete any old resources from prior runs (storage containers, pools,
    jobs) whose last-modified time is older than *hours*.

    :param blob_client: A blob service client.
    :type blob_client: `azure.storage.blob.BlockBlobService`
    :param batch_client: A batch service extensions client.
    :type batch_client: `azext.batch.BatchExtensionsClient`
    :param hours: Delete all resources older than this many hours.
    :type hours: int
    """
    # Anything last modified before this cutoff is considered stale.
    cutoff = datetime.now(timezone.utc) + timedelta(hours=-hours)

    try:
        for pool in batch_client.pool.list():
            if pool.last_modified < cutoff:
                logger.info(
                    "Deleting pool {}, it is older than {} hours.".format(
                        pool.id, hours))
                delete_pool(batch_client, pool.id)

        for job in batch_client.job.list():
            if job.last_modified < cutoff:
                logger.info(
                    "Deleting job {}, it is older than {} hours.".format(
                        job.id, hours))
                delete_job(batch_client, job.id)

        for container in blob_client.list_containers():
            if container.properties.last_modified < cutoff:
                # Output filegroups are kept: we might need them to diagnose
                # failed runs.
                if 'fgrp' in container.name and not container.name.endswith(
                        output_fgrp_postfix):
                    logger.info(
                        "Deleting container {}, it is older than {} hours.".
                        format(container.name, hours))
                    blob_client.delete_container(container.name)

    except Exception as e:
        logger.error(
            "Failed to clean up resources due to the error: {}".format(e))
        raise e
Esempio n. 24
0
def run_sample():
    """Upload a user-selected file to the 'kazn1' container, list and
    download it, then delete the container and the local copies."""
    try:
        # Storage account client.
        service = BlockBlobService(account_name='kazen1', account_key='ds7aI/Mo3YuYaWe9a/tgKrPFf0mnXwOEv+I1EFgmcH3dMSGkc9Pbc1Zzt8hPfa+70TlPNJQU/xPg+nbeoCX5Cg==')

        # Demo container with publicly readable blobs.
        container_name = 'kazn1'
        service.create_container(container_name)
        service.set_container_acl(container_name, public_access=PublicAccess.Container)

        # Upload target, resolved from the current directory.
        local_path = os.path.abspath(os.path.curdir)
        local_file_name = input("Enter file name to upload : ")
        full_path_to_file = os.path.join(local_path, local_file_name)

        print("Temp file = " + full_path_to_file)
        print("\nUploading to Blob storage as blob" + local_file_name)

        service.create_blob_from_path(container_name, local_file_name, full_path_to_file)

        # Show the container's contents after the upload.
        print("\nList blobs in the container")
        for blob in service.list_blobs(container_name):
            print("\t Blob name: " + blob.name)

        # Download next to the original with a '_DOWNLOADED' suffix.
        full_path_to_file2 = os.path.join(local_path, str.replace(local_file_name, '.txt', '_DOWNLOADED.txt'))
        print("\nDownloading blob to " + full_path_to_file2)
        service.get_blob_to_path(container_name, local_file_name, full_path_to_file2)

        sys.stdout.write("Sample finished running. When you hit <any key>, the sample will be deleted and the sample "
                         "application will exit.")
        sys.stdout.flush()
        input()

        # Remove the container and both local files.
        service.delete_container(container_name)
        os.remove(full_path_to_file)
        os.remove(full_path_to_file2)
    except Exception as e:
        print(e)
Esempio n. 25
0
def test_upload_single_file_to_dest_dir():
    """Uploading one file into a destination dir must yield exactly one
    resource file whose blob path is prefixed with that dir."""
    azurebatch = parse_configuration_file('tests/valid_credentials.txt')
    azurebatch.input = {'': ['tests/valid_credentials.txt newdir']}
    azurebatch.job_name = 'pytest-2'
    resource_files = azurebatch.upload_input_to_blob_storage(input_id='')
    blob_client = BlockBlobService(
        account_name=azurebatch.storage_account_name,
        account_key=azurebatch.storage_account_key)
    # Collect the names of everything that actually landed in the container.
    uploaded = [
        blob.name
        for blob in blob_client.list_blobs(
            container_name=azurebatch.job_name + '-input')
    ]
    assert len(uploaded) == 1
    assert resource_files[0].file_path == 'newdir/valid_credentials.txt'
    assert len(resource_files) == 1
    blob_client.delete_container(container_name=azurebatch.job_name + '-input')
Esempio n. 26
0
def _delete_container(conn_string, container):
    """Delete a container, supporting both major azure-storage-blob APIs.

    With azure-storage-blob>=12 a missing container is tolerated; with the
    legacy (<12) client the delete is issued directly.
    """
    try:
        # Preferred path: azure-storage-blob>=12.
        from azure.storage.blob import BlobServiceClient
        from azure.core.exceptions import AzureError

        client = BlobServiceClient.from_connection_string(conn_string)
        try:
            client.delete_container(container)
        except AzureError as ex:
            # Only ContainerNotFound is ignored: nothing there to delete.
            if ex.error_code != 'ContainerNotFound':
                raise
    except ImportError:
        # Fallback for azure-storage-blob<12.
        from azure.storage.blob import BlockBlobService
        legacy_client = BlockBlobService(connection_string=conn_string)
        legacy_client.delete_container(container)
def run_sample():
    """Upload 'fruit.jpg' to a public demo container, list and download
    it, then delete the container and the downloaded copy."""
    try:
        # Client for the storage account (placeholder credentials).
        block_blob_service = BlockBlobService(account_name='<account_name>', account_key='<account_key>')

        # Demo container with publicly readable blobs.
        container_name = 'quickstartblobs'
        block_blob_service.create_container(container_name)
        block_blob_service.set_container_acl(container_name, public_access=PublicAccess.Container)

        # Location of the fruit photo to upload.
        local_path = os.getcwd() + '\\data'
        local_file_name = 'fruit.jpg'
        full_path_to_file = os.path.join(local_path, local_file_name)

        print("\nUploading to Blob storage as blob" + local_file_name)

        block_blob_service.create_blob_from_path(container_name, local_file_name, full_path_to_file)

        # Show the container's contents after the upload.
        print("\nList blobs in the container")
        for blob in block_blob_service.list_blobs(container_name):
            print("\t Blob name: " + blob.name)

        # Download a '_DOWNLOADED' copy so both files remain visible.
        full_path_to_file2 = os.path.join(local_path, str.replace(local_file_name, '.jpg', '_DOWNLOADED.jpg'))
        print("\nDownloading blob to " + full_path_to_file2)
        block_blob_service.get_blob_to_path(container_name, local_file_name, full_path_to_file2)

        sys.stdout.write("Sample finished running. When you hit <any key>, the sample will be deleted and the sample "
                         "application will exit.")
        sys.stdout.flush()
        input()

        # Clean up the container and the downloaded file.
        block_blob_service.delete_container(container_name)
        os.remove(full_path_to_file2)
    except Exception as e:
        print(e)
Esempio n. 28
0
def run():
    """Walk ``rootdir`` and delete the blob container named after each
    non-empty sub-folder (container names are lower-cased to satisfy the
    service's naming rules)."""
    try:
        # Client for the storage account; credentials come from the
        # module-level ``args`` mapping.
        service = BlockBlobService(account_name=args["accountname"],
                                   account_key=args["key"])

        for subdir, dirs, files in os.walk(rootdir):
            folder = subdir[subdir.rfind('/') + 1:]
            total = len(files)
            print("\nSub Folder Name: {}, Total Files {}".format(folder, total))
            if total <= 0:
                continue
            # valid names only lower case alphanumeric plus dash
            target = folder.lower()
            try:
                service.delete_container(target)
                logger.info("deleted {}".format(target))
            except Exception as e:
                # Best-effort: a failed delete is logged, the walk continues.
                logger.error(e)
    except Exception as e:
        print(e)
    def delete_resources(self,
                         batch_service_client: batch.BatchExtensionsClient,
                         blob_client: azureblob.BlockBlobService,
                         force_delete: bool = None):
        """
        Deletes the job and, once the job reached a terminal state (or when
        force_delete is set), the input/output containers it used. A failed
        job keeps its output container around for debugging.

        :param batch_service_client: A Batch service client.
        :type batch_service_client: `azure.batch.BatchExtensionsClient`
        :param blob_client: A blob service client.
        :type blob_client: `azure.storage.blob.BlockBlobService`
        :param force_delete: Forces the deletion of all the containers related this job.
        :type force_delete: bool
        """
        # Remove the batch job; a job that never got created is only logged,
        # any other batch error gets a full traceback.
        try:
            batch_service_client.job.delete(self.job_id)
        except batchmodels.batch_error.BatchErrorException as batch_exception:
            if not utils.expected_exception(batch_exception,
                                            "The specified job does not exist"):
                traceback.print_exc()
                utils.print_batch_exception(batch_exception)
            else:
                logger.error("The specified Job [{}] was not created.".format(
                    self.job_id))

        terminal_states = {
            utils.JobState.COMPLETE,
            utils.JobState.POOL_FAILED,
            utils.JobState.NOT_STARTED,
        }
        if self.status.job_state in terminal_states or force_delete:
            # Both containers go: input first, then output.
            for container in (self.storage_info.input_container,
                              self.storage_info.output_container):
                logger.info('Deleting container [{}]...'.format(container))
                blob_client.delete_container(container)
        else:
            logger.info("Did not delete the output container")
            logger.info(
                "Job: {}. did not complete successfully, Container {} was not deleted."
                .format(self.job_id, self.storage_info.output_container))
    def test_request_callback_signed_header(self):
        """A request_callback should be able to inject a metadata header
        into outgoing PUT requests before they are signed."""
        # Arrange: a client plus a uniquely named test container.
        client = BlockBlobService(self.account_name, self.account_key)
        container = self.get_resource_name('cont')

        # Act: tag every PUT request with an extra metadata header.
        def add_meta(request):
            if request.method == 'PUT':
                request.headers['x-ms-meta-hello'] = 'world'

        client.request_callback = add_meta

        # Assert: the injected header round-trips as container metadata.
        try:
            client.create_container(container)
            self.assertEqual(client.get_container_metadata(container),
                             {'hello': 'world'})
        finally:
            # Clean up the container even when the assertion fails.
            client.delete_container(container)
Esempio n. 31
0
    def test_request_callback_signed_header(self):
        """request_callback should be able to add a metadata header to
        PUT requests before they are signed and sent."""
        # Arrange
        service = BlockBlobService(self.account_name, self.account_key)
        name = self.get_resource_name('cont')

        # Act
        def callback(request):
            # Only PUT requests (container creation here) get the header.
            if request.method == 'PUT':
                request.headers['x-ms-meta-hello'] = 'world'

        service.request_callback = callback

        # Assert
        try:
            service.create_container(name)
            metadata = service.get_container_metadata(name)
            # The 'x-ms-meta-' prefix is stripped when metadata is read back.
            self.assertEqual(metadata, {'hello': 'world'})
        finally:
            # Clean up the container even if the assertion fails.
            service.delete_container(name)
Esempio n. 32
0
def test_upload_dir():
    """Uploading a directory must create one blob and one resource file per
    file in the directory, then the test drops the input container."""
    batch_cfg = parse_configuration_file('tests/valid_credentials.txt')
    batch_cfg.input = {'': ['tests/test_files']}
    batch_cfg.job_name = 'pytest-3'
    resource_files = batch_cfg.upload_input_to_blob_storage(input_id='')

    client = BlockBlobService(
        account_name=batch_cfg.storage_account_name,
        account_key=batch_cfg.storage_account_key)
    container = batch_cfg.job_name + '-input'

    # Both files from the directory must have landed in the container.
    blob_names = [blob.name
                  for blob in client.list_blobs(container_name=container)]
    assert len(blob_names) == 2

    # The returned resource files must point at both uploaded paths.
    paths = [resource_file.file_path for resource_file in resource_files]
    assert 'test_files/file_1.txt' in paths and 'test_files/file_2.txt' in paths
    assert len(resource_files) == 2

    client.delete_container(container_name=container)
    def delete_resources(self, batch_service_client: batch.BatchExtensionsClient,
                         blob_client: azureblob.BlockBlobService, force_delete: bool = None):
        """
        Deletes the job, pool and the containers used for the job. If the job fails the output container will not be deleted.
        The non deleted container is used for debugging.

        NOTE(review): despite the summary above, this method only deletes the
        job and the containers -- no pool deletion happens below; confirm
        whether the pool is cleaned up elsewhere.

        :param batch_service_client: A Batch service client.
        :type batch_service_client: `azure.batch.BatchExtensionsClient`
        :param blob_client: A blob service client.
        :type blob_client: `azure.storage.blob.BlockBlobService`
        :param force_delete: Forces the deletion of all the containers related this job.
        :type force_delete: bool
        """
        # Delete the batch job. A job that was never created is only logged;
        # any other batch error gets a full traceback plus a pretty-print.
        try:
            batch_service_client.job.delete(self.job_id)
        except batchmodels.batch_error.BatchErrorException as batch_exception:
            if utils.expected_exception(
                    batch_exception, "The specified job does not exist"):
                logger.error(
                    "The specified Job [{}] was not created.".format(
                        self.job_id))
            else:
                traceback.print_exc()
                utils.print_batch_exception(batch_exception)

        # Containers are removed only when the job reached a terminal state
        # (complete / pool failed / never started) or when deletion is forced;
        # otherwise the output container is kept for debugging.
        if self.status.job_state in {
            utils.JobState.COMPLETE, utils.JobState.POOL_FAILED, utils.JobState.NOT_STARTED} or force_delete:
            logger.info('Deleting container [{}]...'.format(
                self.storage_info.input_container))
            blob_client.delete_container(self.storage_info.input_container)

            logger.info('Deleting container [{}]...'.format(
                self.storage_info.output_container))
            blob_client.delete_container(self.storage_info.output_container)
        else:
            logger.info("Did not delete the output container")
            logger.info(
                "Job: {}. did not complete successfully, Container {} was not deleted.".format(
                    self.job_id, self.storage_info.output_container))
Esempio n. 34
0
def delete_container(STORAGE_NAME, STORAGE_KEY, CONTAINER_NAME):
    """Delete CONTAINER_NAME (and every blob inside it) from the storage
    account STORAGE_NAME.

    WARNING: run this only if you really want to drop the container.
    Remember to download your data first -- every blob in the container
    is lost.

    :param STORAGE_NAME: name of the Azure storage account.
    :param STORAGE_KEY: access key for the storage account.
    :param CONTAINER_NAME: name of the container to delete.
    """
    blob_service = BlockBlobService(account_name=STORAGE_NAME,
                                    account_key=STORAGE_KEY)

    # delete_container returns True on success. Bind the result to a
    # local that does not shadow this function's own name.
    deleted = blob_service.delete_container(CONTAINER_NAME)
    # Fixed typo in the original output ("delition" -> "deletion").
    print("{} deletion status success: {}".format(CONTAINER_NAME, deleted))
def cleanup_old_resources(blob_client: azureblob.BlockBlobService, days: int = 7):
    """
    Delete any 'fgrp' storage container that has not been modified for
    ``days`` days (7 by default).

    :param blob_client: A blob service client.
    :type blob_client: `azure.storage.blob.BlockBlobService`
    :param days: Containers last modified more than this many days ago are deleted.
    :type days: int
    """
    # Cutoff in UTC: anything last modified before this moment is stale.
    timeout = utc.localize(datetime.datetime.now()) - datetime.timedelta(days=days)

    try:
        for container in blob_client.list_containers():
            if container.properties.last_modified < timeout:
                # Only touch containers created by this tooling ('fgrp' marker).
                if 'fgrp' in container.name:
                    # Log the actual threshold instead of a hard-coded "7 days".
                    logger.info(
                        "Deleting container {}, it is older than {} days.".format(
                            container.name, days))
                    blob_client.delete_container(container.name)
    except Exception as e:
        logger.error("Failed to clean up resources due to the error: {}".format(e))
        raise e
Esempio n. 36
0
def main(req: func.HttpRequest) -> func.HttpResponse:
    """HTTP-triggered function that deletes the storage container named by
    the ``sessionID`` request parameter (query string or JSON body).

    Returns 200 on success, 400 when sessionID is missing or the deletion
    fails (bad account, key, or container name).
    """
    logging.info('Python HTTP trigger function processed a request.')

    # Connect to the storage account; credentials live in config.json next
    # to this function's source file.
    cwd = os.path.dirname(os.path.realpath(__file__))
    print(cwd)
    # Use a context manager so the file handle is closed (the original
    # leaked it via open(...).read()).
    with open(cwd + "/config.json") as config_file:
        data = json.loads(config_file.read())
    print(data)

    account = data["Storage"][0]["Account"]
    accountKey = data["Storage"][0]["Key"]
    storage = BlockBlobService(account, accountKey)

    # sessionID may arrive via the query string or a JSON request body.
    sessionID = req.params.get('sessionID')
    if not sessionID:
        try:
            req_body = req.get_json()
        except ValueError:
            pass
        else:
            sessionID = req_body.get('sessionID')

    if sessionID:
        try:
            storage.delete_container(sessionID)
            return func.HttpResponse(
                f"The Session ID is {sessionID} - container terminated successfully"
            )
        # Narrowed from a bare except: so SystemExit/KeyboardInterrupt
        # are not swallowed.
        except Exception:
            return func.HttpResponse(
                "Invalid account name, key, or container name",
                status_code=400)
    else:
        return func.HttpResponse(
            "Please pass a name on the query string or in the request body",
            status_code=400)
Esempio n. 37
0
    def delete_resources(self, batch_service_client: batch.BatchExtensionsClient, blob_client: azureblob.BlockBlobService, delete_storage_containers: bool):
        """Tear down everything this test created: its job, its pool and,
        optionally, its input/output storage containers.

        :param batch_service_client: The batch client used for making batch operations
        :type batch_service_client: `azure.batch.BatchExtensionsClient`
        :param blob_client: A blob service client used for making blob operations.
        :type blob_client: `azure.storage.blob.BlockBlobService`
        :param delete_storage_containers: Should the storage containers be deleted
        :type delete_storage_containers: bool
        """
        # Job first, then the pool it ran on.
        utils.terminate_and_delete_job(batch_service_client, self.job_id)
        utils.delete_pool(batch_service_client, self.pool_id)

        # Containers are optional: the caller may want to keep them.
        if not delete_storage_containers:
            return

        logger.info('Deleting input container [{}]...'.format(
            self.storage_info.input_container))
        blob_client.delete_container(self.storage_info.input_container)

        logger.info('Deleting output container [{}]...'.format(
            self.storage_info.output_container))
        blob_client.delete_container(self.storage_info.output_container)
#!/usr/bin/python
# Delete the container named 'keys' from a storage account.
# Usage: <script> <account_name> <account_key>

import sys,os
from azure.storage.blob import BlockBlobService

# argv[1] = storage account name, argv[2] = storage account key.
blob_service = BlockBlobService(account_name=str(sys.argv[1]), account_key=str(sys.argv[2]))

blob_service.delete_container('keys')
Esempio n. 39
0
    copy2("outputs" + str(number_machine) + ".tar.xz", "Outputs/")
    remove("outputs" + str(number_machine) + ".tar.xz")


#to get from the virtual machine to my script scp.get()
#get the outputs for print as output of the script    


print("The script was finished, you can see the outputs by machine in the directory Outputs\n")
print("Let to the script erase the created machines\n")

# Tear down every VM created earlier in the script: first the deployment
# (including its VHD), then the hosted (cloud) service that contained it.
# Each step is best-effort -- a missing resource is just reported.
for number_machine in range(0, number_machines):
    name = 'azbrvm' + str(hash_number) + str(number_machine)
    
    try:
        result = sms.delete_deployment(name ,name,  delete_vhd = True)
        wait_for_async(result.request_id, "Deleting Deployment", "Delete Deployment", 1000)
    except:
        print("No Deployment")
    try:
        result = sms.delete_hosted_service(name, complete = True)
        wait_for_async(result.request_id,"Deleting Cloud Service", "Delete Cloud Service", 1000)
    except:
        print("No Cloud Service")


# Finally remove the shared container and the storage account itself.
# NOTE(review): the 400s/20s sleeps look like empirically chosen waits for
# Azure to release VHD leases before the deletes can succeed -- confirm.
time.sleep(400)
block_blob_service.delete_container(name_container)
time.sleep(20)
sms.delete_storage_account(name_account)