Example #1
def enviar_aquivos_audio_blob(main_app, dir="audio_files/"):
    for file in glob.glob(dir + "*.wav"):
        try:
            print("Processando arquivo " + file + "...")
            meeting_code = file.split("_")[1].split("/")[1]
            blob = meeting_code + "/" + file
            print("Meeting code " + str(meeting_code))
            blob_service = BlockBlobService(account_name=ACCOUNT_NAME,
                                            account_key=ACCOUNT_KEY)
            blob_service.create_blob_from_path(CONTAINER_NAME, blob, file)

            if os.path.exists(file):
                os.remove(file)

            queue_service = QueueService(account_name=ACCOUNT_NAME,
                                         account_key=ACCOUNT_KEY)
            queue_service.encode_function = QueueMessageFormat.text_base64encode
            payload = {
                "meeting-code": meeting_code,
                "blob": blob,
                "file-name": util.get_file_with_extension(file)
            }

            payload = json.dumps(payload, ensure_ascii=False)

            queue_service.put_message(QUEUE_NAME_AUDIO, payload)
            print("Arquivo " + file + " processado com sucesso.")

            main_app.mensagem["text"] = "File " + file + " synced successfully"

        except Exception:
            traceback.print_exc()
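
Since the payload is Base64-encoded before being enqueued, a consumer has to set the matching decode function. A minimal sketch of the receiving side, assuming the same ACCOUNT_NAME, ACCOUNT_KEY and QUEUE_NAME_AUDIO constants as the function above:

import json
from azure.storage.queue import QueueService, QueueMessageFormat

queue_service = QueueService(account_name=ACCOUNT_NAME, account_key=ACCOUNT_KEY)
queue_service.decode_function = QueueMessageFormat.text_base64decode

for message in queue_service.get_messages(QUEUE_NAME_AUDIO):
    payload = json.loads(message.content)  # {"meeting-code": ..., "blob": ..., "file-name": ...}
    print("Would download blob:", payload["blob"])
    # Remove the message once it has been handled
    queue_service.delete_message(QUEUE_NAME_AUDIO, message.id, message.pop_receipt)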
Example #2
    def copy_blob_across_storage(self,
                                 sourceblobpath="",
                                 destinationblobpath="",
                                 sourcecontainer="",
                                 destinationcontainer="",
                                 destination_source_account_name="",
                                 destination_source_account_key=""):
        """
        This method copies blob across different storage accounts.

        Args:
            sourceblobpath:source blob name
            destinationblobpath:destionation blob name
            sourcecontainer:source container name
            destinationcontainer:destination container
            destination_source_account_name: storage account name for destination storage account
            destination_source_account_key:storage account key for destination storage account

        """

        destinationfileservice = BlockBlobService(
            account_name=destination_source_account_name,
            account_key=destination_source_account_key)
        local_path = self.download_blob(blobpath=sourceblobpath,
                                        container=sourcecontainer)

        if len(destinationblobpath) == 0:
            destinationblobpath = "/".join(local_path.split("/")[1:])

        destinationfileservice.create_blob_from_path(
            container_name=destinationcontainer,
            blob_name=destinationblobpath,
            file_path=local_path)
        os.remove(local_path)
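
A usage sketch for context (the class name and credentials are hypothetical; download_blob, defined elsewhere on a helper class like the one initialized in Example #3, is assumed to return the local path it downloaded to):

helper = StorageHelper(storage_account_name="srcaccount",          # hypothetical
                       storage_account_access_key="<source-key>")  # hypothetical
helper.copy_blob_across_storage(
    sourceblobpath="raw/2020/01/data.csv",
    destinationblobpath="",  # empty: derived from the downloaded local path
    sourcecontainer="raw-data",
    destinationcontainer="backup",
    destination_source_account_name="dstaccount",
    destination_source_account_key="<destination-key>")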
Example #3
    def __init__(self, storage_account_name="", storage_account_access_key=""):

        self.storage_account_name = storage_account_name
        self.storage_account_access_key = storage_account_access_key
        self.file_service = BlockBlobService(
            account_name=self.storage_account_name,
            account_key=self.storage_account_access_key)
Example #4
 def __init__(self, container_name, config: AzureStorageConfig):
     self._container_name = container_name
     self._blob_service = BlockBlobService(
         account_name=config.account_name,
         account_key=config.account_key
     )
     self._blob_service.create_container(self._container_name)
Example #5
    def create(path: str, azure_info_path: Optional[str] = None):
        """This creates a RichPath object based on the input path.
        To create a remote path, just prefix it appropriately and pass
        in the path to the .json configuration.
        """
        if path.startswith(AZURE_PATH_PREFIX):
            assert azure_info_path is not None, "An AzurePath cannot be created when azure_info_path is None."
            # Strip off the AZURE_PATH_PREFIX:
            path = path[len(AZURE_PATH_PREFIX):]
            account_name, container_name, path = path.split('/', 2)

            with open(azure_info_path, 'r') as azure_info_file:
                azure_info = json.load(azure_info_file)
            account_info = azure_info.get(account_name)
            if account_info is None:
                raise Exception(
                    "Could not find access information for account '%s'!" %
                    (account_name, ))

            sas_token = account_info.get('sas_token')
            account_key = account_info.get('account_key')
            if sas_token is not None:
                assert not sas_token.startswith(
                    '?'
                ), 'SAS tokens should not start with "?". Just delete it.'  #  https://github.com/Azure/azure-storage-python/issues/301
                blob_service = BlockBlobService(account_name=account_name,
                                                sas_token=sas_token)
            elif account_key is not None:
                blob_service = BlockBlobService(account_name=account_name,
                                                account_key=account_key)
            else:
                raise Exception(
                    "Access to Azure storage account '%s' requires either account_key or sas_token!"
                    % (account_name, ))

            # ERROR is too verbose, in particular when downloading based on etags an error is emitted when blob
            # download is aborted.
            logging.getLogger('azure.storage').setLevel(logging.CRITICAL)

            # Replace environment variables in the cache location
            cache_location = account_info.get('cache_location')
            if cache_location is not None:

                def replace_by_env_var(m) -> str:
                    env_var_name = m.group(1)
                    env_var_value = os.environ.get(env_var_name)
                    if env_var_value is not None:
                        return env_var_value
                    else:
                        return env_var_name

                cache_location = re.sub(r'\${([^}]+)}', replace_by_env_var,
                                        cache_location)
            return AzurePath(path,
                             azure_container_name=container_name,
                             azure_blob_service=blob_service,
                             cache_location=cache_location)
        else:
            return LocalPath(path)
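
The file passed as azure_info_path maps each storage account name to its access information, matching the lookups above. A minimal sketch of such a file and the corresponding call, with placeholder values and assuming AZURE_PATH_PREFIX is 'azure://':

# azure_info.json (placeholder values; either "sas_token" or "account_key" is required):
# {
#     "mystorageaccount": {
#         "sas_token": "sv=2019-02-02&ss=b&srt=co&sp=rl&sig=...",
#         "cache_location": "${TMPDIR}/azure_cache"
#     }
# }
path = RichPath.create("azure://mystorageaccount/mycontainer/data/train.jsonl.gz",
                       azure_info_path="azure_info.json")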
Example #6
def upload_file_toblob(input_file, input_blob_name):

    blob_service = BlockBlobService(
        account_name=app.config['STORAGE_ACCOUNT_NAME'],
        account_key=app.config['STORAGE_ACCOUNT_KEY'])
    print('Uploading the input to blob storage...')
    blob_service.create_blob_from_path(app.config['STORAGE_CONTAINER_NAME'],
                                       input_blob_name, input_file)
Example #7
class AzureStorageContainer(StorageContainer):
    """Azure implementation of Storage Container using BlockBlobService."""

    def __init__(self, container_name, config: AzureStorageConfig):
        self._blob_service = None
        self._container_name = container_name
        self._config = config

    def _get_client(self):
        """
        :return: BlockBlobService initialized with account
        name and key from config
        """
        if self._blob_service is None:
            self._blob_service = BlockBlobService(
                account_name=self._config.account_name,
                account_key=self._config.account_key
            )

            self._blob_service.create_container(self._container_name)

        return self._blob_service

    def upload_text(self, blob_name, text):
        """Uploads text to a new blob.

        :param blob_name: Name to give new blob
        :param text: Text to upload
        :return: None
        """
        self._get_client().create_blob_from_text(self._container_name,
                                                 blob_name, text)

    def list_blobs(self):
        """List all blobs in container.

        :return: List of blobs in container
        """
        return self._get_client().list_blobs(self._container_name)

    def get_blob_to_text(self, file_name):
        """Get string from contents of blob.

        :param file_name: Name of blob file
        :return: Text from blob file
        """
        return self._get_client().get_blob_to_text(self._container_name,
                                                   file_name)

    @staticmethod
    def create():
        """Initialize AzureStorageContainer with name and creds from config.

        :return: AzureStorageContainer configured from ProcessConfig and AzureConfig
        """
        return AzureStorageContainer(
            ProcessConfig().config_container_name,
            AzureConfig().storage_config)
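
A usage sketch, assuming AzureStorageConfig can be constructed from hypothetical account_name/account_key values; the BlockBlobService is only created (and the container ensured) on first use:

config = AzureStorageConfig(account_name="myaccount", account_key="<key>")  # placeholders
store = AzureStorageContainer("results", config)
store.upload_text("run-1/summary.txt", "accuracy: 0.93")
print([b.name for b in store.list_blobs()])
print(store.get_blob_to_text("run-1/summary.txt").content)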
Example #8
File: wabs_util.py Project: zurikus/wal-e
def uri_put_file(creds, uri, fp, content_type=None):
    assert fp.tell() == 0
    assert uri.startswith('wabs://')

    def log_upload_failures_on_error(exc_tup, exc_processor_cxt):
        def standard_detail_message(prefix=''):
            return (prefix + '  There have been {n} attempts to upload  '
                    'file {url} so far.'.format(n=exc_processor_cxt, url=uri))

        typ, value, tb = exc_tup
        del exc_tup

        # Screen for certain kinds of known-errors to retry from
        if issubclass(typ, socket.error):
            socketmsg = value[1] if isinstance(value, tuple) else value

            logger.info(
                msg='Retrying upload because of a socket error',
                detail=standard_detail_message(
                    "The socket error's message is '{0}'.".format(socketmsg)))
        else:
            # For all otherwise untreated exceptions, report them as a
            # warning and retry anyway -- all exceptions that can be
            # justified should be treated and have error messages
            # listed.
            logger.warning(
                msg='retrying file upload from unexpected exception',
                detail=standard_detail_message(
                    'The exception type is {etype} and its value is '
                    '{evalue} and its traceback is {etraceback}'.format(
                        etype=typ,
                        evalue=value,
                        etraceback=''.join(traceback.format_tb(tb)))))

        # Help Python GC by resolving possible cycles
        del tb

    url_tup = urlparse(uri)
    kwargs = dict(content_settings=ContentSettings(content_type),
                  validate_content=True)

    conn = BlockBlobService(creds.account_name,
                            creds.account_key,
                            sas_token=creds.access_token,
                            protocol='https')
    conn.create_blob_from_bytes(url_tup.netloc, url_tup.path.lstrip('/'),
                                fp.read(), **kwargs)

    # To maintain consistency with the S3 version of this function we must
    # return an object with a certain set of attributes.  Currently, that set
    # of attributes consists of only 'size'
    return _Key(size=fp.tell())
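
A hedged usage sketch; creds stands for any object exposing account_name, account_key and access_token attributes (a namedtuple here, for illustration only):

from collections import namedtuple

Creds = namedtuple('Creds', 'account_name account_key access_token')
creds = Creds('myaccount', '<key>', None)  # hypothetical credentials

with open('segment.lzo', 'rb') as fp:      # freshly opened, so fp.tell() == 0
    key = uri_put_file(creds, 'wabs://backups/wal/segment.lzo', fp,
                       content_type='application/octet-stream')
print(key.size)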
Example #9
File: wabs_util.py Project: wal-e/wal-e
def uri_get_file(creds, uri, conn=None):
    assert uri.startswith('wabs://')
    url_tup = urlparse(uri)

    if conn is None:
        conn = BlockBlobService(creds.account_name, creds.account_key,
                                sas_token=creds.access_token, protocol='https')

    data = io.BytesIO()

    conn.get_blob_to_stream(url_tup.netloc, url_tup.path.lstrip('/'), data)

    return data.getvalue()
Example #10
    def _get_client(self):
        """
        :return: BlockBlobService initialized with account
        name and key from config
        """
        if self._blob_service is None:
            self._blob_service = BlockBlobService(
                account_name=self._config.account_name,
                account_key=self._config.account_key
            )

            self._blob_service.create_container(self._container_name)

        return self._blob_service
Example #11
File: wabs_util.py Project: wal-e/wal-e
def uri_put_file(creds, uri, fp, content_type=None):
    assert fp.tell() == 0
    assert uri.startswith('wabs://')

    def log_upload_failures_on_error(exc_tup, exc_processor_cxt):
        def standard_detail_message(prefix=''):
            return (prefix + '  There have been {n} attempts to upload  '
                    'file {url} so far.'.format(n=exc_processor_cxt, url=uri))
        typ, value, tb = exc_tup
        del exc_tup

        # Screen for certain kinds of known-errors to retry from
        if issubclass(typ, socket.error):
            socketmsg = value[1] if isinstance(value, tuple) else value

            logger.info(
                msg='Retrying upload because of a socket error',
                detail=standard_detail_message(
                    "The socket error's message is '{0}'."
                    .format(socketmsg)))
        else:
            # For all otherwise untreated exceptions, report them as a
            # warning and retry anyway -- all exceptions that can be
            # justified should be treated and have error messages
            # listed.
            logger.warning(
                msg='retrying file upload from unexpected exception',
                detail=standard_detail_message(
                    'The exception type is {etype} and its value is '
                    '{evalue} and its traceback is {etraceback}'
                    .format(etype=typ, evalue=value,
                            etraceback=''.join(traceback.format_tb(tb)))))

        # Help Python GC by resolving possible cycles
        del tb

    url_tup = urlparse(uri)
    kwargs = dict(
        content_settings=ContentSettings(content_type),
        validate_content=True)

    conn = BlockBlobService(creds.account_name, creds.account_key,
                sas_token=creds.access_token, protocol='https')
    conn.create_blob_from_bytes(url_tup.netloc, url_tup.path.lstrip('/'),
                fp.read(), **kwargs)

    # To maintain consistency with the S3 version of this function we must
    # return an object with a certain set of attributes.  Currently, that set
    # of attributes consists of only 'size'
    return _Key(size=fp.tell())
Example #12
def delete_data_from_blob(prefix):
    from azure.storage.blob.blockblobservice import BlockBlobService

    ws = get_workspace()

    def_blob_store = ws.get_default_datastore()

    print("Deleting blobs from folder:", prefix)
    blob_service = BlockBlobService(
        def_blob_store.account_name, def_blob_store.account_key
    )

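    # First pass: delete the video (mp4) blobs under the prefix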
    generator = blob_service.list_blobs(
        def_blob_store.container_name, prefix=prefix
    )
    for blob in generator:
        if blob.name.endswith("mp4"):
            print("Deleting: " + blob.name)
            blob_service.delete_blob(def_blob_store.container_name, blob.name)

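    # Second pass: delete whatever remains under the prefix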
    generator = blob_service.list_blobs(
        def_blob_store.container_name, prefix=prefix
    )
    for blob in generator:
        print("Deleting: " + blob.name)
        blob_service.delete_blob(def_blob_store.container_name, blob.name)
Example #13
def upload_slide():
    # TODO: If this config is not present, disable upload instead of failing to load
    accountName = app.config["AZURE_STORAGE_ACCOUNT_NAME"]
    containerName = app.config["AZURE_STORAGE_ACCOUNT_SVSUPLOAD_CONTAINER_NAME"]
    accountKey = app.config["AZURE_STORAGE_ACCOUNT_KEY"]
    blob_service = BlockBlobService(account_name=accountName, account_key=accountKey)

    permission = ContainerPermissions(write=True)

    now = datetime.now(timezone.utc)
    expiry = now + timedelta(hours=2)
    sasToken = blob_service.generate_container_shared_access_signature(container_name=containerName, permission=permission,
                                                             protocol='https', start=now, expiry=expiry)
    container_url = f'https://{accountName}.blob.core.windows.net/{containerName}?{sasToken}'
    return render_template('slide/upload.html', container_url=container_url)
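
The rendered page is expected to upload directly to the container with that SAS URL. A rough sketch of the equivalent client-side call, made here with requests against the Put Blob REST operation (blob name and local file are illustrative):

import requests

base_url, sas = container_url.split('?', 1)
with open('slide-001.svs', 'rb') as fh:
    resp = requests.put(base_url + '/slide-001.svs?' + sas,
                        data=fh,
                        headers={'x-ms-blob-type': 'BlockBlob'})
resp.raise_for_status()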
Example #14
File: wabs_util.py Project: zurikus/wal-e
def uri_get_file(creds, uri, conn=None):
    assert uri.startswith('wabs://')
    url_tup = urlparse(uri)

    if conn is None:
        conn = BlockBlobService(creds.account_name,
                                creds.account_key,
                                sas_token=creds.access_token,
                                protocol='https')

    data = io.BytesIO()

    conn.get_blob_to_stream(url_tup.netloc, url_tup.path.lstrip('/'), data)

    return data.getvalue()
Example #15
def resize_and_convert(image, width=500, height=500, container="media"):
    if settings.DEBUG:
        img = Image.open(image)
        img = img.resize((width, height), Image.ANTIALIAS)
        img = img.convert("RGB")
        img.save(image.path, format="JPEG")
    else:
        temp = io.BytesIO()
        img = Image.open(image)
        img = img.resize((width, height), Image.ANTIALIAS)
        img = img.convert("RGB")
        img.save(temp, format="JPEG")
        bbs = BlockBlobService(account_name='liveportal2019',
                               account_key=os.environ.get(
                                   'LP_AZURE_STORAGE_KEY', ''))
        bbs.create_blob_from_bytes(container, image.name, temp.getvalue())
Example #16
def test_no_retry_on_keyboadinterrupt(collect):
    """Ensure that KeyboardInterrupts are forwarded."""
    key_name = 'test-key-name'
    b = B(name=key_name)

    # If vanilla KeyboardInterrupt is used, then sending SIGINT to the
    # test can cause it to pass improperly, so use a subtype instead.
    class MarkedKeyboardInterrupt(KeyboardInterrupt):
        pass

    collect.inject(MarkedKeyboardInterrupt('SIGINT, probably'))
    d = wabs_deleter.Deleter(BlockBlobService('test', 'ing'), 'test-container')

    with pytest.raises(MarkedKeyboardInterrupt):
        d.delete(b)

        # Exactly when coroutines are scheduled is non-deterministic,
        # so spin while yielding to provoke the
        # MarkedKeyboardInterrupt being processed within the
        # pytest.raises context manager.
        while True:
            gevent.sleep(0.1)

    # Only one key should have been aborted, since the purpose is to
    # *not* retry when processing KeyboardInterrupt.
    assert collect.aborted_keys == [key_name]

    # Turn off fault injection and flush/synchronize with close().
    collect.inject(None)
    d.close()

    # Since there is no retrying, no keys should be deleted.
    assert not collect.deleted_keys
Example #17
 def connect(self, creds):
     """Return an azure BlockBlobService instance.
     """
     return BlockBlobService(account_name=creds.account_name,
                             account_key=creds.account_key,
                             sas_token=creds.access_token,
                             protocol='https')
Example #18
def test_close_error():
    """Ensure that attempts to use a closed Deleter results in an error."""

    d = wabs_deleter.Deleter(BlockBlobService('test', 'ing'), 'test-container')
    d.close()

    with pytest.raises(exception.UserCritical):
        d.delete('no value should work')
Example #19
 def service(self):
     if self._service is None:
         self._service = BlockBlobService(
             self.account_name,
             self.account_key,
             is_emulated=self.is_emulated,
             custom_domain=self.custom_domain,
         )
     return self._service
Example #20
def test_processes_one_deletion(collect):
    key_name = 'test-key-name'
    b = B(name=key_name)

    d = wabs_deleter.Deleter(BlockBlobService('test', 'ing'), 'test-container')
    d.delete(b)
    d.close()

    assert collect.deleted_keys == [key_name]
Example #21
 def __init__(self, locator, jobChunkSize=maxAzureTablePropertySize):
     super(AzureJobStore, self).__init__()
     accountName, namePrefix = locator.split(':', 1)
     if '--' in namePrefix:
         raise ValueError(
             "Invalid name prefix '%s'. Name prefixes may not contain %s." %
             (namePrefix, self.nameSeparator))
     if not self.containerNameRe.match(namePrefix):
         raise ValueError(
             "Invalid name prefix '%s'. Name prefixes must contain only digits, "
             "hyphens or lower-case letters and must not start or end in a "
             "hyphen." % namePrefix)
     # Reserve 13 for separator and suffix
     if len(namePrefix) > self.maxContainerNameLen - self.maxNameLen - len(
             self.nameSeparator):
         raise ValueError((
             "Invalid name prefix '%s'. Name prefixes may not be longer than 50 "
             "characters." % namePrefix))
     self.locator = locator
     self.jobChunkSize = jobChunkSize
     self.accountKey = _fetchAzureAccountKey(accountName)
     self.accountName = accountName
     # Table names have strict requirements in Azure
     self.namePrefix = self._sanitizeTableName(namePrefix)
     # These are the main API entry points.
     self.tableService = TableService(account_key=self.accountKey,
                                      account_name=accountName)
     self.blobService = BlockBlobService(account_key=self.accountKey,
                                         account_name=accountName)
     # Serialized jobs table
     self.jobItems = None
     # Job<->file mapping table
     self.jobFileIDs = None
     # Container for all shared and unshared files
     self.files = None
     # Stats and logging strings
     self.statsFiles = None
     # File IDs that contain stats and logging strings
     self.statsFileIDs = None
Example #22
def get_azure_storage_client(config):
    # Todo: Move away from global client.
    global azure_storage_client

    if azure_storage_client is not None:
        return azure_storage_client

    azure_storage_client = BlockBlobService(
        config.get("storage_account"), account_key=config.get("storage_key"))

    return azure_storage_client
Example #23
    def setUp(self):
        self.account_name = self.account_name or os.environ.get(
            "AZURE_BLOB_ACCOUNT_NAME")
        self.account_key = os.environ.get("AZURE_BLOB_ACCOUNT_KEY")
        try:
            warnings.simplefilter("ignore", ResourceWarning)
        except Exception:
            pass

        self.connection = BlockBlobService(account_name=self.account_name,
                                           account_key=self.account_key)
Example #24
 def _blob_service(self, custom_domain=None, connection_string=None):
     # This won't open a connection or anything,
     # it's akin to a client
     return BlockBlobService(account_name=self.account_name,
                             account_key=self.account_key,
                             sas_token=self.sas_token,
                             is_emulated=self.is_emulated,
                             protocol=self.azure_protocol,
                             custom_domain=custom_domain,
                             connection_string=connection_string,
                             token_credential=self.token_credential,
                             endpoint_suffix=self.endpoint_suffix)
Example #25
def timed_job():
    config = configparser.ConfigParser()
    config.read('config/config.cfg')
    account = config.get('DEFAULT', 'ACCOUNT')
    key = config.get('DEFAULT', 'KEY')
    promi = config.get('DEFAULT', 'PROM')
    promup = promi.encode()
    container = config.get('DEFAULT', 'CONTAINER')
    url = config.get('DEFAULT', 'URL')
    blob_service = BlockBlobService(account_name=account, account_key=key)
    userAndPass = b64encode(promup).decode("ascii")
    headers = {'Authorization': 'Basic %s' % userAndPass}

    prom = PrometheusConnect(url=url, headers=headers, disable_ssl=False)
    metric_data = prom.all_metrics()

    time = datetime.now()
    metrics = []
    values = []

    for i in metric_data:
        metric = prom.get_metric_range_data(metric_name=i,
                                            start_time=time -
                                            timedelta(hours=1),
                                            end_time=time,
                                            chunk_size=timedelta(hours=1))
        x = 0
        for d in metric:
            for name, dct in d.items():
                dct = dict(dct)
                if name == 'metric':
                    dct['id'] = x
                    metrics.append(dct)
                else:
                    for ts in dct:  # avoid shadowing the storage key read above
                        va = {}
                        va['time'] = ts
                        va['value'] = dct[ts]
                        va['id'] = x
                        values.append(va)
                        x = x + 1

    df = pd.DataFrame(metrics)
    df1 = pd.DataFrame(values)
    df = pd.merge(df, df1, how='inner', left_on=['id'], right_on=['id'])
    df['time'] = pd.to_datetime(df['time'], unit='s')

    df = df.drop(['endpoint', 'service', 'id'], axis=1)
    write_pandas_dataframe_to_blob(
        blob_service, df, container,
        str((datetime.now()).date()) + '/' +
        str(datetime.now().time()).replace(':', '').replace(".", ''))
Example #26
 def __init__(self,
              account_name=None,
              account_key=None,
              sas_token=None,
              connection_string=None,
              **storage_options):
     account_name = account_name or os.environ.get(
         "AZURE_BLOB_ACCOUNT_NAME")
     account_key = account_key or os.environ.get("AZURE_BLOB_ACCOUNT_KEY")
     sas_token = sas_token or os.environ.get("AZURE_BLOB_SAS_TOKEN")
     connection_string = connection_string or os.environ.get(
         "AZURE_BLOB_CONNECTION_STRING")
     self.connection = BlockBlobService(
         account_name=account_name,
         account_key=account_key,
         sas_token=sas_token,
         connection_string=connection_string,
         protocol=storage_options.get("protocol") or "https",
         endpoint_suffix=storage_options.get("endpoint_suffix"),
         custom_domain=storage_options.get("custom_domain"))
     self.sep = "/"
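
A usage sketch relying on the environment-variable fallbacks documented above (credential values are placeholders, and AzureBlobClient is a hypothetical name for the class this __init__ belongs to):

import os

os.environ["AZURE_BLOB_ACCOUNT_NAME"] = "myaccount"  # placeholder
os.environ["AZURE_BLOB_ACCOUNT_KEY"] = "<key>"       # placeholder

client = AzureBlobClient()  # hypothetical class name
print(client.connection.exists("mycontainer"))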
Example #27
def handler(blob):
    all_rows = [HEADER_ROW]
    key = blob.name.replace(container_name + "/", "")
    event_type = "Arrival"
    if "Depart" in key: event_type = "Departure"
    date = normalize_date(' '.join(key.split()[0:2]))
    outputfile = tempfile.NamedTemporaryFile(mode='r+', delete=False)
    excel2csv(blob, outputfile)
    rows = rowify(outputfile, date, event_type)
    print("Processed file {} with {} entries".format(key, len(rows)))
    all_rows.extend(rows)
    csv_rows = [",".join(row) for row in all_rows]

    file_extension = re.search(r'.+\.(.+?)$', key).group(1)
    new_file_name = 'translated/' + key.replace(file_extension, 'csv')
    block_blob_service = BlockBlobService(account_name=acct_name,
                                          account_key=acct_key)

    upload_data = '\n'.join(csv_rows) + '\n'
    block_blob_service.create_blob_from_text(container_name=container_name,
                                             blob_name=new_file_name,
                                             text=str(upload_data))
Example #28
    def create_block_blob_service(self):
        '''
        Creates a BlockBlobService object with the settings specified in the 
        CloudStorageAccount.

        :return: A service object.
        :rtype: :class:`~azure.storage.blob.blockblobservice.BlockBlobService`
        '''
        try:
            from azure.storage.blob.blockblobservice import BlockBlobService
            return BlockBlobService(self.account_name, self.account_key,
                                    sas_token=self.sas_token,
                                    is_emulated=self.is_emulated)
        except ImportError:
            raise Exception('The package azure-storage-blob is required. '
                            + 'Please install it using "pip install azure-storage-blob"')
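
This is the factory-method style offered by azure.storage.common.CloudStorageAccount; a usage sketch with placeholder credentials:

from azure.storage.common import CloudStorageAccount

account = CloudStorageAccount(account_name='myaccount', account_key='<key>')  # placeholders
service = account.create_block_blob_service()
service.create_container('scratch')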
Example #29
    def block_blob_service(self):
        ACCOUNT_NAME = os.environ['ACCOUNT_NAME']
        ACCOUNT_KEY = os.environ['ACCOUNT_KEY']

        block_blob_service = BlockBlobService(account_name=ACCOUNT_NAME,
                                              account_key=ACCOUNT_KEY)

        block_blob_service.create_container(self.container_name)

        block_blob_service.set_container_acl(
            self.container_name, public_access=PublicAccess.Container)
        return block_blob_service
Example #30
def test_processes_many_deletions(collect):
    # Generate a target list of keys in a stable order
    target = sorted(['test-key-' + str(x) for x in range(20001)])

    # Construct boto S3 Keys from the generated names and delete them
    # all.
    blobs = [B(name=key_name) for key_name in target]
    d = wabs_deleter.Deleter(BlockBlobService('test', 'ing'), 'test-container')

    for b in blobs:
        d.delete(b)

    d.close()

    # Sort the deleted key names to obtain another stable order and
    # then ensure that everything was passed for deletion
    # successfully.
    assert sorted(collect.deleted_keys) == target
Example #31
class AzureStorageContainer(Common.Contracts.StorageContainer):

    def __init__(self, container_name, config: AzureStorageConfig):
        self._container_name = container_name
        self._blob_service = BlockBlobService(
            account_name=config.account_name,
            account_key=config.account_key
        )
        self._blob_service.create_container(self._container_name)

    def upload_text(self, blob_name, text):
        self._blob_service.create_blob_from_text(self._container_name, blob_name, text)

    def list_blobs(self):
        return self._blob_service.list_blobs(self._container_name)

    def get_blob_to_text(self, file_name):
        return self._blob_service.get_blob_to_text(self._container_name, file_name)
Example #32
def test_retry_on_normal_error(collect):
    """Ensure retries are processed for most errors."""
    key_name = 'test-key-name'
    b = B(name=key_name)

    collect.inject(Exception('Normal error'))
    d = wabs_deleter.Deleter(BlockBlobService('test', 'ing'), 'test-container')
    d.delete(b)

    # Since delete_keys will fail over and over again, aborted_keys
    # should grow quickly.
    while len(collect.aborted_keys) < 2:
        gevent.sleep(0.1)

    # Since delete_keys has been failing repeatedly, no keys should be
    # successfully deleted.
    assert not collect.deleted_keys

    # Turn off fault injection and flush/synchronize with close().
    collect.inject(None)
    d.close()

    # The one enqueued job should have been processed.
    assert collect.deleted_keys == [key_name]
Example #33
def apathetic_container_delete(container_name, *args, **kwargs):
    conn = BlockBlobService(*args, **kwargs)
    conn.delete_container(container_name)

    return conn
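
A usage sketch with placeholder credentials; with the legacy SDK's default fail_not_exist=False, delete_container simply returns False when the container is absent rather than raising, which is what makes the helper "apathetic":

conn = apathetic_container_delete('tmp-wal-segments',
                                  account_name='myaccount',  # placeholder
                                  account_key='<key>')       # placeholder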