def table_sas(self):
        table_name = self._create_table()
        entity = {
            'PartitionKey': 'test',
            'RowKey': 'test1',
            'text': 'hello world',
            }
        self.service.insert_entity(table_name, entity)

        # Access only to the entities in the given table
        # Query permissions to access entities
        # Expires in an hour
        token = self.service.generate_table_shared_access_signature(
            table_name,
            TablePermissions.QUERY,
            datetime.utcnow() + timedelta(hours=1),
        )

        # Create a service and use the SAS
        sas_service = TableService(
            account_name=self.account.account_name,
            sas_token=token,
        )

        entities = sas_service.query_entities(table_name)
        for entity in entities:
            print(entity.text) # hello world

        self.service.delete_table(table_name)
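A note on the QUERY-only token above: the SAS client can read entities, but any write is rejected by the service. A minimal sketch of that behaviour, assuming the same legacy azure.storage.table / azure-cosmosdb-table SDK and reusing the sas_service and table_name from the example (before the table is deleted):

from azure.common import AzureHttpError

def show_query_only_sas(sas_service, table_name):
    # The token was generated with TablePermissions.QUERY only, so reads
    # succeed while inserts are refused (typically HTTP 403).
    try:
        sas_service.insert_entity(table_name, {
            'PartitionKey': 'test',
            'RowKey': 'test2',
            'text': 'should fail',
        })
    except AzureHttpError as error:
        print('Insert rejected as expected:', error.status_code)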
Example #2
def main():
    blob_container = request.json["container"]
    blob_id = request.json["id"]

    # Load up the .json from blob service
    blob_service = BlockBlobService(account_name=_storage_account,
                                    account_key=_storage_key)
    blob = blob_service.get_blob_to_text(blob_container, blob_id)

    # verbatims is a list of strings
    verbatims = json.loads(blob.content)

    # Generate a UUID for this job. Since it's going to be a long-running task,
    # we return the id to the caller and track job status in the table 'ldajobs'.
    jobid = str(uuid.uuid4())

    # Create the table row for this job, initially status is 'started'
    table_service = TableService(account_name=_storage_account,
                                 account_key=_storage_key)
    table_service.create_table("ldajobs")
    task = {'PartitionKey': 'lda_jobs', 'RowKey': jobid, 'status': 'started'}
    table_service.insert_entity('ldajobs', task)

    # Actually start the job
    threading.Thread(target=lda, args=(
        jobid,
        verbatims,
    )).start()

    # .. and immediately return the jobid to the caller
    return Response("%s verbatims now processing" % len(verbatims),
                    status=200,
                    mimetype='text/plain')
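The lda worker launched by the thread above is not shown here; a hypothetical sketch, assuming the same _storage_account/_storage_key globals and a simple 'started'/'done'/'failed' status convention, of how it could report completion back to the 'ldajobs' row:

def lda(jobid, verbatims):
    # Hypothetical worker body: do the long-running work, then update
    # the job row created by main() with the final status.
    table_service = TableService(account_name=_storage_account,
                                 account_key=_storage_key)
    try:
        # ... long-running topic-modelling work over `verbatims` ...
        status = 'done'
    except Exception:
        status = 'failed'
    table_service.insert_or_replace_entity('ldajobs', {
        'PartitionKey': 'lda_jobs',
        'RowKey': jobid,
        'status': status,
    })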
    def sas_with_signed_identifiers(self):
        table_name = self._create_table()
        entity = {
            'PartitionKey': 'test',
            'RowKey': 'test1',
            'text': 'hello world',
            }
        self.service.insert_entity(table_name, entity)

        # Set access policy on table
        access_policy = AccessPolicy(permission=TablePermissions.QUERY,
                                     expiry=datetime.utcnow() + timedelta(hours=1))
        identifiers = {'id': access_policy}
        self.service.set_table_acl(table_name, identifiers)

        # Wait 30 seconds for acl to propagate
        time.sleep(30)

        # Indicates to use the access policy set on the table
        token = self.service.generate_table_shared_access_signature(
            table_name,
            id='id'
        )

        # Create a service and use the SAS
        sas_service = TableService(
            account_name=self.account.account_name,
            sas_token=token,
        )

        entities = list(sas_service.query_entities(table_name))
        for entity in entities:
            print(entity.text) # hello world

        self.service.delete_table(table_name)
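Because the token above carries no inline permissions and only references the stored access policy, access can later be revoked without regenerating keys by clearing that policy. A minimal sketch using the same service object:

def revoke_stored_access_policy(service, table_name):
    # Removing all signed identifiers invalidates SAS tokens that reference
    # them; as with setting the ACL, allow some time for it to propagate.
    service.set_table_acl(table_name, signed_identifiers={})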
Example #4
def receiver():
    bus_service = ServiceBusService(
        service_namespace='comp6905',
        shared_access_key_name='RootManageSharedAccessKey',
        shared_access_key_value='rK2FMzVKvCjpad7xVSj1AB3hDimhxZq3WtEE4y28yaM=')
    table_service = TableService(
        account_name='comp6905kirk',
        account_key=
        'H1YuP8hBxJ2PKw2hoW4Dr+DMAMvKZ/nGhstHw+87mE+OSBTb23cBxhkUvILgKOHWHA3hi3oaoohwVkp6lOXOlA=='
    )
    while True:
        msg = bus_service.receive_queue_message('queue1', peek_lock=False)
        msg1 = msg.body.decode("utf-8")
        print(msg1)
        parsed_json = json.loads(msg1)
        # print(parsed_json['UserId'])
        task = {
            'PartitionKey': 'Zanko',
            'RowKey': parsed_json['TransactionID'],
            'UserId': parsed_json['UserId'],
            'SellerId': parsed_json['SellerID'],
            'ProductName': parsed_json['Product Name'],
            'SalePrice': parsed_json['Sale Price'],
            'TransactionDate': parsed_json['Transaction Date']
        }
        table_service.insert_entity('Requests', task)
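One caveat with the loop above: insert_entity raises a conflict if Service Bus redelivers a message whose TransactionID has already been stored. A hedged alternative for that final call, if overwriting the existing row is acceptable:

def store_request(table_service, task):
    # Upsert instead of insert so a redelivered message with an
    # already-stored RowKey simply replaces the existing entity.
    table_service.insert_or_replace_entity('Requests', task)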
Example #5
def get_last_run_id():
    table_service = TableService(account_name=STORAGE_ACCOUNT_NAME, account_key=STORAGE_ACCOUNT_KEY)
    databricks_cluster_details_entries = table_service.query_entities('databricks', filter="PartitionKey eq 'pdm'")
    databricks_cluster_details = list(databricks_cluster_details_entries)
    if databricks_cluster_details:
        return databricks_cluster_details[0]['run_id']
    return None
def main(req: func.HttpRequest) -> func.HttpResponse:
    logging.info('Python HTTP trigger function processed a request.')

    accountName = ""
    accountKey = ""

    # Get params
    deviceName = req.params.get('device-name')
    if not deviceName or not deviceName.isalnum():
        return func.HttpResponse("Unable to parse requested device name",
                                 status_code=400)

    # Instantiate db connection
    tableName = 'AirSamples'
    tableService = None
    try:
        tableService = TableService(account_name=accountName,
                                    account_key=accountKey)
    except Exception as error:
        logging.info(error)
        return func.HttpResponse("Unable to connect to Azure Table",
                                 status_code=500)

    logging.info("Requested summary data for device " + deviceName)

    # Return response
    jsonResponse = getDeviceSummary(tableName, tableService, deviceName)
    return func.HttpResponse(jsonResponse, mimetype="application/json")
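getDeviceSummary is referenced but not defined in this snippet. A hypothetical sketch of what it might look like, assuming entities are partitioned by device name and expose a numeric temperature property (both assumptions, not taken from the original):

import json

def getDeviceSummary(tableName, tableService, deviceName):
    # Hypothetical helper: query all samples for the device and return a
    # small JSON summary. The property names here are illustrative only.
    filter_str = "PartitionKey eq '%s'" % deviceName
    samples = list(tableService.query_entities(tableName, filter=filter_str))
    temperatures = [s.temperature for s in samples if hasattr(s, 'temperature')]
    summary = {
        'device': deviceName,
        'sampleCount': len(samples),
        'averageTemperature': (sum(temperatures) / len(temperatures)
                               if temperatures else None),
    }
    return json.dumps(summary)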
    def account_sas(self):
        table_name = self._create_table()
        entity = {
            'PartitionKey': 'test',
            'RowKey': 'test1',
            'text': 'hello world',
            }
        self.service.insert_entity(table_name, entity)

        # Access to all entities in all the tables
        # Expires in an hour
        token = self.service.generate_account_shared_access_signature(
            ResourceTypes.OBJECT,
            AccountPermissions.READ,
            datetime.utcnow() + timedelta(hours=1),
        )

        # Create a service and use the SAS
        sas_service = TableService(
            account_name=self.account.account_name,
            sas_token=token,
        )

        entities = list(sas_service.query_entities(table_name))
        for entity in entities:
            print(entity.text) # hello world

        self.service.delete_table(table_name)
Example #8
    def test_sas_delete(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recordingfile(self.test_mode):
            return

        # Arrange
        entity = self._insert_random_entity()
        token = self.ts.generate_table_shared_access_signature(
            self.table_name,
            TablePermissions.DELETE,
            datetime.utcnow() + timedelta(hours=1),
        )

        # Act
        service = TableService(
            account_name=self.settings.STORAGE_ACCOUNT_NAME,
            sas_token=token,
        )
        self._set_service_options(service, self.settings)
        service.delete_entity(self.table_name, entity.PartitionKey,
                              entity.RowKey)

        # Assert
        with self.assertRaises(AzureMissingResourceHttpError):
            self.ts.get_entity(self.table_name, entity.PartitionKey,
                               entity.RowKey)
Example #9
    def __init__(self, **kwargs):

        self._storage_name = kwargs.get('AZURE_STORAGE_NAME', '')
        self._storage_key = kwargs.get('AZURE_STORAGE_KEY', '')

        """ service init """
        self._models = []
        if self._storage_key != '' and self._storage_name != '':
            self._tableservice = TableService(account_name = self._storage_name, account_key = self._storage_key, protocol='https')

        """ encrypt queue service """
        if kwargs.get('AZURE_REQUIRE_ENCRYPTION', False):

            # Create the KEK (key encryption key) used for encryption.
            # KeyWrapper is a sample implementation; any object that implements
            # the required key-wrapping interface may be used instead.
            kek = KeyWrapper(kwargs.get('AZURE_KEY_IDENTIFIER', 'otrrentapi'),
                             kwargs.get('SECRET_KEY', 'super-duper-secret'))

            # Create the key resolver used for decryption.
            # KeyResolver is a sample implementation; any resolver that maps a
            # key id back to its KEK may be used instead.
            key_resolver = KeyResolver()
            key_resolver.put_key(kek)

            # Set the encryption properties, KEK, and key resolver on the service object.
            self._encryptproperties = True
            self._tableservice.key_encryption_key = kek
            self._tableservice.key_resolver_function = key_resolver.resolve_key
            self._tableservice.encryption_resolver_function = self.__encryptionresolver__


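KeyWrapper and KeyResolver are only referenced by name above. The client-side encryption support in this SDK expects the KEK object to expose wrap_key, unwrap_key, get_key_wrap_algorithm and get_kid, and the resolver to map a key id back to its KEK. An illustrative sketch of both (the AES key-wrap details and the random KEK are assumptions, not the original project's code):

import os
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.keywrap import aes_key_wrap, aes_key_unwrap

class KeyWrapper(object):
    """Illustrative KEK that wraps content-encryption keys with AES key wrap."""
    def __init__(self, kid, secret=None):
        # The original presumably derives the KEK from SECRET_KEY; a random
        # 256-bit key is used here purely for illustration.
        self.kek = os.urandom(32)
        self.kid = kid

    def wrap_key(self, key, algorithm='A256KW'):
        return aes_key_wrap(self.kek, key, default_backend())

    def unwrap_key(self, key, algorithm):
        return aes_key_unwrap(self.kek, key, default_backend())

    def get_key_wrap_algorithm(self):
        return 'A256KW'

    def get_kid(self):
        return self.kid

class KeyResolver(object):
    """Maps a key id back to the KEK so encrypted entities can be decrypted."""
    def __init__(self):
        self.keys = {}

    def put_key(self, key):
        self.keys[key.get_kid()] = key

    def resolve_key(self, kid):
        return self.keys[kid]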
Example #10
    def test_sas_add_outside_range(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recordingfile(self.test_mode):
            return

        # Arrange

        token = self.ts.generate_table_shared_access_signature(
            self.table_name,
            TablePermissions.ADD,
            datetime.utcnow() + timedelta(hours=1),
            start_pk='test',
            start_rk='test1',
            end_pk='test',
            end_rk='test1',
        )

        # Act
        service = TableService(
            account_name=self.settings.STORAGE_ACCOUNT_NAME,
            sas_token=token,
        )
        self._set_service_options(service, self.settings)
        with self.assertRaises(AzureHttpError):
            entity = self._create_random_entity_dict()
            service.insert_entity(self.table_name, entity)
Example #11
    def test_sas_update(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recordingfile(self.test_mode):
            return

        # Arrange
        entity = self._insert_random_entity()
        token = self.ts.generate_table_shared_access_signature(
            self.table_name,
            TablePermissions.UPDATE,
            datetime.utcnow() + timedelta(hours=1),
        )

        # Act
        service = TableService(
            account_name=self.settings.STORAGE_ACCOUNT_NAME,
            sas_token=token,
        )
        self._set_service_options(service, self.settings)
        updated_entity = self._create_updated_entity_dict(
            entity.PartitionKey, entity.RowKey)
        resp = service.update_entity(self.table_name, updated_entity)

        # Assert
        received_entity = self.ts.get_entity(self.table_name,
                                             entity.PartitionKey,
                                             entity.RowKey)
        self._assert_updated_entity(received_entity)
Example #12
    def test_sas_query(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recordingfile(self.test_mode):
            return

        # Arrange
        entity = self._insert_random_entity()
        token = self.ts.generate_table_shared_access_signature(
            self.table_name,
            TablePermissions.QUERY,
            datetime.utcnow() + timedelta(hours=1),
            datetime.utcnow() - timedelta(minutes=1),
        )

        # Act
        service = TableService(
            account_name=self.settings.STORAGE_ACCOUNT_NAME,
            sas_token=token,
        )
        self._set_service_options(service, self.settings)
        entities = list(
            service.query_entities(self.table_name,
                                   filter="PartitionKey eq '{}'".format(
                                       entity['PartitionKey'])))

        # Assert
        self.assertEqual(len(entities), 1)
        self._assert_default_entity(entities[0])
Example #13
    def test_sas_add(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recordingfile(self.test_mode):
            return

        # Arrange
        token = self.ts.generate_table_shared_access_signature(
            self.table_name,
            TablePermissions.ADD,
            datetime.utcnow() + timedelta(hours=1),
            datetime.utcnow() - timedelta(minutes=1),
        )

        # Act
        service = TableService(
            account_name=self.settings.STORAGE_ACCOUNT_NAME,
            sas_token=token,
        )
        self._set_service_options(service, self.settings)

        entity = self._create_random_entity_dict()
        service.insert_entity(self.table_name, entity)

        # Assert
        resp = self.ts.get_entity(self.table_name, entity['PartitionKey'],
                                  entity['RowKey'])
        self._assert_default_entity(resp)
Example #14
    def __init__(self,
                 vm_count=0,
                 sku_type='standard_d2_v2',
                 username='******',
                 password='******'):
        self.vm_count = int(vm_count)
        self.sku_type = sku_type
        self.username = username
        self.password = password
        self.BATCH_ACCOUNT_NAME = os.environ['BATCH_ACCOUNT_NAME']
        BATCH_ACCOUNT_KEY = os.environ['BATCH_ACCOUNT_KEY']
        BATCH_SERVICE_URL = os.environ['BATCH_ACCOUNT_URL']
        STORAGE_ACCOUNT_SUFFIX = 'core.windows.net'
        self.STORAGE_ACCOUNT_NAME = os.environ['STORAGE_ACCOUNT_NAME']
        self.STORAGE_ACCOUNT_KEY = os.environ['STORAGE_ACCOUNT_KEY']

        self.secrets_config = aztk.spark.models.SecretsConfiguration(
            shared_key=aztk.models.SharedKeyConfiguration(
                batch_account_name=self.BATCH_ACCOUNT_NAME,
                batch_account_key=BATCH_ACCOUNT_KEY,
                batch_service_url=BATCH_SERVICE_URL,
                storage_account_name=self.STORAGE_ACCOUNT_NAME,
                storage_account_key=self.STORAGE_ACCOUNT_KEY,
                storage_account_suffix=STORAGE_ACCOUNT_SUFFIX),
            ssh_pub_key="")
        self.table_service = TableService(
            account_name=self.STORAGE_ACCOUNT_NAME,
            account_key=self.STORAGE_ACCOUNT_KEY)
Example #15
 def __init__(self):
     super(TableBase, self).__init__()
     self.table_service = TableService(
         account_name='boburstorage',
         account_key=
         'wRgukLsyhLtnI7qEk8mSGnIBC+IsiTTXEDF1/xnmBGDudJLSeYdtyuVzuSN5/cplJz88AJPyoVyjCmL9N1ECXw=='
     )
Example #16
def uploadFile(cp, localfile, container, blob):
    blobsvc = BlockBlobService(account_name=cp.get('storage_account',
                                                   'account_name'),
                               account_key=cp.get('storage_account',
                                                  'account_key'),
                               endpoint_suffix=cp.get('storage_account',
                                                      'endpoint_suffix'))
    tablesvc = TableService(account_name=cp.get('storage_account',
                                                'account_name'),
                            account_key=cp.get('storage_account',
                                               'account_key'),
                            endpoint_suffix=cp.get('storage_account',
                                                   'endpoint_suffix'))
    try:
        blobsvc.create_blob_from_path(
            container,
            blob,
            localfile,
            content_settings=ContentSettings(
                content_type=magic.from_file(localfile, mime=True)))
        METRIC_TABLE = cp.get('storage_account', 'metric_table')
        METRIC_TABLE_PK = cp.get('storage_account', 'metric_table_pk')
        METRIC_BLOB_COUNT_RK = cp.get('storage_account',
                                      'metric_blob_count_rk')
        metric.incrementMetric(1, METRIC_BLOB_COUNT_RK, METRIC_TABLE,
                               METRIC_TABLE_PK, tablesvc)
    except Exception:
        # Best effort: upload/metric failures are intentionally swallowed.
        pass
def get_keywords():
    # get table service reference
    account_name = getenv('STORAGE_ACCOUNT')
    account_key = getenv('STORAGE_KEY')
    keyword_table = getenv('KEYWORD_TABLE_NAME')
    table_service = TableService(account_name=account_name,
                                 account_key=account_key)

    # query all keyword entities
    keywords = table_service.query_entities(keyword_table,
                                            filter="PartitionKey eq 'Keyword'")

    # separate each keyword by language
    arKeywords = {}
    enKeywords = {}
    for keyword in keywords:
        # map each keyword by its canonical form (currently lowercase English)
        canonicalKeyword = keyword.en_term.lower()
        # pre-compile regex for each keyword
        arKeywordRegex = create_keyword_regex(keyword.ar_term)
        enKeywordRegex = create_keyword_regex(keyword.en_term)
        arKeywords[canonicalKeyword] = arKeywordRegex
        enKeywords[canonicalKeyword] = enKeywordRegex

    return {'ar': arKeywords, 'en': enKeywords}
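create_keyword_regex is not shown in either get_keywords variant. A plausible sketch, an assumption about the original helper, that matches a keyword as a whole word and ignores case:

import re

def create_keyword_regex(keyword):
    # Hypothetical helper: pre-compile a case-insensitive, whole-word pattern
    # for the keyword, escaping any regex metacharacters it may contain.
    return re.compile(r'\b%s\b' % re.escape(keyword), re.IGNORECASE | re.UNICODE)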
Example #18
 def __init__(self):
     super(TableBase, self).__init__()
     self.table_service = TableService(
         account_name='bobur',
         account_key=
         '6e60FZapOXAmUbFBw0SpE1lHRP3RkXOMYRaalWmRBoz4+xI5tvjaJzxXuYyt+yfWxjPXpz5X3PmyIFiQmSkjbw=='
     )
Example #19
    def test_sas_signed_identifier(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recordingfile(self.test_mode):
            return

        # Arrange
        entity = self._insert_random_entity()

        access_policy = AccessPolicy()
        access_policy.start = '2011-10-11'
        access_policy.expiry = '2018-10-12'
        access_policy.permission = TablePermissions.QUERY
        identifiers = {'testid': access_policy}

        self.ts.set_table_acl(self.table_name, identifiers)

        token = self.ts.generate_table_shared_access_signature(
            self.table_name,
            id='testid',
        )

        # Act
        service = TableService(
            account_name=self.settings.STORAGE_ACCOUNT_NAME,
            sas_token=token,
        )
        self._set_service_options(service, self.settings)
        entities = list(
            service.query_entities(self.table_name,
                                   filter="PartitionKey eq '{}'".format(
                                       entity.PartitionKey)))

        # Assert
        self.assertEqual(len(entities), 1)
        self._assert_default_entity(entities[0])
Example #20
def get_test_storage() -> TableService:
    akv = AKVConnector("Not used", "Not used", "Not used", env="dev")
    connection_string = akv.get_storage_connection_string()
    logger = logging.getLogger("unit-tests")
    logger.setLevel(logging.INFO)
    storage_account = TableService(connection_string=connection_string)
    return storage_account
 def __init__(
     self,
     account_name=None,
     account_key=None,
     protocol='https',
     table='logs',
     batch_size=0,
     extra_properties=None,
     partition_key_formatter=None,
     row_key_formatter=None,
     is_emulated=False,
 ):
     """
     Initialize the handler.
     """
     logging.Handler.__init__(self)
     self.service = TableService(account_name=account_name,
                                 account_key=account_key,
                                 is_emulated=is_emulated,
                                 protocol=protocol)
     self.meta = {'hostname': gethostname(), 'process': os.getpid()}
     self.table = _formatName(table, self.meta)
     self.ready = False
     self.rowno = 0
     if not partition_key_formatter:
         # default format for partition keys
         fmt = '%(asctime)s'
         datefmt = '%Y%m%d%H%M'
         partition_key_formatter = logging.Formatter(fmt, datefmt)
     self.partition_key_formatter = partition_key_formatter
     if not row_key_formatter:
         # default format for row keys
         fmt = '%(asctime)s%(msecs)03d-%(hostname)s-%(process)d-%(rowno)02d'
         datefmt = '%Y%m%d%H%M%S'
         row_key_formatter = logging.Formatter(fmt, datefmt)
     self.row_key_formatter = row_key_formatter
     # extra properties and formatters for them
     self.extra_properties = extra_properties
     if extra_properties:
         self.extra_property_formatters = {}
         self.extra_property_names = {}
         for extra in extra_properties:
             if _PY3:
                 f = logging.Formatter(fmt=extra, style=extra[0])
             else:
                 f = logging.Formatter(fmt=extra)
             self.extra_property_formatters[extra] = f
             self.extra_property_names[extra] = self._getFormatName(extra)
     # the storage emulator doesn't support batch operations
     if batch_size <= 1 or is_emulated:
         self.batch = None
     else:
         self.batch = TableBatch()
         if batch_size > TableStorageHandler.MAX_BATCH_SIZE:
             self.batch_size = TableStorageHandler.MAX_BATCH_SIZE
         else:
             self.batch_size = batch_size
     if self.batch:
         self.current_partition_key = None
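This initializer belongs to a table-storage logging handler (the class is referred to as TableStorageHandler above). A brief usage sketch, with placeholder credentials, showing how it would typically be attached to a logger:

import logging

# Hypothetical usage: records logged through this logger are written as
# entities to the 'logs' table of the given storage account.
logger = logging.getLogger('example')
logger.setLevel(logging.INFO)
handler = TableStorageHandler(account_name='mystorageaccount',  # placeholder
                              account_key='<storage key>',      # placeholder
                              table='logs')
logger.addHandler(handler)
logger.info('hello from table storage logging')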
def buscar_meeting_code_processado(meeting_code):
    table_service = TableService(account_name=ACCOUNT_NAME,
                                 account_key=ACCOUNT_KEY)
    records = table_service.query_entities(
        TABLE_TRACKING,
        filter="PartitionKey eq 'audio-analysis' and RowKey eq '" +
        meeting_code + "'")
    return len(records.items) > 0
Example #23
def get_rows(credentials, result_set):
    ts = TableService(account_name=credentials.account_name,
                      account_key=credentials.account_key)
    for entity in ts.query_entities(result_set.table,
                                    filter=result_set.filter):
        row = entity
        row['account_name'] = credentials.account_name
        yield row
Example #24
def main(req: func.HttpRequest) -> func.HttpResponse:

    try:
        logging.info("Trigger started")

        ret = {}

        if "code" not in req.params:
            logging.info("Invalid code")

            ret["message"] = "The parameter code is no present in the request."
            ret["status"] = False

            return func.HttpResponse(json.dumps(ret), headers=headers)
        else:
            code = req.params.get('code')

            logging.info("Processing " + str(code) + "...")

            table_service = TableService(account_name=ACCOUNT_NAME,
                                         account_key=ACCOUNT_KEY)
            records = table_service.query_entities(
                TABLE_NAME_TRACKING,
                filter="PartitionKey eq 'tracking-analysis' and RowKey eq '" +
                code + "'")

            if len(records.items) == 0:
                ret["message"] = "Meeting coding not found"
                ret["status"] = False

                logging.info("Code not found.")

                return func.HttpResponse(json.dumps(ret), headers=headers)
            else:

                additional_stop_words = table_service.get_entity(
                    TABLE_NAME_PARAMETERS, "stopwords", "general").Value

                record = records.items[0]
                freq_dist = json.loads(record["FreqDist"])

                words = []
                for word in freq_dist:
                    if freq_dist[word] > 1 and len(
                            word) > 2 and word not in additional_stop_words:
                        words.append({"name": word, "weight": freq_dist[word]})

                ret["message"] = "Code found at the database"
                ret["status"] = True
                ret["words"] = words

                logging.info("Code successfully processed.")

                return func.HttpResponse(json.dumps(ret), headers=headers)

    except Exception as error:
        logging.error(error)
        return func.HttpResponse(str(error), status_code=400, headers=headers)
def set_last_run_id(run_id):
    table_service = TableService(account_name=STORAGE_ACCOUNT_NAME,
                                 account_key=STORAGE_ACCOUNT_KEY)
    databricks_details = {
        'PartitionKey': 'pdm',
        'RowKey': 'pdm',
        'run_id': str(run_id)
    }
    table_service.insert_or_replace_entity('databricks', databricks_details)
Example #26
 def CreateTableServices(self):
     """
     Inicializa una instancia del Table Services para poder comunicarse con 
     el storage en Azure
     """
     self.TableService = TableService(
         account_name=self.AccountName,
         connection_string=self.CONNECTION_STRING,
         endpoint_suffix=self.EndPointSuffix)
 def table_service(self):
     if not self._table_service:
         self._table_service = TableService(
             self.storage_account, self.access_key_result.keys[0].value)
     if not self._table_service.exists(
             table_name=self.public_key_storage_table_name):
         self._table_service.create_table(
             self.public_key_storage_table_name)
     return self._table_service
Example #28
def updateTask(cp, pk, rk, data):
    tablesvc = TableService(account_name=cp.get('storage_account',
                                                'account_name'),
                            account_key=cp.get('storage_account',
                                               'account_key'),
                            endpoint_suffix=cp.get('storage_account',
                                                   'endpoint_suffix'))
    task = {'PartitionKey': pk, 'RowKey': rk}
    task.update(data)
    tablesvc.merge_entity(cp.get('storage_account', 'task_table'), task)
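A brief usage note: merge_entity updates only the properties supplied in data and leaves the task's other columns intact. For example (keys and values below are illustrative):

# Hypothetical call: mark a task row as completed without touching
# any of its other properties.
updateTask(cp, 'tasks', 'job-0001', {'status': 'completed', 'progress': 100})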
def get_keywords():
    # get table service reference
    account_name = getenv('STORAGE_ACCOUNT')
    account_key = getenv('STORAGE_KEY')
    keyword_table = getenv('KEYWORD_TABLE_NAME')
    site_name = getenv('SITE_NAME')

    table_service = TableService(account_name=account_name,
                                 account_key=account_key)

    filter_str = "PartitionKey eq '%s'" % (site_name)
    # query all keyword entities
    keywords = table_service.query_entities(keyword_table, filter=filter_str)

    # TODO: automate the language detection
    # separate each keyword by language
    deKeywords = {}
    urKeywords = {}
    arKeywords = {}
    enKeywords = {}
    idKeywords = {}
    for keyword in keywords:
        canonicalKeyword = keyword.name.encode('UTF-8').lower()
        if hasattr(keyword, 'name_de'):
            # pre-compile regex for each keyword
            deKeywordRegex = create_keyword_regex(
                keyword.name_de.encode('UTF-8'))
            deKeywords[canonicalKeyword] = deKeywordRegex
        if hasattr(keyword, 'name_ur'):
            # pre-compile regex for each keyword
            urKeywordRegex = create_keyword_regex(
                keyword.name_ur.encode('UTF-8'))
            urKeywords[canonicalKeyword] = urKeywordRegex
        if hasattr(keyword, 'name_ar'):
            # pre-compile regex for each keyword
            arKeywordRegex = create_keyword_regex(
                keyword.name_ar.encode('UTF-8'))
            arKeywords[canonicalKeyword] = arKeywordRegex
        if hasattr(keyword, 'name_id'):
            # pre-compile regex for each keyword
            idKeywordRegex = create_keyword_regex(
                keyword.name_id.encode('UTF-8'))
            idKeywords[canonicalKeyword] = idKeywordRegex
        if hasattr(keyword, 'name_en'):
            # pre-compile regex for each keyword
            enKeywordRegex = create_keyword_regex(
                keyword.name_en.encode('UTF-8'))
            enKeywords[canonicalKeyword] = enKeywordRegex
    return {
        'de': deKeywords,
        'en': enKeywords,
        'id': idKeywords,
        'ur': urKeywords,
        'ar': arKeywords
    }
 def get_table_service(self, my_azure_profile):
     self.debug("ENTER - " + "get_table")
     if my_azure_profile == 'local-table':
         table_service = TableService(
             connection_string=
             'DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;TableEndpoint=http://127.0.0.1:10002/devstoreaccount1;'
         )
     elif my_azure_profile == 'azure':
         azure_storage_connection_string = os.environ[
             'AZURE_CONNECTION_STRING']
         #azure_storage_account_name = os.environ['AZURE_STORAGE_ACCOUNT']
         #azure_storage_account_key = os.environ['AZURE_STORAGE_KEY']
         #table_service = TableService(account_name = azure_storage_account_name, account_key=azure_storage_account_key)
         table_service = TableService(
             connection_string=azure_storage_connection_string)
     else:
         self.debug("Unknown profile: " + my_azure_profile)
         sys.exit(-1)
     self.debug("EXIT - " + "get_table")
     return table_service