Example No. 1
    def test_update_releases(self, mock_now, mock_list_containers):
        # Mock time
        mock_now.return_value = pendulum.datetime(year=2020,
                                                  month=5,
                                                  day=1,
                                                  minute=10)

        # Mock containers
        containers_in = make_containers()
        mock_list_containers.return_value = containers_in

        # Mock fetching of containers
        client = MagArchiverClient(account_name=self.account_name,
                                   account_key=self.account_key)
        containers = client.list_containers(last_modified_thresh=1)

        try:
            # Update releases based on containers
            num_updated, num_errors = client.update_releases(containers)
            self.assertEqual(num_updated, 2)
            self.assertEqual(num_errors, 0)
        finally:
            # Clean up
            service = TableService(account_name=self.account_name,
                                   account_key=self.account_key)
            for container in containers:
                service.delete_entity(MagRelease.TABLE_NAME, 'mag',
                                      container.name.replace("mag-", ""))
Example No. 2
class AzureTableProvider:
    def __init__(self, account_name, key, table_name):
        self.target_table = table_name

        if not account_name or not key:
            raise Exception('Account or key not specified')

        self.table_service = TableService(account_name=account_name, account_key=key)
        self.table_service.create_table(self.target_table)

    def get_all(self):
        return self.table_service.query_entities(self.target_table)

    def remove(self, item):
        query_str = "Link eq '%s'" % item
        tasks = list(self.table_service.query_entities(self.target_table, filter=query_str))
        if not tasks:
            return False
        for task in tasks:
            self.table_service.delete_entity(self.target_table, task.PartitionKey, task.RowKey)
        return True

    def add(self, item):
        track = {
            'PartitionKey': 'MusicBotEntry',
            'RowKey': str(uuid.uuid4()),
            'Link': item
        }
        self.table_service.insert_entity(self.target_table, track)
Example No. 3
def main(req: func.HttpRequest) -> func.HttpResponse:
    logging.info('Unregister request received.')

    server_id = req.params.get('server-id')
    region = req.params.get('region')

    if not server_id:
        return http_utils.create_function_response(
            {'message': 'Missing required parameter: server-id'},
            status_code=400)

    if not region:
        return http_utils.create_function_response(
            {'message': 'Missing required parameter: region'}, status_code=400)

    try:
        table_name = 'servers'
        table_service = TableService(
            connection_string=os.environ['AzureWebJobsStorage'])
        table_service.delete_entity(table_name, region, server_id)

        return http_utils.create_function_response(
            {'message': f'Server {server_id} successfully unregistered.'}, 200)
    except Exception:
        return http_utils.create_function_response(
            {'message': f'Server {server_id} not found.'}, 400)
Example No. 4
def delete_orphan_record(partition_key, short_id, instance_name, account_key):
    start = time.time()

    table_service = TableService(account_name=instance_name, account_key=account_key)
    table_service.delete_entity('orphans', partition_key, short_id)

    print "delete_orphan_record completed in {} sec".format(time.time()-start)
Example No. 5
class AzureCosmosDb(TableStorage):
    def __init__(self, config: AzureCosmosDbConfig):
        self._tableService = TableService(account_name=config.account_name,
                                          account_key=config.account_key)
        self._tableName = config.table_name

    def check_entry_exists(self, entry):
        try:
            self.query(entry['PartitionKey'], entry['RowKey'])
            return True
        except Exception:
            return False

    def write(self, entry):
        prepared = entry_storage.EntryOperations.prepare_entry_for_insert(
            entry)

        if not self.check_entry_exists(prepared):
            self._tableService.insert_entity(self._tableName, prepared)
        else:
            self._tableService.update_entity(self._tableName, prepared)

    def query(self, partitionkey, rowkey):
        task = self._tableService.get_entity(self._tableName, partitionkey,
                                             rowkey)
        return task

    def delete(self, partitionkey, rowkey):
        self._tableService.delete_entity(self._tableName, partitionkey, rowkey)
Example No. 6
def migrate_notification_keys(table_service: TableService) -> None:
    table_name = "Notification"
    notifications = table_service.query_entities(
        table_name, select="PartitionKey,RowKey,config")
    partitionKey = None

    count = 0
    for entry in notifications:
        try:
            UUID(entry.PartitionKey)
            continue
        except ValueError:
            pass

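        # PartitionKey is not a UUID yet: write the entity back with
        # PartitionKey and RowKey swapped, then delete the original row.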
        table_service.insert_or_replace_entity(
            table_name,
            {
                "PartitionKey": entry.RowKey,
                "RowKey": entry.PartitionKey,
                "config": entry.config,
            },
        )
        table_service.delete_entity(table_name, entry.PartitionKey,
                                    entry.RowKey)
        count += 1

    print("migrated %s rows" % count)
def store_predictions_in_table(predictions,
                               times,
                               table_name="predictedSoilMoistureMessages"):

    # Connect to account
    table_service = TableService(
        account_name='soilhumiditydata293s',
        account_key=
        '4PSsEO1xBAIdq3/MppWm+t6eYHi+CWhVn6xNZ6i4mLVgm50K8+NK6lA94v8MxG0bvVEfYCvsv1suxCyCnUYd0A=='
    )

    # Delete existing table predictions
    table = table_service.query_entities(table_name)
    for entry in table:
        table_service.delete_entity(table_name, entry['PartitionKey'],
                                    entry['RowKey'])

    # Store values in table
    for i in range(len(predictions)):
        new_entity = Entity()
        new_entity.PartitionKey = datetime.strftime(times[i],
                                                    "%Y-%m-%d %H:%M:%S")
        new_entity.RowKey = str(i)
        new_entity['soilmoistureprediction'] = str(predictions[i])

        table_service.insert_entity(table_name, new_entity)
Example No. 8
def main(req: func.HttpRequest) -> func.HttpResponse:
    logging.info('Python HTTP Submit trigger received a request')

    logging.debug('Creating table service')
    table_service = TableService(
        account_name=os.getenv('AZURE_STORAGE_ACCOUNT'),
        account_key=os.getenv('AZURE_STORAGE_ACCESS_KEY')
    )

    headers_dict = {
            "Access-Control-Allow-Credentials": "true",
            "Access-Control-Allow-Origin": "*",
            "Access-Control-Allow-Methods": "Post"
    }
    schema = getjob_schema.GetJobSchema()
    try:
        getjob_dict = schema.loads(req.get_body())
    except ValidationError:
        return func.HttpResponse(f'Failed to validate getjob schema',
                                 headers=headers_dict,
                                 status_code=400
                                 )
    if getjob_dict['num_messages'] != 1:
        return func.HttpResponse(f'Number of messages should be 1',
                                 headers=headers_dict,
                                 status_code=400
                                 )
    table_name = os.getenv('AZURE_TABLE_NAME')
    entities = table_service.query_entities(table_name, filter="PartitionKey eq 'await'")
    entity = next(iter(entities), None)
    if not entity:
        return func.HttpResponse(f'No job found',
                                 headers=headers_dict,
                                 status_code=400
                                 )
    message = {}
    message['crop'] = entity.crop
    message['geometry'] = json.loads(entity.geometry)
    message['irrigated'] = entity.irrigated
    message['guid'] = entity.RowKey
    message['area_name'] = entity.area_name
    message['planting_date'] = entity.planting_date
    message['fraction'] = entity.fraction

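    # PartitionKey cannot be changed in place, so the job is moved to the
    # 'processing' partition by deleting the row and re-inserting it.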
    table_service.delete_entity(table_name, entity.PartitionKey, entity.RowKey)
    entity.PartitionKey = 'processing'

    table_service.insert_entity(table_name, entity)

    return func.HttpResponse(json.dumps(message),
                             headers=headers_dict,
                             mimetype='application/json'
                             )
Example No. 9
class AzureTable():
    def __init__(self, account_name, account_key):
        self.table_service = TableService(account_name=account_name,
                                          account_key=account_key)

    def create_table(self, table_name):
        return self.table_service.create_table(table_name)

    def exists_table(self, table_name):
        return self.table_service.exists(table_name)

    def insert_or_replace_entity(self, table_name, partition_key, row_key,
                                 **kwargs):
        try:
            entity = self.table_service.get_entity(table_name, partition_key,
                                                   row_key)
        except Exception:
            # Insert a new entity
            entity = {'PartitionKey': partition_key, 'RowKey': row_key}

        for (k, v) in kwargs.items():
            entity[k] = v

        return self.table_service.insert_or_replace_entity(table_name, entity)

    def insert_or_replace_entity2(self, table_name, entity):
        return self.table_service.insert_or_replace_entity(table_name, entity)

    def insert_entity(self, table_name, entity):
        return self.table_service.insert_entity(table_name, entity)

    def update_entity(self, table_name, entity):
        return self.table_service.update_entity(table_name, entity)

    def get_entity(self, table_name, partition_key, row_key):
        return self.table_service.get_entity(table_name, partition_key,
                                             row_key)

    def delete_entity(self, table_name, partition_key, row_key):
        self.table_service.delete_entity(table_name, partition_key, row_key)

    def delete_table(self, table_name):
        return self.table_service.delete_table(table_name)

    def get_entities(self, table_name, partition_key):
        filter_string = "PartitionKey eq '{0}'".format(partition_key)
        return self.table_service.query_entities(table_name, filter_string)
Example No. 10
def main(req: func.HttpRequest) -> func.HttpResponse:
    logging.info('Python HTTP Submit trigger received a request')

    logging.debug('Creating table service')

    headers_dict = {
            "Access-Control-Allow-Credentials": "true",
            "Access-Control-Allow-Origin": "*",
            "Access-Control-Allow-Methods": "Post"
    }
    schema = put_schema.DoneSchema()
    try:
        done_dict = schema.loads(req.get_body())
    except ValidationError:
        error = f'Failed to validate the done message'
        return func.HttpResponse(error,
                                 headers=headers_dict,
                                 status_code=400
                                 )

    table_name = os.getenv('AZURE_TABLE_NAME')
    table_service = TableService(
        account_name=os.getenv('AZURE_STORAGE_ACCOUNT'),
        account_key=os.getenv('AZURE_STORAGE_ACCESS_KEY')
    )
    try:
        entity = table_service.get_entity(table_name, 'processing', done_dict['guid'])
    except AzureMissingResourceHttpError:
        error = f'Failed to put done message'
        return func.HttpResponse(error,
                                 headers=headers_dict,
                                 status_code=400
                                 )
    if not done_dict['error']:
        entity.Error = ""
    else:
        entity.Error = done_dict['error']
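    # Move the entity to the 'done' partition: keys are immutable, so the row
    # is deleted and re-inserted with the new PartitionKey.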
    table_service.delete_entity(table_name, entity.PartitionKey, entity.RowKey)
    entity.PartitionKey = 'done'
    table_service.insert_entity(table_name, entity)

    return func.HttpResponse('Message was successfully inserted into Done queue',
                             headers=headers_dict
                             )
Example No. 11
def main(req: func.HttpRequest) -> func.HttpResponse:
    logging.info('Starting delete rows.')

    table_name = req.headers.get('name')
    column = req.headers.get('column')
    pattern = req.headers.get('pattern')
    if not table_name:  # If name wasn't passed as a header, look for it in the parameters
        table_name = req.params.get('name')
    if not column:  # If column wasn't passed as a header, look for it in the parameters
        column = req.params.get('column')
    if not pattern:  # If pattern wasn't passed as a header, look for it in the parameters
        pattern = req.params.get('pattern')
    ret = dict()

    if table_name:
        retrieved_secret = getConnectionString()

        table_service = TableService(connection_string=retrieved_secret.value)
        df = get_dataframe_from_table_storage_table(table_service, table_name)

        #Filter dataframe by pattern
        to_delete = df[df[column].str.contains(pattern).fillna(value=False)]

        #Loop over the dataframe and delete records
        for i, o in to_delete.iterrows():
            logging.info('Deleting {}'.format(i+1))
            table_service.delete_entity(table_name,
                                        partition_key=o['PartitionKey'],
                                        row_key=o['RowKey'])
        ret['result'] = "Deleted {} rows!".format(to_delete.shape[0])
        return func.HttpResponse(
             json.dumps(ret),
             status_code=200
        )
    else:
        ret['result'] = "Please pass a table name!!"
        return func.HttpResponse(
             json.dumps(ret),
             status_code=400
        )
Example No. 12
class AzureTable(object):
    def __init__(self, account_name: str, account_key: str, table_name: str,
                 partition_key_field: str, clustering_key_field: str):
        self.table = TableService(account_name=account_name,
                                  account_key=account_key)
        self.table_name = table_name
        self.partition_key_field = partition_key_field
        self.clustering_key_field = clustering_key_field

    @property
    def partition_key_name(self) -> str:
        return 'PartitionKey'

    @property
    def clustering_key_name(self) -> str:
        return 'RowKey'

    def get_payload(self, payload: dict):
        item = deepcopy(payload)
        partition_key = payload.get(self.partition_key_field)
        clustering_key = payload.get(self.clustering_key_field)
        if partition_key is None:
            raise PartitionKeyNotFoundError(
                'payload={} does not have a partition key'.format(payload))
        if clustering_key is None:
            raise ClusteringKeyNotFoundError(
                'payload={} does not have a clustering key'.format(payload))

        item.update({
            self.partition_key_name: partition_key,
            self.clustering_key_name: clustering_key
        })

        return item

    def create(self):
        return self.table.create_table(self.table_name)

    def insert(self, item: dict):
        return self.table.insert_entity(self.table_name,
                                        self.get_payload(item))

    def update(self, item: dict):
        return self.table.update_entity(self.table_name,
                                        self.get_payload(item))

    def upsert(self, item: dict):
        return self.table.insert_or_replace_entity(self.table_name,
                                                   self.get_payload(item))

    def delete(self, partition_key: str, clustering_key: str):
        return self.table.delete_entity(self.table_name,
                                        partition_key=partition_key,
                                        row_key=clustering_key)

    def read(self, partition_key: str, clustering_key: str):
        return self.table.get_entity(self.table_name,
                                     partition_key=partition_key,
                                     row_key=clustering_key)

    def insert_batch(self, items: list):
        batch = TableBatch()
        for item in items:
            batch.insert_entity(self.get_payload(item))

        return self.table.commit_batch(self.table_name, batch)

    def get(self, partition_key: str, clustering_key: str):
        return self.table.get_entity(self.table_name, partition_key,
                                     clustering_key)

    def get_by_partition(self, partition_key: str) -> list:
        return self.table.query_entities(self.table_name,
                                         filter="{} eq '{}'".format(
                                             self.partition_key_name,
                                             partition_key))
Example No. 13
class CosmosData:
    # Setup initial variables
    def __init__(self, account_key, identifier):
        self.account_key = account_key
        self.identifier = identifier
        self.account_name = self.identifier + "-cosmos-db"
        self.table_endpoint = "https://" + self.identifier + "-cosmos-db.table.cosmos.azure.com:443/"
        self.connection_string = "DefaultEndpointsProtocol=https;AccountName=" + self.account_name + ";AccountKey=" + self.account_key + ";TableEndpoint=" + self.table_endpoint + ";"
        self.table_service = TableService(
            endpoint_suffix="table.cosmos.azure.com",
            connection_string=self.connection_string)
        self.table_name = self.identifier + "-cosmos-table"

    # Get a list of values
    def get_list(self, list_name, provider=None):
        pkey = ""
        if provider is not None:
            pkey = "|" + provider
        main_list = []
        if list_name:
            try:
                items = self.table_service.query_entities(
                    self.table_name,
                    filter="PartitionKey eq '" + list_name + "'")
            except ValueError:
                pass
            else:
                for item in items:
                    if provider is None:
                        main_list.append(str(item.RowKey))
                    elif str(item.RowKey).endswith(provider):
                        main_list.append(str(item.RowKey).replace(pkey, ""))

        return main_list

    # Add a value to a list
    def add_to_list(self, list_name, value, provider=None):
        pkey = ""
        if provider is not None:
            pkey = "|" + provider
        item = Entity()
        item.PartitionKey = list_name
        item.RowKey = value + pkey
        main_list = self.get_list(list_name, provider)
        try:
            self.table_service.insert_entity(self.table_name, item)
        except ValueError:
            pass
        else:
            main_list.append(value)

        return main_list

    # Remove a value from a list
    def remove_from_list(self, list_name, value, provider=None):
        pkey = ""
        if provider is not None:
            pkey = "|" + provider
        main_list = self.get_list(list_name, provider)
        try:
            self.table_service.delete_entity(self.table_name, list_name,
                                             value + pkey)
        except ValueError:
            pass
        else:
            main_list.remove(value)

        return main_list

    # Get a value from a K/V pair
    def get_value(self, key):
        retval = ""
        try:
            items = self.table_service.query_entities(
                self.table_name, filter="PartitionKey eq '" + key + "'")
        except ValueError:
            pass
        else:
            for item in items:
                retval = item.RowKey

        return retval

    # Set a value for a K/V pair
    def set_value(self, key, value):
        item = Entity()
        item.PartitionKey = key
        item.RowKey = value
        retval = False

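        # The value lives in RowKey, which cannot be updated in place, so the
        # old entity is deleted and a new one is inserted.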
        try:
            entries = self.table_service.query_entities(
                self.table_name, filter="PartitionKey eq '" + key + "'")
            old_value = "invalid"
            for entry in entries:
                old_value = entry.RowKey
            self.table_service.delete_entity(self.table_name, key, old_value)
            self.table_service.insert_entity(self.table_name, item)
        except ValueError:
            pass
        else:
            retval = True

        return retval

    # Delete all data and load in the default data set
    def data_load(self):
        data_set = {
            "required-modules": [
                "custom-vnet|azurerm", "custom-sg|azurerm",
                "custom-blob|azurerm", "custom-vpc|aws", "custom-sg|aws"
            ],
            "approved-instances": [
                "Standard_A1_v2|azurerm", "Standard_A2_v2|azurerm",
                "Standard_A4_v2|azurerm", "Standard_A8_v2|azurerm",
                "t3.micro|aws", "t3.small|aws", "t3.medium|aws", "t3.large|aws"
            ],
            "prohibited-resources": [
                "azurerm_resource_group|azurerm",
                "azurerm_virtual_network|azurerm",
                "azurerm_network_security_group|azurerm",
                "azurerm_subnet_network_security_group_association|azurerm",
                "aws_internet_gateway|aws", "aws_route|aws",
                "aws_route_table|aws", "aws_route_table_association|aws",
                "aws_subnet|aws", "aws_vpc|aws", "aws_security_group|aws"
            ],
            "allowed-resources": [
                "azurerm_virtual_machine|azurerm",
                "azurerm_network_interface|azurerm",
                "azurerm_public_ip|azurerm", "azurerm_storage_account|azurerm",
                "aws_instance|aws", "aws_s3_bucket|aws",
                "aws_s3_bucket_policy|aws"
            ],
            "prevent-deletion": ["true"],
            "default-provider": ["azurerm"],
            "mandatory-tags": ["Department", "Environment"],
            "max-cost": ["100"],
            "ddb-encryption": ["true"],
            "no-star-access": ["true"]
        }

        # delete all entries
        items = self.table_service.query_entities(self.table_name)
        for itm in items:
            self.table_service.delete_entity(self.table_name, itm.PartitionKey,
                                             itm.RowKey)

        # add all entries
        for category in data_set:
            for value in data_set[category]:
                item = Entity()
                item.PartitionKey = category
                item.RowKey = value
                self.table_service.insert_entity(self.table_name, item)

        return True
Example No. 14
print(task.description)
print(task.priority)

# Query a set of entities
print("Query set of entities...")
tasks = table_service.query_entities(
    'tasktable', filter="PartitionKey eq 'tasksSeattle'")
for task in tasks:
    print(task.description)
    print(task.priority)
    try:
        print(task.newCol)
    except AttributeError:
        print("No newCol.")

# Query a subset of entity properties
print("Query a subset of entity properties...")
tasks = table_service.query_entities(
    'tasktable', filter="PartitionKey eq 'tasksSeattle'", select='description')
for task in tasks:
    print(task.description)

# Delete an entity
print("Delete an entity...")
table_service.delete_entity('tasktable', 'tasksSeattle', '001')

# Delete a table
print("Delete a table...")
table_service.delete_table('tasktable')

Example No. 15
class AzureOperationsStorage(BasicOperationStorage):
    """
        Implementation of :class:`.interface.IOperationStorage` with Azure Table Storage using the
        default implementation :class:`.interface.BasicOperationStorage`

        On creating a connection to the storage is initialized and all needed
        tables are created. If a purge is necessary, tables are not deleted but simple the content removed.
        Table creation can take a while with Azure Table Storage.

        As Azure Table Storage only supports two indices, the operations are inserted multiple times in different
        tables to enable multi-index queries.
    """
    def get_retry_exceptions(self):
        return (NewConnectionError,)

    @retry_auto_reconnect
    def __init__(self, azure_config, purge=False):
        super(AzureOperationsStorage, self).__init__()

        if not azure_config:
            raise Exception("No azure table storage configuration provided!")
        self._azure_config = azure_config

        # ensure defaults
        self._azure_config["operation_table"] = self._azure_config.get(
            "operation_table", "operations")
        self._azure_config["address_table"] = self._azure_config.get(
            "address_table", "address")
        self._azure_config["status_table"] = self._azure_config.get(
            "status_table", "status")
        self._azure_config["balances_table"] = self._azure_config.get(
            "balances_table", "balances")

        if not self._azure_config["account"]:
            raise Exception(
                "Please include the azure account name in the config")
        if not self._azure_config["key"]:
            raise Exception(
                "Please include the azure account key in the config")

        self._service = TableService(
            account_name=self._azure_config["account"],
            account_key=self._azure_config["key"])

        # if the tables don't exist, create them
        self._create_operations_storage(purge)
        self._create_status_storage(purge)
        self._create_address_storage(purge)
        self._create_balances_storage(purge)

    def _debug_print(self, operation):
        from pprint import pprint
        pprint(operation)

    def _create_address_storage(self, purge):
        _varients = ["balance", "historyfrom", "historyto"]

        for variant in _varients:
            tablename = self._azure_config["address_table"] + variant
            if purge:
                try:
                    for item in self._service.query_entities(tablename):
                        self._service.delete_entity(tablename,
                                                    item["PartitionKey"],
                                                    item["RowKey"])
                except AzureHttpError:
                    pass
                except AzureMissingResourceHttpError:
                    pass
            while not self._service.exists(tablename):
                self._service.create_table(tablename)
                time.sleep(0.1)

    def _create_status_storage(self, purge):
        if purge:
            try:
                tablename = self._azure_config["status_table"]
                for item in self._service.query_entities(tablename):
                    self._service.delete_entity(tablename,
                                                item["PartitionKey"],
                                                item["RowKey"])
            except AzureMissingResourceHttpError:
                pass
        while not self._service.exists(self._azure_config["status_table"]):
            self._service.create_table(self._azure_config["status_table"])
            time.sleep(0.1)

    def _create_balances_storage(self, purge):
        if purge:
            try:
                tablename = self._azure_config["balances_table"]
                for item in self._service.query_entities(tablename):
                    self._service.delete_entity(tablename,
                                                item["PartitionKey"],
                                                item["RowKey"])
            except AzureMissingResourceHttpError:
                pass
        while not self._service.exists(self._azure_config["balances_table"]):
            self._service.create_table(self._azure_config["balances_table"])
            time.sleep(0.1)

    def _create_operations_storage(self, purge):
        self._operation_varients = [
            "incident", "statuscompleted", "statusfailed", "statusinprogress"
        ]  #  "customer"
        self._operation_tables = {}
        for variant in self._operation_varients:
            self._operation_tables[
                variant] = self._azure_config["operation_table"] + variant

        self._operation_prep = {
            "statusinprogress": lambda op: {
                "PartitionKey": self._short_digit_hash(op["chain_identifier"]),
                "RowKey": op["chain_identifier"]
            },
            "statuscompleted": lambda op: {
                "PartitionKey": self._short_digit_hash(op["chain_identifier"]),
                "RowKey": op["chain_identifier"]
            },
            "statusfailed": lambda op: {
                "PartitionKey": self._short_digit_hash(op["chain_identifier"]),
                "RowKey": op["chain_identifier"]
            },
            "customer": lambda op: {
                "PartitionKey": op["customer_id"],
                "RowKey": op["chain_identifier"]
            },
            "incident": lambda op: {
                "PartitionKey": self._short_digit_hash(op["incident_id"]),
                "RowKey": op["incident_id"]
            }
        }
        for variant in self._operation_varients:
            if purge:
                try:
                    tablename = self._operation_tables[variant]
                    for item in self._service.query_entities(tablename):
                        self._service.delete_entity(tablename,
                                                    item["PartitionKey"],
                                                    item["RowKey"])
                except AzureMissingResourceHttpError:
                    pass
            while not self._service.exists(self._operation_tables[variant]):
                self._service.create_table(self._operation_tables[variant])
                time.sleep(0.1)

    def _get_with_ck(self, variant, operation):
        with_ck = operation.copy()
        with_ck.update(self._operation_prep[variant](with_ck))
        return with_ck

    def _short_digit_hash(self, value):
        hash_type = Config.get("operation_storage",
                               "key_hash",
                               "type",
                               default="crc32")

        if hash_type == "crc32":
            short_hash = hex(zlib.crc32(value.encode(encoding='UTF-8')))
            short_hash = short_hash[2:len(short_hash)]

        elif hash_type == "sha256":
            checker = hashlib.sha256()
            checker.update(value.encode(encoding='UTF-8'))
            short_hash = checker.hexdigest()
        return short_hash[0:Config.
                          get("operation_storage", "key_hash", "digits", 3)]

    @retry_auto_reconnect
    def track_address(self, address, usage="balance"):
        address = ensure_address_format(address)
        try:
            short_hash = self._short_digit_hash(address)
            logging.getLogger(__name__).debug("track_address with " +
                                              str(address) + ", hash " +
                                              str(short_hash))
            self._service.insert_entity(
                self._azure_config["address_table"] + usage, {
                    "PartitionKey": short_hash,
                    "RowKey": address,
                    "address": address,
                    "usage": usage
                })
        except AzureConflictHttpError:
            raise AddressAlreadyTrackedException

    @retry_auto_reconnect
    def untrack_address(self, address, usage="balance"):
        address = ensure_address_format(address)
        try:
            short_hash = self._short_digit_hash(address)
            logging.getLogger(__name__).debug("untrack_address with " +
                                              str(address) + ", hash " +
                                              str(short_hash))
            self._service.delete_entity(
                self._azure_config["address_table"] + usage, short_hash,
                address)
            try:
                self._delete_balance(address)
            except AzureMissingResourceHttpError:
                pass
        except AzureMissingResourceHttpError:
            raise AddressNotTrackedException()

    @retry_auto_reconnect
    def _get_address(self, address, usage="balance"):
        try:
            short_hash = self._short_digit_hash(address)
            logging.getLogger(__name__).debug("_get_address with " +
                                              str(address) + ", hash " +
                                              str(short_hash))
            return self._service.get_entity(
                self._azure_config["address_table"] + usage, short_hash,
                address)
        except AzureMissingResourceHttpError:
            raise AddressNotTrackedException()

    def _update(self, operation, status=None):
        try:
            mapping = {
                "in_progress": "statusinprogress",
                "completed": "statuscompleted",
                "failed": "statusfailed"
            }

            operation = self._get_with_ck("incident", operation.copy())
            new_operation = operation
            if status:
                tmp = self.get_operation(operation["incident_id"])
                new_operation["timestamp"] = tmp["timestamp"]
                new_operation["status"] = status
                new_operation = self._get_with_ck("incident", new_operation)

            logging.getLogger(__name__).debug(
                "_update: Table " + self._operation_tables["incident"] +
                " PartitionKey " + new_operation["PartitionKey"] + " " +
                new_operation["RowKey"])

            self._service.update_entity(self._operation_tables["incident"],
                                        new_operation)

            operation = self._get_with_ck("statuscompleted", operation.copy())
            new_operation = operation
            if status:
                tmp = self.get_operation(operation["incident_id"])
                new_operation["timestamp"] = tmp["timestamp"]
                new_operation["status"] = status
                new_operation = self._get_with_ck("statuscompleted",
                                                  new_operation)
            self._service.update_entity(
                self._operation_tables["statuscompleted"], new_operation)

            logging.getLogger(__name__).debug(
                "_update: Table " + self._operation_tables["statuscompleted"] +
                " PartitionKey " + new_operation["PartitionKey"] + " " +
                new_operation["RowKey"])

            if status:
                # needs delete and insert
                try:
                    self._service.delete_entity(
                        self._operation_tables[mapping[operation["status"]]],
                        operation["PartitionKey"], operation["RowKey"])
                except AzureMissingResourceHttpError:
                    pass
                try:
                    self._service.insert_entity(
                        self._operation_tables[mapping[
                            new_operation["status"]]], new_operation)
                except AzureConflictHttpError:
                    # already exists, try update
                    self._service.update_entity(
                        self._operation_tables[mapping[
                            new_operation["status"]]], new_operation)
            else:
                self._service.update_entity(
                    self._operation_tables[mapping[new_operation["status"]]],
                    new_operation)
        except AzureMissingResourceHttpError:
            raise OperationNotFoundException()

    def _insert(self, operation):
        try:
            for variant in self._operation_varients:
                to_insert = operation.copy()
                to_insert.update(self._operation_prep[variant](to_insert))
                if not to_insert["PartitionKey"]:
                    raise AzureMissingResourceHttpError(
                        "operation is missing a PartitionKey", 400)
                if not to_insert["RowKey"]:
                    raise AzureMissingResourceHttpError(
                        "operation is missing a RowKey", 400)

                logging.getLogger(__name__).debug(
                    "_insert: Table " + self._operation_tables[variant] +
                    " PartitionKey " + to_insert["PartitionKey"] + " " +
                    to_insert["RowKey"])
                self._service.insert_entity(self._operation_tables[variant],
                                            to_insert)
        except AzureConflictHttpError:
            raise DuplicateOperationException()

    def _delete(self, operation):
        try:
            for variant in self._operation_varients:
                to_delete = operation.copy()
                to_delete.update(self._operation_prep[variant](to_delete))
                self._service.delete_entity(self._operation_tables[variant],
                                            to_delete["PartitionKey"],
                                            to_delete["RowKey"])
        except AzureMissingResourceHttpError:
            raise OperationNotFoundException()

    @retry_auto_reconnect
    def flag_operation_completed(self, operation):
        # do basics
        operation = super(AzureOperationsStorage,
                          self).flag_operation_completed(operation)

        self._update(operation, status="completed")

        self._ensure_balances(operation)

    @retry_auto_reconnect
    def flag_operation_failed(self, operation, message=None):
        # do basics
        operation = super(AzureOperationsStorage,
                          self).flag_operation_failed(operation)
        operation["message"] = message
        self._update(operation, status="failed")

    @retry_auto_reconnect
    def insert_operation(self, operation):
        # do basics
        operation = super(AzureOperationsStorage,
                          self).insert_operation(operation)

        error = None
        try:
            self._insert(operation)
        except DuplicateOperationException as e:
            error = e

        try:
            # always check if balances are ok
            if operation["status"] == "completed":
                self._ensure_balances(operation)
        except BalanceConcurrentException as e:
            if error is None:
                error = e

        if error is not None:
            raise error

    @retry_auto_reconnect
    def _delete_balance(self, address, if_match='*'):
        self._service.delete_entity(self._azure_config["balances_table"],
                                    self._short_digit_hash(address),
                                    address,
                                    if_match=if_match)

    @retry_auto_reconnect
    def _ensure_balances(self, operation):
        affected_address = get_tracking_address(operation)
        logging.getLogger(__name__).debug("_ensure_balances: with " +
                                          operation["chain_identifier"] +
                                          " for address " +
                                          str(affected_address))
        try:
            self._get_address(affected_address)
        except AddressNotTrackedException:
            # delete it if it exists and return
            try:
                self._delete_balance(affected_address)
            except AzureMissingResourceHttpError:
                pass
            return

        try:
            balance_dict = self._service.get_entity(
                self._azure_config["balances_table"],
                self._short_digit_hash(affected_address), affected_address)
            insert = False
        except AzureMissingResourceHttpError as e:
            balance_dict = {"address": affected_address}
            balance_dict["PartitionKey"] = self._short_digit_hash(
                balance_dict["address"])
            balance_dict["RowKey"] = balance_dict["address"]
            insert = True

        if operation["block_num"] < balance_dict.get("blocknum", 0):
            raise BalanceConcurrentException()
        elif operation["block_num"] == balance_dict.get("blocknum", 0) and\
                operation["txnum"] < balance_dict.get("txnum", 0):
            raise BalanceConcurrentException()
        elif operation["block_num"] == balance_dict.get("blocknum", 0) and\
                operation["txnum"] == balance_dict.get("txnum", 0) and\
                operation["opnum"] <= balance_dict.get("opnum", 0):
            raise BalanceConcurrentException()

        balance_dict["blocknum"] = max(balance_dict.get("blocknum", 0),
                                       operation["block_num"])
        balance_dict["txnum"] = max(balance_dict.get("txnum", 0),
                                    operation["tx_in_block"])
        balance_dict["opnum"] = max(balance_dict.get("opnum", 0),
                                    operation["op_in_tx"])
        total = 0

        addrs = split_unique_address(affected_address)
        asset_id = "balance" + operation["amount_asset_id"].split("1.3.")[1]
        if addrs["account_id"] == operation["from"]:
            # internal transfer and withdraw

            # negative
            balance = balance_dict.get(asset_id, 0)

            balance_dict[asset_id] = balance - operation["amount_value"]

            # fee as well
            asset_id = operation["fee_asset_id"]
            balance = balance_dict.get(asset_id, 0)

            balance_dict[asset_id] = balance - operation["fee_value"]
        elif addrs["account_id"] == operation["to"]:
            # deposit

            # positive
            balance = balance_dict.get(asset_id, 0)

            balance_dict[asset_id] = balance + operation["amount_value"]

            # fees were paid by someone else
        else:
            raise InvalidOperationException()

        for key, value in balance_dict.items():
            if key.startswith("balance"):
                total = total + value

        if total == 0:
            if not insert:
                try:
                    self._delete_balance(affected_address,
                                         if_match=balance_dict.etag)
                except AzureMissingResourceHttpError:
                    pass
            return

        # may be updated or inserted, total > 0
        if insert:
            try:
                self._service.insert_entity(
                    self._azure_config["balances_table"], balance_dict)
            except AzureMissingResourceHttpError:
                raise OperationStorageException(
                    "Critical error in database consistency")
        else:
            try:
                self._service.update_entity(
                    self._azure_config["balances_table"],
                    balance_dict,
                    if_match=balance_dict.etag)
            except AzureConflictHttpError:
                raise OperationStorageException(
                    "Critical error in database consistency")

    @retry_auto_reconnect
    def insert_or_update_operation(self, operation):
        # do basics
        operation = super(AzureOperationsStorage,
                          self).insert_operation(operation)

        # check if this is from in_progress to complete (for withdrawals we need to find incident id as its
        # not stored onchain)
        try:
            logging.getLogger(__name__).debug(
                "insert_or_update_operation: check if in_progress with " +
                str(operation["chain_identifier"]) + " exists")
            existing_operation = self.get_operation_by_chain_identifier(
                "in_progress", operation["chain_identifier"])
            logging.getLogger(__name__).debug(
                "insert_or_update_operation: found existing in_progress operation"
            )
            if existing_operation["incident_id"] != operation["incident_id"] and\
                    operation["incident_id"] == operation["chain_identifier"]:
                logging.getLogger(__name__).debug(
                    "insert_or_update_operation: using preset incident_id " +
                    str(existing_operation["incident_id"]))
                operation["incident_id"] = existing_operation["incident_id"]
        except OperationNotFoundException:
            existing_operation = None

        if existing_operation is None:
            try:
                logging.getLogger(__name__).debug(
                    "insert_or_update_operation: attempting insert")

                error = None
                try:
                    self._insert(operation)
                except DuplicateOperationException as e:
                    error = e

                try:
                    # always check if balances are ok
                    if operation["status"] == "completed":
                        self._ensure_balances(operation)
                except BalanceConcurrentException as e:
                    if error is None:
                        error = e

                if error is not None:
                    raise error
            except DuplicateOperationException as ex:
                logging.getLogger(__name__).debug(
                    "insert_or_update_operation: fallback to update")
                # could be an update to completed ...
                if operation.get("block_num"):
                    try:
                        operation.pop("status")
                        self.flag_operation_completed(operation)
                    except OperationNotFoundException:
                        raise ex
                else:
                    raise ex
        else:
            logging.getLogger(__name__).debug(
                "insert_or_update_operation: attempting update")
            if operation.get("block_num"):
                try:
                    operation.pop("status")
                    self.flag_operation_completed(operation)
                except OperationNotFoundException:
                    raise

    @retry_auto_reconnect
    def delete_operation(self, operation_or_incident_id):
        # do basics
        operation = super(AzureOperationsStorage,
                          self).delete_operation(operation_or_incident_id)

        if type(operation_or_incident_id) == str:
            operation = self.get_operation(operation_or_incident_id)
        else:
            operation = operation_or_incident_id
        self._delete(operation)

    @retry_auto_reconnect
    def get_operation_by_chain_identifier(self, status, chain_identifier):
        mapping = {
            "in_progress": "statusinprogress",
            "completed": "statuscompleted",
            "failed": "statusfailed"
        }
        try:
            operation = self._service.get_entity(
                self._operation_tables[mapping[status]],
                self._short_digit_hash(chain_identifier), chain_identifier)
            operation.pop("PartitionKey")
            operation.pop("RowKey")
            operation.pop("Timestamp")
            operation.pop("etag")
        except AzureMissingResourceHttpError:
            raise OperationNotFoundException()
        return operation

    @retry_auto_reconnect
    def get_operation(self, incident_id):
        try:
            short_hash = self._short_digit_hash(incident_id)
            logging.getLogger(__name__).debug("get_operation with " +
                                              str(incident_id) + ", hash " +
                                              str(short_hash))
            operation = self._service.get_entity(
                self._operation_tables["incident"], short_hash, incident_id)
            operation.pop("PartitionKey")
            operation.pop("RowKey")
            operation.pop("Timestamp")
            operation.pop("etag")
        except AzureMissingResourceHttpError:
            raise OperationNotFoundException()
        return operation

    @retry_auto_reconnect
    def get_balances(self,
                     take,
                     continuation=None,
                     addresses=None,
                     recalculate=False):
        if recalculate:
            raise Exception(
                "Currently not supported due to memo change on withdraw")
            return self._get_balances_recalculate(take, continuation,
                                                  addresses)
        else:
            if continuation is not None:
                try:
                    continuation_marker = json.loads(continuation)
                except TypeError:
                    raise InputInvalidException()
                except JSONDecodeError:
                    raise InputInvalidException()

                balances = self._service.query_entities(
                    self._azure_config["balances_table"],
                    num_results=take,
                    marker=continuation_marker)
            else:
                balances = self._service.query_entities(
                    self._azure_config["balances_table"], num_results=take)
            return_balances = {}
            for address_balance in balances:
                return_balances[address_balance["address"]] = {
                    "block_num": address_balance["blocknum"]
                }
                for key, value in address_balance.items():
                    if key.startswith("balance"):
                        asset_id = "1.3." + key.split("balance")[1]
                        return_balances[
                            address_balance["address"]][asset_id] = value
            return_balances["continuation"] = None
            if balances.next_marker:
                return_balances["continuation"] = json.dumps(
                    balances.next_marker)
            return return_balances

    @retry_auto_reconnect
    def _get_balances_recalculate(self,
                                  take,
                                  continuation=None,
                                  addresses=None):
        address_balances = collections.defaultdict(
            lambda: collections.defaultdict())

        if not addresses:
            if continuation is not None:
                try:
                    continuation_marker = json.loads(continuation)
                except TypeError:
                    raise InputInvalidException()
                except JSONDecodeError:
                    raise InputInvalidException()

                addresses = self._service.query_entities(
                    self._azure_config["address_table"] + "balance",
                    num_results=take,
                    marker=continuation_marker)
            else:
                addresses = self._service.query_entities(
                    self._azure_config["address_table"] + "balance",
                    num_results=take)
            if addresses.next_marker:
                address_balances["continuation"] = json.dumps(
                    addresses.next_marker)
            addresses = [x["address"] for x in addresses]

        if type(addresses) == str:
            addresses = [addresses]

        for address in addresses:
            addrs = split_unique_address(address)
            max_block_number = 0
            for operation in self.get_operations_completed(
                    filter_by={"customer_id": addrs["customer_id"]}):
                this_block_num = operation["block_num"]
                asset_id = operation["amount_asset_id"]
                if addrs["account_id"] == operation["from"]:
                    # negative
                    balance = address_balances[address].get(asset_id, 0)

                    address_balances[address][asset_id] =\
                        balance - operation["amount_value"]

                    # fee as well
                    asset_id = operation["fee_asset_id"]
                    balance = address_balances[address].get(asset_id, 0)

                    address_balances[address][asset_id] =\
                        balance - operation["fee_value"]
                elif addrs["account_id"] == operation["to"]:
                    # positive
                    balance = address_balances[address].get(asset_id, 0)

                    address_balances[address][asset_id] =\
                        balance + operation["amount_value"]
                else:
                    raise InvalidOperationException()
                max_block_number = max(max_block_number, this_block_num)
            if max_block_number > 0:
                address_balances[address]["block_num"] = max_block_number

        # do not return default dicts
        for key, value in address_balances.items():
            if type(value) == collections.defaultdict:
                address_balances[key] = dict(value)
        return dict(address_balances)

    def _parse_filter(self, filter_by):
        if filter_by:
            if filter_by.get("customer_id"):
                return {"customer_id": filter_by.pop("customer_id")}
            if filter_by.get("address"):
                addrs = split_unique_address(filter_by.pop("address"))
                return {"customer_id": addrs["customer_id"]}
            if filter_by.get("from"):
                addrs = split_unique_address(filter_by.pop("from"))
                return {"from": addrs["account_id"]}
            if filter_by.get("to"):
                addrs = split_unique_address(filter_by.pop("to"))
                return {"to": addrs["account_id"]}
            if filter_by:
                raise Exception("Filter not supported")
        return {}

    def _filter_dict_to_string(self, filter_dict, partition_key=None):
        filter_str = None
        for key, value in filter_dict.items():
            if partition_key == key:
                key = "PartitionKey"
            if filter_str is None:
                filter_str = key + " eq '" + value + "'"
            else:
                filter_str = filter_str + " and " + key + " eq '" + value + "'"
        return filter_str

    @retry_auto_reconnect
    def get_operations_in_progress(self, filter_by=None):
        mapping = {
            "in_progress": "statusinprogress",
            "completed": "statuscompleted",
            "failed": "statusfailed"
        }

        filter_dict = {}
        filter_dict.update(self._parse_filter(filter_by))

        filter_str = self._filter_dict_to_string(filter_dict, "status")

        return list(
            self._service.query_entities(
                self._operation_tables[mapping["in_progress"]], filter_str))

    @retry_auto_reconnect
    def get_operations_completed(self, filter_by=None):
        mapping = {
            "in_progress": "statusinprogress",
            "completed": "statuscompleted",
            "failed": "statusfailed"
        }

        filter_dict = {}
        filter_dict.update(self._parse_filter(filter_by))

        filter_str = self._filter_dict_to_string(filter_dict, "status")

        return list(
            self._service.query_entities(
                self._operation_tables[mapping["completed"]], filter_str))

    @retry_auto_reconnect
    def get_operations_failed(self, filter_by=None):
        mapping = {
            "in_progress": "statusinprogress",
            "completed": "statuscompleted",
            "failed": "statusfailed"
        }

        filter_dict = {}
        filter_dict.update(self._parse_filter(filter_by))

        filter_str = self._filter_dict_to_string(filter_dict, "status")

        return list(
            self._service.query_entities(
                self._operation_tables[mapping["failed"]], filter_str))

    @retry_auto_reconnect
    def get_last_head_block_num(self):
        try:
            document = self._service.get_entity(
                self._azure_config["status_table"], "head_block_num", "last")
            return document["last_head_block_num"]
        except AzureMissingResourceHttpError:
            return 0

    @retry_auto_reconnect
    def set_last_head_block_num(self, head_block_num):
        current_last = self.get_last_head_block_num()
        if current_last >= head_block_num:
            raise Exception("Marching backwards not supported! Last: " +
                            str(current_last) + " New: " + str(head_block_num))
        self._service.insert_or_replace_entity(
            self._azure_config["status_table"], {
                "PartitionKey": "head_block_num",
                "RowKey": "last",
                "last_head_block_num": head_block_num
            })
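A minimal usage sketch for the operation store above. The class name is not visible in this excerpt, so `store` stands in for an instance and the customer id is a placeholder; the two helpers reduce `filter_by` to a single-clause OData filter such as "customer_id eq 'customer-1'".

# `store` is assumed to be an instance of the class above; the id is a placeholder.
filter_by = {"customer_id": "customer-1"}

# _parse_filter keeps only the supported key and _filter_dict_to_string renders
# it as an OData clause (mapped onto PartitionKey when the key matches).
for operation in store.get_operations_completed(filter_by=filter_by):
    print(operation["PartitionKey"], operation["RowKey"])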
Exemplo n.º 16
0
config = configparser.ConfigParser()
config.read('app.ini')

identifier = config['App']['Identifier']
account_name = identifier + "-cosmos-db"
account_key = config['App']['AccountKey']
table_endpoint = "https://" + identifier + "-cosmos-db.table.cosmos.azure.com:443/"
connection_string = "DefaultEndpointsProtocol=https;AccountName=" + account_name + ";AccountKey=" + account_key + ";TableEndpoint=" + table_endpoint + ";"
table_service = TableService(endpoint_suffix="table.cosmos.azure.com",
                             connection_string=connection_string)
table_name = identifier + "-cosmos-table"

# Start over
items = table_service.query_entities(table_name)
for itm in items:
    table_service.delete_entity(table_name, itm.PartitionKey, itm.RowKey)

# Add new entries
item = Entity()
item.PartitionKey = "required-modules"
item.RowKey = "custom-vnet"
table_service.insert_entity(table_name, item)

item = Entity()
item.PartitionKey = "required-modules"
item.RowKey = "custom-sg"
table_service.insert_entity(table_name, item)

item = Entity()
item.PartitionKey = "required-modules"
item.RowKey = "custom-blob"
table_service.insert_entity(table_name, item)
Exemplo n.º 17
0
class AzureCosmosDb(TableStorage):
    """Azure CosmosDB provider for Table Storage."""
    def __init__(self, config: AzureCosmosDbConfig):
        self._table_service = TableService(account_name=config.account_name,
                                           account_key=config.account_key)
        self._tableName = config.table_name

    def check_entry_exists(self, entry):
        """Check if entry exists in table.

        :param entry: Dictionary with PartitionKey and RowKey fields

        :return: True if entry exists
        """
        try:
            self.query(entry['PartitionKey'], entry['RowKey'])
            return True
        except Exception:
            return False

    def write(self, resource):
        """Write resource to table.

        :param resource: Expecting Resource object
            (see Common.Contracts.Resource)

        :return: None
        """
        entry = resource.to_dict()
        prepared = entry_storage.EntryOperations.prepare_entry_for_insert(
            entry)

        if not self.check_entry_exists(prepared):
            self._table_service.insert_entity(self._tableName, prepared)
        else:
            self._table_service.update_entity(self._tableName, prepared)

    def query(self, partition_key, row_key):
        """Get entry with specified partition and row keys.

        :param partition_key: Partition key for entry
        :param row_key: Row key for entry

        :return: Entity if found; raises AzureMissingResourceHttpError otherwise
        """
        task = self._table_service.get_entity(self._tableName, partition_key,
                                              row_key)
        return task

    def query_list(self):
        """Get entities from table.

        :return: List of entities from table
        """
        return self._table_service.query_entities(self._tableName)

    def delete(self, partition_key, row_key):
        """Delete entry with specified partition and row keys.

        :param partition_key: Partition key for entry
        :param row_key: Row key for entry

        :return: None
        """
        self._table_service.delete_entity(self._tableName, partition_key,
                                          row_key)

    @staticmethod
    def create():
        """Initialize AzureCosmosDb service.

        :return: AzureCosmosDb service object
        """
        config = AzureConfig()
        cosmos_storage = AzureCosmosDb(config.cosmos_storage_config)

        return cosmos_storage
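A hedged usage sketch for AzureCosmosDb. The real AzureCosmosDbConfig is not shown here, so a SimpleNamespace with placeholder credentials stands in for it; the constructor only reads the account_name, account_key and table_name attributes.

from types import SimpleNamespace

# Placeholder configuration; swap in a real AzureCosmosDbConfig in practice.
config = SimpleNamespace(account_name="myaccount",
                         account_key="<account-key>",
                         table_name="resources")
storage = AzureCosmosDb(config)

# check_entry_exists wraps query() and swallows the "not found" exception.
entry = {"PartitionKey": "subscription-1", "RowKey": "vm-001"}
print(storage.check_entry_exists(entry))

# Point reads and deletes address an entity by its partition and row keys.
# storage.query("subscription-1", "vm-001")
# storage.delete("subscription-1", "vm-001")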
Exemplo n.º 18
0
class AzureStorageHandler():
    def __init__(self, kv):
        try:
            self.table_service = TableService(
                account_name=kv.get_keyvault_secret("storageAccount-name"),
                account_key=kv.get_keyvault_secret("storageAccount-key"))
        except Exception as ex:
            print('Exception:')
            print(ex)

    def insert_submission_entry(self, entry):
        submission = Entity()

        submission.PartitionKey = entry.subreddit
        submission.RowKey = entry.id
        submission.author = entry.author
        submission.created_utc = entry.created_utc
        submission.flair = entry.flair
        submission.title = entry.title

        # Flatten list of keywords into comma separated string
        submission.title_keywords = ','.join(map(str, entry.title_keywords))
        submission.title_sentiment = entry.title_sentiment
        try:
            submission.body_keywords = ','.join(map(str, entry.body_keywords))
            submission.body_sentiment = entry.body_sentiment
        except AttributeError:
            submission.body_keywords = ""
            submission.body_sentiment = ""

        self.table_service.insert_entity('submissions', submission)

    def insert_comment_entry(self, entries):

        for entry in entries:
            comment = Entity()

            comment.PartitionKey = entry.link_id
            comment.RowKey = entry.id
            comment.author = entry.author
            comment.body = entry.body
            comment.created_utc = entry.created_utc
            comment.parent_id = entry.parent_id
            comment.score = entry.score
            comment.subreddit = entry.subreddit
            comment.subreddit_id = entry.subreddit_id
            comment.total_awards_received = entry.total_awards_received
            comment.sentiment = entry.sentiment

            # Flatten list of keywords into comma separated string
            comment.keywords = ','.join(map(str, entry.keywords))

            self.table_service.insert_entity('comments', comment)

    def insert_recommendation_entry(self, entries):

        for entry in entries:
            recommendation = Entity()

            recommendation.PartitionKey = "{0}_{1}".format(
                entry.subreddit, entry.query_word)
            recommendation.RowKey = entry.keyword
            recommendation.subreddit = entry.subreddit
            recommendation.query_word = entry.query_word
            recommendation.post_id = ','.join(map(str, entry.post_id))
            recommendation.comment_id = ','.join(map(str, entry.comment_id))
            recommendation.sentiment = entry.sentiment
            recommendation.count = entry.count

            try:
                self.table_service.insert_entity('recommendations',
                                                 recommendation)
            except AzureConflictHttpError:
                subreddit_query_word = recommendation.PartitionKey.split('_')
                print(
                    "The recommendation entry with subreddit = '{0}', search term = '{1}', and keyword = '{2}' already exists in the database. Updating it..."
                    .format(subreddit_query_word[0], subreddit_query_word[1],
                            recommendation.RowKey))
                self.table_service.update_entity('recommendations',
                                                 recommendation)

    def insert_sub_date_entry(self, entry):
        sub_date = Entity()

        sub_date.PartitionKey = entry.subreddit
        sub_date.RowKey = entry.title
        sub_date.created_utc = entry.created_utc
        sub_date.post_id = entry.post_id

        try:
            self.table_service.insert_or_replace_entity(
                'mostrecentsubdate', sub_date)
        except TypeError as error:
            print(error)
            print(
                f"The mostrecentsubdate object is formatted incorrectly and was not updated. One of the parameters is not an int, str, bool or datetime, or defined custom EntityProperty. Continuing..."
            )

    def get_entry(self, table, partition_key, row_key):
        return self.table_service.get_entity(table, partition_key, row_key)

    def filter_entries(self, table, filter_string):
        return self.table_service.query_entities(table, filter_string)

    def update_entry(self, table, entity):
        return self.table_service.update_entity(table, entity)

    def delete_entry(self, table, partition_key, row_key):
        return self.table_service.delete_entity(table, partition_key, row_key)
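A brief sketch of the read-side helpers above. `kv` is assumed to be the same Key Vault wrapper used in the constructor, and the subreddit, post id and link id values are purely illustrative.

# `kv` must expose get_keyvault_secret(), exactly as used in __init__.
handler = AzureStorageHandler(kv)

# Fetch one submission, list its comments, then remove the submission.
submission = handler.get_entry('submissions', 'python', 'abc123')
comments = handler.filter_entries('comments', "PartitionKey eq 't3_abc123'")
handler.delete_entry('submissions', submission.PartitionKey, submission.RowKey)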
Exemplo n.º 19
0
def main(req: func.HttpRequest) -> func.HttpResponse:
    logging.info("Python HTTP trigger function processed a request.")

    def get_param(req: func.HttpRequest, param: str) -> str:
        param_value = req.params.get(param)
        if not param_value:
            try:
                req_body = req.get_json()
            except ValueError:
                pass
            else:
                param_value = req_body.get(param)
        return param_value

    path = get_param(req, "path")

    logging.info(
        f"Parsed the following parameters from the HTTP request: path={path}.")

    if path:
        rowKey = path
        partitionKey = "links"
        tableName = "links"

        # connect to table
        storage_account_name = os.environ["STORAGE_ACCOUNT_NAME"]
        storage_account_key = os.environ["STORAGE_ACCOUNT_KEY"]

        table_service = TableService(account_name=storage_account_name,
                                     account_key=storage_account_key)

        # try to delete the entity - if it does not exist, throw
        try:
            table_service.delete_entity(tableName, partitionKey, rowKey)
            logging.info(
                f"Deleted the following from Azure Table Storage: PartitionKey={partitionKey}, RowKey={rowKey}"
            )
            resp = {
                "Status":
                "Ok",
                "Message":
                f"Deleted the following from Azure Table Storage: PartitionKey={partitionKey}, RowKey={rowKey}",
            }
        except Exception:
            logging.info(
                f"Skipped deletion of the following from Azure Table Storage as the entity does not exist: PartitionKey={partitionKey}, RowKey={rowKey}"
            )
            resp = {
                "Status":
                "Ok",
                "Message":
                f"Skipped deletion of the following from Azure Table Storage as it does not exist: PartitionKey={partitionKey}, RowKey={rowKey}",
            }

        resp_json = json.dumps(resp)

        return func.HttpResponse(body=resp_json, mimetype="application/json")
    else:
        resp = {
            "Status": "Error",
            "Message": "Did not delete any entities from Azure Table Storage",
        }

        resp_json = json.dumps(resp)

        return func.HttpResponse(body=resp_json,
                                 mimetype="application/json",
                                 status_code=400)
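A small local-invocation sketch for the HTTP trigger above. The route and path value are made up, and STORAGE_ACCOUNT_NAME / STORAGE_ACCOUNT_KEY must already be present in the environment for the deletion call to run.

import azure.functions as func

# Build a fake request with the required "path" parameter and call main() directly.
req = func.HttpRequest(method="POST",
                       url="/api/delete-link",
                       params={"path": "example-short-link"},
                       body=b"")
resp = main(req)
print(resp.status_code, resp.get_body())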
Exemplo n.º 20
0
class AzureTableDatabase(object):
    def __init__(self):
        self.connection = TableService(account_name=storage_account, account_key=table_connection_string)
        self.table_name = table_name

    def _update_entity(self, record):
        record.LastModified = datetime.now()
        self.connection.update_entity(self.table_name, record)

    def create_table(self):
        if not self.connection.exists(self.table_name):
            self.connection.create_table(self.table_name)

    def raw_table(self, limit=100):
        """
        Retrieve a list of rows in the table.
        """
        calls = self.connection.query_entities(self.table_name, num_results=limit)
        return calls

    def list_calls(self, limit=100, select='PartitionKey'):
        """
        Retrieve a set of records that need a phone call
        """

        calls = self.connection.query_entities(self.table_name, num_results=limit, select=select)
        return [c.PartitionKey for c in calls]

    def reset_stale_calls(self, time_limit):
        """
        Retrieve calls that are not done and whose last modified time was older than the limit.
        """
        records = self.connection.query_entities(self.table_name, filter="LastModified lt datetime'{0}' and Status ne '{1}'".format(time_limit.date(), Statuses.extracting_done))
        if not records.items:
            raise NoRecordsToProcessError()
        num_records = len(records.items)

        for record in records:
            if 'LastErrorStep' in record:
                record.Status = record.LastErrorStep
                del record.LastErrorStep
            record.Status = Statuses.reset_map.get(record.Status, record.Status)
            self._update_entity(record)

        return num_records

    def retrieve_next_record_for_call(self):
        """
        Retrieve a set of records that need a phone call
        """

        records = self.connection.query_entities(self.table_name, num_results=1, filter="Status eq '{0}'".format(Statuses.new))

        if len(records.items) == 0:
            raise NoRecordsToProcessError()

        record = records.items[0]
        record.Status = Statuses.calling
        self._update_entity(record)

        return record.PartitionKey

    def set_error(self, partition_key, step):
        """ Reset a row from error state
        """
        record = self.connection.get_entity(self.table_name, partition_key, partition_key)
        record.Status = Statuses.error
        record['LastErrorStep'] = step
        self._update_entity(record)

    def retrieve_next_record_for_transcribing(self):
        records = self.connection.query_entities(
            self.table_name,
            num_results=1,
            filter="Status eq '{0}'".format(Statuses.recording_ready),
        )
        if not records.items:
            raise NoRecordsToProcessError()

        record = records.items[0]
        record.Status = Statuses.transcribing
        self._update_entity(record)

        return record.CallUploadUrl, record.PartitionKey

    def update_transcript(self, partition_key, transcript, status):
        record = self.connection.get_entity(
            self.table_name,
            partition_key,
            partition_key,
        )
        if status == TranscriptionStatus.success:
            record.CallTranscript = transcript
            record.Status = Statuses.transcribing_done
            record.TranscribeTimestamp = datetime.now()
            self._update_entity(record)
        elif status == TranscriptionStatus.request_error:
            self.set_error(partition_key, Statuses.transcribing)
        else:
            record.Status = Statuses.transcribing_failed
            self._update_entity(record)

    def change_status(self, original_status, new_status):
        records = self.connection.query_entities(
            self.table_name,
            filter="Status eq '{0}'".format(original_status),
        )
        if not records.items:
            return

        for record in records.items:
            record.Status = new_status
            self.connection.update_entity(self.table_name, record)

    def query(self, column, value, limit=1):
        records = self.connection.query_entities(self.table_name,
              num_results=limit, filter="{0} eq '{1}'".format(column, value))
        return records

    def retrieve_next_record_for_extraction(self):
        records = self.connection.query_entities(self.table_name, num_results=1, filter="Status eq '{0}'".format(Statuses.transcribing_done))
        if not records.items:
            raise NoRecordsToProcessError()

        record = records.items[0]
        record.Status = Statuses.extracting
        self._update_entity(record)

        return record.CallTranscript, record.PartitionKey

    def update_location_date(self, case_number, city, location_confidence, state, zipcode, date):
        record = self.connection.get_entity(self.table_name, case_number, case_number)
        record.City = city
        record.LocationConfidence = location_confidence
        record.State = state
        record.Zipcode = zipcode
        record.CourtHearingDate = date
        record.Status = Statuses.extracting_done
        self.connection.update_entity(self.table_name, record)

    def upload_new_requests(self, request_ids):
        """
        Upload new request ids to the database
        """

        for request_id in request_ids:
            record = {'PartitionKey': request_id, 'RowKey': request_id, 'Status': Statuses.new, 'LastModified': datetime.now()}
            try:
                self.connection.insert_entity(self.table_name, record)
            except AzureConflictHttpError:
                pass  # already exists. silently ignore.

    def update_call_id(self, alien_registration_id, call_id):
        record = self.connection.get_entity(self.table_name, alien_registration_id, alien_registration_id)
        record.CallID = call_id
        record.Status = Statuses.calling
        record.CallTimestamp = datetime.now()
        self._update_entity(record)

    def update_azure_path(self, alien_registration_id, azure_path):
        record = self.connection.get_entity(self.table_name, alien_registration_id, alien_registration_id)
        record.Status = Statuses.recording_ready
        record.CallUploadUrl = azure_path
        self._update_entity(record)

    def delete_ain(self, ain):
        return self.connection.delete_entity(self.table_name, ain, ain)

    def get_ain(self, ain):
        return self.connection.get_entity(self.table_name, ain, ain)
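An illustrative walkthrough of the call pipeline above. The request id and recording URL are placeholders, and the module-level configuration (storage_account, table_connection_string, table_name, Statuses) is assumed to be defined as in the snippet.

db = AzureTableDatabase()
db.create_table()

# Seed a new request, then pull it for calling and attach a recording URL.
db.upload_new_requests(['A123456789'])
case_number = db.retrieve_next_record_for_call()
db.update_azure_path(case_number, 'https://example.blob.core.windows.net/calls/A123456789.wav')

# The next stage picks the recording up for transcription.
upload_url, case_number = db.retrieve_next_record_for_transcribing()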
Exemplo n.º 21
0
    def test_list_releases(self, mock_now, mock_list_containers):
        # Mock time
        mock_now.return_value = pendulum.datetime(year=2020,
                                                  month=5,
                                                  day=1,
                                                  hour=1)

        # Mock containers
        containers_in = make_containers()
        mock_list_containers.return_value = containers_in

        # Mock fetching of containers
        client = MagArchiverClient(account_name=self.account_name,
                                   account_key=self.account_key)
        containers = client.list_containers(last_modified_thresh=1)

        try:
            # Update releases based on containers
            num_updated, num_errors = client.update_releases(containers)
            self.assertEqual(num_updated, 3)
            self.assertEqual(num_errors, 0)

            # Two releases
            start_date = pendulum.datetime(year=2020, month=4, day=17)
            end_date = pendulum.datetime(year=2020, month=5, day=1)
            releases = client.list_releases(start_date=start_date,
                                            end_date=end_date,
                                            state=MagState.discovered,
                                            date_type=MagDateType.release)
            self.assertEqual(len(releases), 2)

            # One release
            start_date = pendulum.datetime(year=2020,
                                           month=4,
                                           day=17,
                                           minute=1)
            end_date = pendulum.datetime(year=2020, month=5, day=1)
            releases = client.list_releases(start_date=start_date,
                                            end_date=end_date,
                                            state=MagState.discovered,
                                            date_type=MagDateType.release)
            self.assertEqual(len(releases), 1)

            # Three releases
            start_date = pendulum.datetime(year=2020, month=4, day=17)
            end_date = pendulum.datetime(year=2020, month=5, day=1, minute=1)
            releases = client.list_releases(start_date=start_date,
                                            end_date=end_date,
                                            state=MagState.discovered,
                                            date_type=MagDateType.release,
                                            reverse=False)
            self.assertEqual(len(releases), 3)

            # Sorting reverse=False
            self.assertEqual(releases[0].row_key, '2020-04-17')
            self.assertEqual(releases[1].row_key, '2020-04-24')
            self.assertEqual(releases[2].row_key, '2020-05-01')

            # Sorting reverse=True
            releases = client.list_releases(start_date=start_date,
                                            end_date=end_date,
                                            state=MagState.discovered,
                                            date_type=MagDateType.release,
                                            reverse=True)
            self.assertEqual(releases[0].row_key, '2020-05-01')
            self.assertEqual(releases[1].row_key, '2020-04-24')
            self.assertEqual(releases[2].row_key, '2020-04-17')

        finally:
            # Clean up
            service = TableService(account_name=self.account_name,
                                   account_key=self.account_key)
            for container in containers:
                service.delete_entity(MagRelease.TABLE_NAME, 'mag',
                                      container.name.replace("mag-", ""))
Exemplo n.º 22
0
class Db(object):

    ts = None

    def __init__(self):
        """Init connection with cosmosdb"""
        self.ts = TableService(account_name=ACCOUNT_NAME,
                               account_key=ACCOUNT_KEY)

    def migrate(self):
        """ Create tabel if not exists"""
        if not self.ts.exists(USER_TABLE_NAME):
            self.ts.create_table(USER_TABLE_NAME)

        if not self.ts.exists(MESSAGE_TABLE_NAME):
            self.ts.create_table(MESSAGE_TABLE_NAME)

    def get_all_users(self):
        """select email from user"""
        return [
            i['PartitionKey'] for i in self.ts.query_entities(USER_TABLE_NAME)
        ]

    def create_user(self, data=None):
        bjson = ep(
            EdmType.BINARY,
            dumps({
                'email':
                data['email'],
                'password':
                sha224(bytes(data['password'], encoding='utf-8')).hexdigest(),
                'full_name':
                data['full_name']
            }))

        user = {
            'PartitionKey': data['email'],
            'RowKey': row_key,
            'info': bjson
        }

        if (self.ts.insert_or_replace_entity(USER_TABLE_NAME, user)):
            return {'success': True}

    def delete_user(self, email=None):
        # delete_entity returns None, so report success explicitly.
        self.ts.delete_entity(USER_TABLE_NAME, email, row_key)
        return {'success': True}

    def create_message(self, email=None, message=None):
        """ Create message in protobuf"""
        proto_message = message_pb2.Message()
        proto_message.title = message['title']
        proto_message.content = message['content']
        proto_message.magic_number = message['magic_number']
        details = ep(EdmType.BINARY, str(proto_message))

        bmessage = {
            'PartitionKey': email,
            'RowKey': row_key,
            'details': details,
        }

        if (self.ts.insert_or_replace_entity(MESSAGE_TABLE_NAME, bmessage)):
            return {'success': True}

    def get_user(self, email=''):
        return self.ts.get_entity(USER_TABLE_NAME, email, row_key)

    def get_message(self, email=''):
        return self.ts.get_entity(MESSAGE_TABLE_NAME, email, row_key)

    def get_messages(self):
        messages = self.ts.query_entities(MESSAGE_TABLE_NAME)
        return list(messages)
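A minimal walkthrough of the Db wrapper above. ACCOUNT_NAME, ACCOUNT_KEY, row_key and message_pb2 come from module-level context that is not shown in this excerpt, and the user details below are placeholders.

db = Db()
db.migrate()  # creates the user and message tables if missing

db.create_user({'email': 'alice@example.com',
                'password': 'secret',
                'full_name': 'Alice Example'})
print(db.get_all_users())

db.create_message(email='alice@example.com',
                  message={'title': 'hello',
                           'content': 'first message',
                           'magic_number': 42})
print(len(db.get_messages()))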