Example #1
def validate_user_in_template(userId, templateId):
    table_service = TableService(
        connection_string=os.environ['AZURE_STORAGE_CONNECTION_STRING'])
    try:
        table_service.get_entity('user', templateId, userId)
    except Exception:
        abort(403, description="user_id not found in template_id")
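As a hedged illustration only: the validator above relies on Flask's abort, so it would normally be called from inside a request handler. The route, app object, and identifiers below are assumptions, not part of the original snippet.

# Hypothetical Flask route using the validator above; names are illustrative.
from flask import Flask

app = Flask(__name__)

@app.route('/templates/<template_id>/users/<user_id>')
def template_user(template_id, user_id):
    # aborts with 403 if the (templateId, userId) entity is missing
    validate_user_in_template(user_id, template_id)
    return {'status': 'ok'}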
Example #2
def validate_submission_in_template():
    templateId = request.form['template_id']
    submissionId = request.form['submission_id']
    table_service = TableService(
        connection_string=os.environ['AZURE_STORAGE_CONNECTION_STRING'])
    try:
        table_service.get_entity('submission', templateId, submissionId)
    except Exception:
        abort(400, description="submission_id not found in template_id")
Example #3
def throttle_ip_requests(ip_entry):
    max_from_single_ip = 5
    table_service = TableService(account_name=os.environ['STORAGE_ACCOUNT'],
                                 account_key=os.environ['STORAGE_KEY'])
    table_service.create_table(
        table_name=os.environ['BILLING_TABLE'])  #create if it doesn't exist
    ip_row = None
    try:
        ip_row = table_service.get_entity(os.environ['BILLING_TABLE'],
                                          ip_entry['PartitionKey'],
                                          ip_entry['RowKey'])
    except Exception:
        pass
    if not ip_row:
        ip_entry['count'] = 1
        table_service.insert_entity(table_name=os.environ['BILLING_TABLE'],
                                    entity=ip_entry)
        ip_row = ip_entry
    else:
        lastdatetime = datetime.strptime(ip_row['time'], "%d/%m/%Y %H:%M:%S")
        currdatetime = datetime.strptime(ip_entry['time'], "%d/%m/%Y %H:%M:%S")
        tdelta = currdatetime - lastdatetime
        if tdelta.days < 1 and ip_row['count'] > max_from_single_ip:
            return True  # throttle this entry..
        elif tdelta.days > 0:  #over 1 day has passed, update the count to 1 and reset time
            ip_row['count'] = 1
            ip_row['time'] = currdatetime.strftime("%d/%m/%Y %H:%M:%S")
            table_service.update_entity(os.environ['BILLING_TABLE'], ip_row)
        else:  # less than 1 day but count is < max_from_single_ip, update the count
            ip_row['count'] = ip_row['count'] + 1
            table_service.update_entity(os.environ['BILLING_TABLE'], ip_row)

    # However we got here, do not throttle
    return False
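throttle_ip_requests expects the caller to supply an entity dict with PartitionKey, RowKey, count and a time string in %d/%m/%Y %H:%M:%S format. The sketch below shows one plausible caller; the key layout is an assumption made for illustration.

# Hypothetical caller for throttle_ip_requests; the entity layout is assumed.
from datetime import datetime

def should_throttle(client_ip):
    ip_entry = {
        'PartitionKey': 'ip',  # single partition for all IP counters
        'RowKey': client_ip,   # the IP string itself as the row key
        'time': datetime.utcnow().strftime("%d/%m/%Y %H:%M:%S"),
        'count': 1,
    }
    return throttle_ip_requests(ip_entry)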
Example #4
def main(req: func.HttpRequest) -> func.HttpResponse:
    logging.info("Python HTTP trigger function processed a request.")

    path = req.route_params.get("path")
    if path is None:
        return func.HttpResponse(body=f"The url requested does not exist!",
                                 status_code=400)
    else:

        # connect to table
        storage_account_name = os.environ["STORAGE_ACCOUNT_NAME"]
        storage_account_key = os.environ["STORAGE_ACCOUNT_KEY"]

        table_service = TableService(account_name=storage_account_name,
                                     account_key=storage_account_key)

        # get longurl
        try:
            row = table_service.get_entity("links", "links", path)
        except Exception:
            return func.HttpResponse(
                body=f"The url requested - {path} - does not exist!",
                status_code=400)
        else:
            longurl = row["LongUrl"]

            return func.HttpResponse(headers={"location": longurl},
                                     status_code=302)
Example #5
class AzureCosmosDb(TableStorage):
    def __init__(self, config: AzureCosmosDbConfig):
        self._tableService = TableService(account_name=config.account_name,
                                          account_key=config.account_key)
        self._tableName = config.table_name

    def check_entry_exists(self, entry):
        try:
            self.query(entry['PartitionKey'], entry['RowKey'])
            return True
        except Exception:
            return False

    def write(self, entry):
        prepared = entry_storage.EntryOperations.prepare_entry_for_insert(
            entry)

        if not self.check_entry_exists(prepared):
            self._tableService.insert_entity(self._tableName, prepared)
        else:
            self._tableService.update_entity(self._tableName, prepared)

    def query(self, partitionkey, rowkey):
        task = self._tableService.get_entity(self._tableName, partitionkey,
                                             rowkey)
        return task

    def delete(self, partitionkey, rowkey):
        self._tableService.delete_entity(self._tableName, partitionkey, rowkey)
Example #6
class AzureTable():
    def __init__(self, account_name, account_key):
        self.table_service = TableService(account_name=account_name,
                                          account_key=account_key)

    def create_table(self, table_name):
        return self.table_service.create_table(table_name)

    def exists_table(self, table_name):
        return self.table_service.exists(table_name)

    def insert_or_replace_entity(self, table_name, partition_key, row_key,
                                 **kwargs):
        try:
            entity = self.table_service.get_entity(table_name, partition_key,
                                                   row_key)
        except Exception:
            # Insert a new entity
            entity = {'PartitionKey': partition_key, 'RowKey': row_key}

        for (k, v) in kwargs.items():
            entity[k] = v

        return self.table_service.insert_or_replace_entity(table_name, entity)

    def insert_or_replace_entity2(self, table_name, entity):
        return self.table_service.insert_or_replace_entity(table_name, entity)

    def insert_entity(self, table_name, entity):
        return self.table_service.insert_entity(table_name, entity)

    def update_entity(self, table_name, entity):
        return self.table_service.update_entity(table_name, entity)

    def get_entity(self, table_name, partition_key, row_key):
        return self.table_service.get_entity(table_name, partition_key,
                                             row_key)

    def delete_entity(self, table_name, partition_key, row_key):
        self.table_service.delete_entity(table_name, partition_key, row_key)

    def delete_table(self, table_name):
        return self.table_service.delete_table(table_name)

    def get_entities(self, table_name, partition_key):
        filter = "PartitionKey eq '{0}'".format(partition_key)
        return self.table_service.query_entities(table_name, filter)
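A short, hedged usage sketch for the wrapper above; the credentials, table name and entity fields are placeholders rather than anything from the original source.

# Illustrative usage of the AzureTable wrapper (placeholder values).
table = AzureTable(account_name='myaccount', account_key='<key>')
if not table.exists_table('devices'):
    table.create_table('devices')

table.insert_or_replace_entity('devices', 'factory-1', 'device-42',
                               firmware='1.2.3', online=True)
entity = table.get_entity('devices', 'factory-1', 'device-42')
print(entity.firmware)

for row in table.get_entities('devices', 'factory-1'):
    print(row.RowKey, row.online)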
Example #7
def get_widget_telemetry(tbl_cnxn: TableService, widget: Widget):
    """
    Looks up and returns the telemetry entity for the given widget.
    """
    partition_key = str(widget.factory_id) + ":" + str(widget.line_id)
    row_key = str(widget.factory_id) + "-" + str(widget.serial_number)
    telemetry = tbl_cnxn.get_entity("Predictions", partition_key, row_key)
    return telemetry
Example #8
def main(req: func.HttpRequest, ipListRowIn, ipListRowOut) -> func.HttpResponse:
    logging.info('Python HTTP trigger function processed a request.')

    # read request body
    reqMessage = req.get_json()
    logging.debug(reqMessage)

    # if available, load previous ip addresses from storage
    previousIpAddresses = None
    dataRow = json.loads(ipListRowIn) 
    if len(dataRow) > 0:    
        previousIpAddresses = json.loads(dataRow[0]['IpAddresses'])
    
    # read env vars
    tenantId = os.getenv("TENANT_ID")
    clientId = os.getenv("CLIENT_ID")
    clientSecret = os.getenv("CLIENT_SECRET")
    tableName = os.getenv("TABLE_NAME")
    entityRowKey = os.getenv("ENTITY_ROW_KEY")
    storageTableConnString = os.getenv("STORAGE_TABLE_CONN_STRING")

    # obtain an access token from Azure AD
    adservice = AzureAdService.AzureAdService(logging)
    accessToken = adservice.getAccessToken(tenantId, clientId, clientSecret)
    logging.debug('Azure AD access token: {0}'.format(accessToken))

    # obtain current possible IP addresses from web/func app
    appservice = WebAppService.WebAppService(logging)
    possibleOutboundIps = appservice.getPossibleIpAddresses(accessToken, reqMessage['webAppSubscriptionId'], reqMessage['webAppResourceGroup'], reqMessage['webAppName'])
    
    # split the comma-separated string into a list
    possibleOutboundIps = possibleOutboundIps.split(',')
    logging.debug('ip addresses: {0}'.format(possibleOutboundIps))
    
    analysisservice = AzureAnalysisService.AzureAnalysisService(logging)
    analysisservice.updateFirewallSettings(
        accessToken, 
        previousIpAddresses, 
        possibleOutboundIps, 
        AnalysisServerModel.AnalysisServerModel(reqMessage['analysisServerSubscriptionId'], reqMessage['analysisServerResourceGroup'], reqMessage['analysisServerName']))
    
    # upsert table row
    if previousIpAddresses is None:
        data = {
            "PartitionKey": entityRowKey,
            "RowKey": entityRowKey,
            "IpAddresses": possibleOutboundIps
        }
        # insert using table storage output binding
        ipListRowOut.set(json.dumps(data))
    else:
        # update existing row using table storage client library
        table_service = TableService(connection_string=storageTableConnString)
        table_row = table_service.get_entity(tableName, entityRowKey, entityRowKey)
        table_row.IpAddresses = json.dumps(possibleOutboundIps)
        table_service.update_entity(tableName, table_row)
    
    return func.HttpResponse('done', status_code=200)
Example #9
def tableStorage(table_name, partition_key, row_key, hins_processed, timesaved,
                 time_by_system, time_by_user, requests):

    try:
        table_service = TableService(
            account_name=config.AZURE['STORAGE_ACCOUNT_NAME'],
            account_key=config.AZURE['STORAGE_ACCOUNT_KEY'])

        entity = {
            'PartitionKey': partition_key,
            'RowKey': row_key,
            'HinsProcessed': hins_processed,
            'TimeSaved': timesaved,
            'TimeBySystem': time_by_system,
            'TimeByUser': time_by_user,
            'Requests': requests
        }

        if not table_service.exists(table_name, timeout=None):
            table_service.create_table(table_name, fail_on_exist=False)

        try:
            table_service.insert_entity(table_name, entity)
            print("Entity Doesn't Exist")
            print("Creating Entity\n")
        except Exception as e:
            print("Entity Exists")
            print("Updating entity\n")

            currentEntity = table_service.get_entity(table_name, partition_key,
                                                     row_key)
            tempHinProcessed = currentEntity.HinsProcessed + hins_processed
            tempTimeSaved = currentEntity.TimeSaved + timesaved
            tempTimeBySystem = currentEntity.TimeBySystem + time_by_system
            tempTimeByUser = currentEntity.TimeByUser + time_by_user
            tempRequest = currentEntity.Requests + requests

            entity = {
                'PartitionKey': partition_key,
                'RowKey': row_key,
                'HinsProcessed': tempHinProcessed,
                'TimeSaved': tempTimeSaved,
                'TimeBySystem': tempTimeBySystem,
                'TimeByUser': tempTimeByUser,
                'Requests': tempRequest
            }

            table_service.update_entity(table_name,
                                        entity,
                                        if_match='*',
                                        timeout=None)

    except Exception as e:
        print(e)
Example #10
def get_object(row_key,
               table_name='default',
               account_name='cloudmaticafunc9b4c',
               account_key='YOUR_KEY',
               partition_key='default'):
    table_service = TableService(account_name=account_name,
                                 account_key=account_key)
    obj = table_service.get_entity(table_name=table_name,
                                   partition_key=partition_key,
                                   row_key=row_key)
    return obj
Example #11
def userpage(variable):
    table_service = TableService(account_name=app.config['StorageName'],
                                 account_key=app.config['StorageKey'])
    name = variable.lower()
    try:
        details = table_service.get_entity('weddingtable', 'Invites', name)
        print(details)
        return render_template("user.html",
                               People1=details.Names,
                               People2=details.Names2,
                               hide=details.Hide,
                               userCode=variable,
                               commentmessage=details.Message)
    except Exception:
        return redirect('/badCode')
Example #12
def get_result(partition_key, row_key, tbl_svc_retry=None, **kwargs):
    """ Retirieves a result from the ``rosiepi`` storage table.

    :param: partition_key: The ``PartitionKey`` of the entity
    :param: row_key: The ``RowKey`` of the entity
    :param: **kwargs: Any additional kwargs to pass onto the
                      ``TableService.get_entity()`` function.

    :return: The ``TableService.Entity``, or None if the Entity
             isn't found.
    """

    response = None

    table = TableService(connection_string=os.environ['APP_STORAGE_CONN_STR'])
    if tbl_svc_retry is not None:
        table.retry = tbl_svc_retry

    # ensure RowKey is properly padded
    padding = '0'*(50 - len(row_key))
    row_key = f'{padding}{row_key}'

    try:
        response = table.get_entity('rosiepi', partition_key, row_key, **kwargs)
    except Exception as err:
        logging.info(f'Failed to get result from rosiepi table. Error: {err}')
        raise

    logging.info(f'TableService.Entity retrieved: {response}')

    # Flatten the entity
    json_data_copy = response.get('json_data')
    if json_data_copy:
        del response['json_data']
        try:
            flat_data = json.loads(json_data_copy)
            response.update(flat_data)
        except Exception as err:
            logging.info('Failed to flatten TableService.Entity.')
            raise

    for item in IGNORED_ITEMS:
        if item in response:
            del response[item]
    
    return response
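A hedged example of calling get_result; it assumes APP_STORAGE_CONN_STR is set and that a check-run id is used as the row key. Because the row key is zero-padded to 50 characters before the lookup, callers can pass the raw id.

# Illustrative call; the partition and row key values are assumptions.
result = get_result(partition_key='some-partition', row_key='12345')
print(result)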
Example #13
def main(req: func.HttpRequest) -> func.HttpResponse:
    logging.info('Python HTTP Submit trigger received a request')

    logging.debug('Creating blob service')

    headers_dict = {
            "Access-Control-Allow-Credentials": "true",
            "Access-Control-Allow-Origin": "*",
            "Access-Control-Allow-Methods": "Post"
    }
    schema = put_schema.DoneSchema()
    try:
        done_dict = schema.loads(req.get_body())
    except ValidationError:
        error = f'Failed to validate the done message'
        return func.HttpResponse(error,
                                 headers=headers_dict,
                                 status_code=400
                                 )

    table_name = os.getenv('AZURE_TABLE_NAME')
    table_service = TableService(
        account_name=os.getenv('AZURE_STORAGE_ACCOUNT'),
        account_key=os.getenv('AZURE_STORAGE_ACCESS_KEY')
    )
    try:
        entity = table_service.get_entity(table_name, 'processing', done_dict['guid'])
    except AzureMissingResourceHttpError:
        error = f'Failed to put done message'
        return func.HttpResponse(error,
                                 headers=headers_dict,
                                 status_code=400
                                 )
    if not done_dict['error']:
        entity.Error = ""
    else:
        entity.Error = done_dict['error']
    table_service.delete_entity(table_name, entity.PartitionKey, entity.RowKey)
    entity.PartitionKey = 'done'
    table_service.insert_entity(table_name, entity)

    return func.HttpResponse('Message was successfully inserted into Done queue',
                             headers=headers_dict
                             )
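Azure Table Storage keys are immutable, so moving an entity from the 'processing' partition to 'done' has to be done as a delete plus an insert, as the handler above does. A generic helper for that pattern might look like the sketch below (the function name and ordering are assumptions, not part of the original code).

# Hypothetical helper generalising the delete-and-reinsert pattern above.
def move_entity_to_partition(table_service, table_name, entity, new_partition_key):
    old_partition_key = entity['PartitionKey']
    old_row_key = entity['RowKey']
    entity['PartitionKey'] = new_partition_key
    # insert first so the row is never lost if the second call fails
    table_service.insert_or_replace_entity(table_name, entity)
    table_service.delete_entity(table_name, old_partition_key, old_row_key)
    return entity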
Example #14
class State:
    def __init__(self):
        with open('./vardhamanbot/bot/config/app.json') as app_config_file:
            app_config = json.load(app_config_file)
        self.tableservice = TableService(app_config["ats_name"],
                                         app_config["ats_key"])
        self.tableservice.retry = ExponentialRetry(initial_backoff=30,
                                                   increment_base=2,
                                                   max_attempts=20).retry

    def getStudent(self, userId):
        try:
            #Try to get the user data based on his year
            return self.tableservice.get_entity('vbotusers', "VMEG", userId)
        except azure.common.AzureMissingResourceHttpError:
            return {}

    def insertOrUpdateStudent(self, studentdata):
        #Partition the Data Based on Year
        studentdata["PartitionKey"] = "VMEG"
        self.tableservice.insert_or_replace_entity("vbotusers", studentdata)
Example #15
    def test_update(self):
        release = make_mag_release(self.account_name, self.account_key, 2019,
                                   6, 1)
        try:
            release.create()

            # Update release
            release.state = MagState.archived
            release.archived_date = pendulum.utcnow().microsecond_(0)
            release.update()

            # Verify that release is updated
            service = TableService(account_name=self.account_name,
                                   account_key=self.account_key)
            entity = service.get_entity(MagRelease.TABLE_NAME,
                                        release.partition_key, release.row_key)
            updated_release = MagRelease.from_entity(entity)
            self.assertEqual(release.state, updated_release.state)
            self.assertEqual(release.archived_date,
                             updated_release.archived_date)
        finally:
            release.delete()
Example #16
def table():
    account_name = config.STORAGE_ACCOUNT_NAME
    account_key = config.STORAGE_ACCOUNT_KEY
    table_service = TableService(account_name=account_name,
                                 account_key=account_key)
    table_name = config.TABLE_NAME
    #table_service.create_table(table_name)

    imageId = str(uuid.uuid4())
    task = Entity()
    task.PartitionKey = 'dlws'
    task.RowKey = imageId
    task.description = 'test'
    table_service.insert_or_replace_entity(table_name, task)

    task = table_service.get_entity(table_name, 'dlws', imageId)
    print(task.description)

    tasks = table_service.query_entities('tasktable')
    for task in tasks:
        print(task.description)
        print(task.RowKey)
Example #17
def getFaceImage(table: TableService, partition_key, rowkey, rect):
    escores_table = "escores"
    entity = table.get_entity(escores_table, partition_key, rowkey)
    fname = entity.ts

    blob_name = "hackgt19"
    blob_key = "24wGa1RHd0BnemSDBbqRzvvTAB7Qy4IAN28E9de6OLR98wxnFljJXnKaBtzqJd2F53SmtNZP2NnZCPZkeL6wlQ=="
    try:
        container_name = 'imageblobs'
        # Create the BlockBlockService that is used to call the Blob service for the storage account
        block_blob_service = BlockBlobService(account_name=blob_name, account_key=blob_key)

        blob_bytes = block_blob_service.get_blob_to_bytes(container_name, fname)
        logging.info("Downloaded '{}' Successful".format(fname))

        img = Image.open(io.BytesIO(blob_bytes))
        img = img.crop((rect['left'], rect['top'],
                        rect['left'] + rect['width'],
                        rect['top'] + rect['height']))
        
        return img
    except Exception as e:
        logging.error("Error: {}".format(e))

    return None
Example #18
def login(email, password):
    # Retrieve login information from our table
    table_service = TableService(
        account_name=azure_storage_credentials["account_name"],
        account_key=azure_storage_credentials["access_key"])
    user = table_service.get_entity('user', '*****@*****.**', '')

    email = user["PartitionKey"]
    pepper = get_pepper()
    password = bytes(str(password + pepper).encode('utf-8'))
    password_hash = user["PasswordHash"].encode('utf-8')

    # Calculate a hash of our password + pepper
    #hashed = bcrypt.hashpw(password_pepper,bcrypt.gensalt())

    #Login
    if bcrypt.checkpw(password, password_hash):
        print('Log in successful!')
        # TODO: generate and return JWT
    else:
        print('You have not logged in')
        # TODO: return error message

    asdf = "asdf"
Example #19
class AzureOperationsStorage(BasicOperationStorage):
    """
        Implementation of :class:`.interface.IOperationStorage` with Azure Table Storage using the
        default implementation :class:`.interface.BasicOperationStorage`

        On creation, a connection to the storage is initialized and all needed
        tables are created. If a purge is necessary, tables are not deleted;
        only their content is removed. Table creation can take a while with
        Azure Table Storage.

        As Azure Table Storage only supports two indices, the operations are inserted multiple times in different
        tables to enable multi-index queries.
    """
    def get_retry_exceptions(self):
        return (NewConnectionError)

    @retry_auto_reconnect
    def __init__(self, azure_config, purge=False):
        super(AzureOperationsStorage, self).__init__()

        if not azure_config:
            raise Exception("No azure table storage configuration provided!")
        self._azure_config = azure_config

        # ensure defaults
        self._azure_config["operation_table"] = self._azure_config.get(
            "operation_table", "operations")
        self._azure_config["address_table"] = self._azure_config.get(
            "address_table", "address")
        self._azure_config["status_table"] = self._azure_config.get(
            "status_table", "status")
        self._azure_config["balances_table"] = self._azure_config.get(
            "balances_table", "balances")

        if not self._azure_config["account"]:
            raise Exception(
                "Please include the azure account name in the config")
        if not self._azure_config["key"]:
            raise Exception(
                "Please include the azure account key in the config")

        self._service = TableService(
            account_name=self._azure_config["account"],
            account_key=self._azure_config["key"])

        # if the tables don't exist, create them
        self._create_operations_storage(purge)
        self._create_status_storage(purge)
        self._create_address_storage(purge)
        self._create_balances_storage(purge)

    def _debug_print(self, operation):
        from pprint import pprint
        pprint(operation)

    def _create_address_storage(self, purge):
        _varients = ["balance", "historyfrom", "historyto"]

        for variant in _varients:
            tablename = self._azure_config["address_table"] + variant
            if purge:
                try:
                    for item in self._service.query_entities(tablename):
                        self._service.delete_entity(tablename,
                                                    item["PartitionKey"],
                                                    item["RowKey"])
                except AzureHttpError:
                    pass
                except AzureMissingResourceHttpError:
                    pass
            while not self._service.exists(tablename):
                self._service.create_table(tablename)
                time.sleep(0.1)

    def _create_status_storage(self, purge):
        if purge:
            try:
                tablename = self._azure_config["status_table"]
                for item in self._service.query_entities(tablename):
                    self._service.delete_entity(tablename,
                                                item["PartitionKey"],
                                                item["RowKey"])
            except AzureMissingResourceHttpError:
                pass
        while not self._service.exists(self._azure_config["status_table"]):
            self._service.create_table(self._azure_config["status_table"])
            time.sleep(0.1)

    def _create_balances_storage(self, purge):
        if purge:
            try:
                tablename = self._azure_config["balances_table"]
                for item in self._service.query_entities(tablename):
                    self._service.delete_entity(tablename,
                                                item["PartitionKey"],
                                                item["RowKey"])
            except AzureMissingResourceHttpError:
                pass
        while not self._service.exists(self._azure_config["balances_table"]):
            self._service.create_table(self._azure_config["balances_table"])
            time.sleep(0.1)

    def _create_operations_storage(self, purge):
        self._operation_varients = [
            "incident", "statuscompleted", "statusfailed", "statusinprogress"
        ]  #  "customer"
        self._operation_tables = {}
        for variant in self._operation_varients:
            self._operation_tables[
                variant] = self._azure_config["operation_table"] + variant

        self._operation_prep = {
            "statusinprogress": lambda op: {
                "PartitionKey": self._short_digit_hash(op["chain_identifier"]),
                "RowKey": op["chain_identifier"]
            },
            "statuscompleted": lambda op: {
                "PartitionKey": self._short_digit_hash(op["chain_identifier"]),
                "RowKey": op["chain_identifier"]
            },
            "statusfailed": lambda op: {
                "PartitionKey": self._short_digit_hash(op["chain_identifier"]),
                "RowKey": op["chain_identifier"]
            },
            "customer": lambda op: {
                "PartitionKey": op["customer_id"],
                "RowKey": op["chain_identifier"]
            },
            "incident": lambda op: {
                "PartitionKey": self._short_digit_hash(op["incident_id"]),
                "RowKey": op["incident_id"]
            }
        }
        for variant in self._operation_varients:
            if purge:
                try:
                    tablename = self._operation_tables[variant]
                    for item in self._service.query_entities(tablename):
                        self._service.delete_entity(tablename,
                                                    item["PartitionKey"],
                                                    item["RowKey"])
                except AzureMissingResourceHttpError:
                    pass
            while not self._service.exists(self._operation_tables[variant]):
                self._service.create_table(self._operation_tables[variant])
                time.sleep(0.1)

    def _get_with_ck(self, variant, operation):
        with_ck = operation.copy()
        with_ck.update(self._operation_prep[variant](with_ck))
        return with_ck

    def _short_digit_hash(self, value):
        hash_type = Config.get("operation_storage",
                               "key_hash",
                               "type",
                               default="crc32")

        if hash_type == "crc32":
            short_hash = hex(zlib.crc32(value.encode(encoding='UTF-8')))
            short_hash = short_hash[2:len(short_hash)]

        elif hash_type == "sha256":
            checker = hashlib.sha256()
            checker.update(value.encode(encoding='UTF-8'))
            short_hash = checker.hexdigest()
        digits = Config.get("operation_storage", "key_hash", "digits", 3)
        return short_hash[0:digits]

    @retry_auto_reconnect
    def track_address(self, address, usage="balance"):
        address = ensure_address_format(address)
        try:
            short_hash = self._short_digit_hash(address)
            logging.getLogger(__name__).debug("track_address with " +
                                              str(address) + ", hash " +
                                              str(short_hash))
            self._service.insert_entity(
                self._azure_config["address_table"] + usage, {
                    "PartitionKey": short_hash,
                    "RowKey": address,
                    "address": address,
                    "usage": usage
                })
        except AzureConflictHttpError:
            raise AddressAlreadyTrackedException

    @retry_auto_reconnect
    def untrack_address(self, address, usage="balance"):
        address = ensure_address_format(address)
        try:
            short_hash = self._short_digit_hash(address)
            logging.getLogger(__name__).debug("untrack_address with " +
                                              str(address) + ", hash " +
                                              str(short_hash))
            self._service.delete_entity(
                self._azure_config["address_table"] + usage, short_hash,
                address)
            try:
                self._delete_balance(address)
            except AzureMissingResourceHttpError:
                pass
        except AzureMissingResourceHttpError:
            raise AddressNotTrackedException()

    @retry_auto_reconnect
    def _get_address(self, address, usage="balance"):
        try:
            short_hash = self._short_digit_hash(address)
            logging.getLogger(__name__).debug("_get_address with " +
                                              str(address) + ", hash " +
                                              str(short_hash))
            return self._service.get_entity(
                self._azure_config["address_table"] + usage, short_hash,
                address)
        except AzureMissingResourceHttpError:
            raise AddressNotTrackedException()

    def _update(self, operation, status=None):
        try:
            mapping = {
                "in_progress": "statusinprogress",
                "completed": "statuscompleted",
                "failed": "statusfailed"
            }

            operation = self._get_with_ck("incident", operation.copy())
            new_operation = operation
            if status:
                tmp = self.get_operation(operation["incident_id"])
                new_operation["timestamp"] = tmp["timestamp"]
                new_operation["status"] = status
                new_operation = self._get_with_ck("incident", new_operation)

            logging.getLogger(__name__).debug(
                "_update: Table " + self._operation_tables["incident"] +
                " PartitionKey " + new_operation["PartitionKey"] + " " +
                new_operation["RowKey"])

            self._service.update_entity(self._operation_tables["incident"],
                                        new_operation)

            operation = self._get_with_ck("statuscompleted", operation.copy())
            new_operation = operation
            if status:
                tmp = self.get_operation(operation["incident_id"])
                new_operation["timestamp"] = tmp["timestamp"]
                new_operation["status"] = status
                new_operation = self._get_with_ck("statuscompleted",
                                                  new_operation)
            self._service.update_entity(
                self._operation_tables["statuscompleted"], new_operation)

            logging.getLogger(__name__).debug(
                "_update: Table " + self._operation_tables["statuscompleted"] +
                " PartitionKey " + new_operation["PartitionKey"] + " " +
                new_operation["RowKey"])

            if status:
                # needs delete and insert
                try:
                    self._service.delete_entity(
                        self._operation_tables[mapping[operation["status"]]],
                        operation["PartitionKey"], operation["RowKey"])
                except AzureMissingResourceHttpError:
                    pass
                try:
                    self._service.insert_entity(
                        self._operation_tables[mapping[
                            new_operation["status"]]], new_operation)
                except AzureConflictHttpError:
                    # already exists, try update
                    self._service.update_entity(
                        self._operation_tables[mapping[
                            new_operation["status"]]], new_operation)
            else:
                self._service.update_entity(
                    self._operation_tables[mapping[new_operation["status"]]],
                    new_operation)
        except AzureMissingResourceHttpError:
            raise OperationNotFoundException()

    def _insert(self, operation):
        try:
            for variant in self._operation_varients:
                to_insert = operation.copy()
                to_insert.update(self._operation_prep[variant](to_insert))
                if not to_insert["PartitionKey"]:
                    raise AzureMissingResourceHttpError()
                if not to_insert["RowKey"]:
                    raise AzureMissingResourceHttpError()

                logging.getLogger(__name__).debug(
                    "_insert: Table " + self._operation_tables[variant] +
                    " PartitionKey " + to_insert["PartitionKey"] + " " +
                    to_insert["RowKey"])
                self._service.insert_entity(self._operation_tables[variant],
                                            to_insert)
        except AzureConflictHttpError:
            raise DuplicateOperationException()

    def _delete(self, operation):
        try:
            for variant in self._operation_varients:
                to_delete = operation.copy()
                to_delete.update(self._operation_prep[variant](to_delete))
                self._service.delete_entity(self._operation_tables[variant],
                                            to_delete["PartitionKey"],
                                            to_delete["RowKey"])
        except AzureMissingResourceHttpError:
            raise OperationNotFoundException()

    @retry_auto_reconnect
    def flag_operation_completed(self, operation):
        # do basics
        operation = super(AzureOperationsStorage,
                          self).flag_operation_completed(operation)

        self._update(operation, status="completed")

        self._ensure_balances(operation)

    @retry_auto_reconnect
    def flag_operation_failed(self, operation, message=None):
        # do basics
        operation = super(AzureOperationsStorage,
                          self).flag_operation_failed(operation)
        operation["message"] = message
        self._update(operation, status="failed")

    @retry_auto_reconnect
    def insert_operation(self, operation):
        # do basics
        operation = super(AzureOperationsStorage,
                          self).insert_operation(operation)

        error = None
        try:
            self._insert(operation)
        except DuplicateOperationException as e:
            error = e

        try:
            # always check if balances are ok
            if operation["status"] == "completed":
                self._ensure_balances(operation)
        except BalanceConcurrentException as e:
            if error is None:
                error = e

        if error is not None:
            raise error

    @retry_auto_reconnect
    def _delete_balance(self, address, if_match='*'):
        self._service.delete_entity(self._azure_config["balances_table"],
                                    self._short_digit_hash(address),
                                    address,
                                    if_match=if_match)

    @retry_auto_reconnect
    def _ensure_balances(self, operation):
        affected_address = get_tracking_address(operation)
        logging.getLogger(__name__).debug("_ensure_balances: with " +
                                          operation["chain_identifier"] +
                                          " for address " +
                                          str(affected_address))
        try:
            self._get_address(affected_address)
        except AddressNotTrackedException:
            # delete if it exists and return
            try:
                self._delete_balance(affected_address)
            except AzureMissingResourceHttpError:
                pass
            return

        try:
            balance_dict = self._service.get_entity(
                self._azure_config["balances_table"],
                self._short_digit_hash(affected_address), affected_address)
            insert = False
        except AzureMissingResourceHttpError as e:
            balance_dict = {"address": affected_address}
            balance_dict["PartitionKey"] = self._short_digit_hash(
                balance_dict["address"])
            balance_dict["RowKey"] = balance_dict["address"]
            insert = True

        if operation["block_num"] < balance_dict.get("blocknum", 0):
            raise BalanceConcurrentException()
        elif operation["block_num"] == balance_dict.get("blocknum", 0) and\
                operation["txnum"] < balance_dict.get("txnum", 0):
            raise BalanceConcurrentException()
        elif operation["block_num"] == balance_dict.get("blocknum", 0) and\
                operation["txnum"] == balance_dict.get("txnum", 0) and\
                operation["opnum"] <= balance_dict.get("opnum", 0):
            raise BalanceConcurrentException()

        balance_dict["blocknum"] = max(balance_dict.get("blocknum", 0),
                                       operation["block_num"])
        balance_dict["txnum"] = max(balance_dict.get("txnum", 0),
                                    operation["tx_in_block"])
        balance_dict["opnum"] = max(balance_dict.get("opnum", 0),
                                    operation["op_in_tx"])
        total = 0

        addrs = split_unique_address(affected_address)
        asset_id = "balance" + operation["amount_asset_id"].split("1.3.")[1]
        if addrs["account_id"] == operation["from"]:
            # internal transfer and withdraw

            # negative
            balance = balance_dict.get(asset_id, 0)

            balance_dict[asset_id] = balance - operation["amount_value"]

            # fee as well
            asset_id = operation["fee_asset_id"]
            balance = balance_dict.get(asset_id, 0)

            balance_dict[asset_id] = balance - operation["fee_value"]
        elif addrs["account_id"] == operation["to"]:
            # deposit

            # positive
            balance = balance_dict.get(asset_id, 0)

            balance_dict[asset_id] = balance + operation["amount_value"]

            # fees were paid by someone else
        else:
            raise InvalidOperationException()

        for key, value in balance_dict.items():
            if key.startswith("balance"):
                total = total + value

        if total == 0:
            if not insert:
                try:
                    self._delete_balance(affected_address,
                                         if_match=balance_dict.etag)
                except AzureMissingResourceHttpError:
                    pass
            return

        # may be updated or inserted, total > 0
        if (insert):
            try:
                self._service.insert_entity(
                    self._azure_config["balances_table"], balance_dict)
            except AzureMissingResourceHttpError:
                raise OperationStorageException(
                    "Critical error in database consistency")
        else:
            try:
                self._service.update_entity(
                    self._azure_config["balances_table"],
                    balance_dict,
                    if_match=balance_dict.etag)
            except AzureConflictHttpError:
                raise OperationStorageException(
                    "Critical error in database consistency")

    @retry_auto_reconnect
    def insert_or_update_operation(self, operation):
        # do basics
        operation = super(AzureOperationsStorage,
                          self).insert_operation(operation)

        # check if this is from in_progress to complete (for withdrawals we need to find incident id as its
        # not stored onchain)
        try:
            logging.getLogger(__name__).debug(
                "insert_or_update_operation: check if in_progress with " +
                str(operation["chain_identifier"]) + " exists")
            existing_operation = self.get_operation_by_chain_identifier(
                "in_progress", operation["chain_identifier"])
            logging.getLogger(__name__).debug(
                "insert_or_update_operation: found existing in_progress operation"
            )
            if not existing_operation["incident_id"] == operation["incident_id"] and\
                    operation["incident_id"] == operation["chain_identifier"]:
                logging.getLogger(__name__).debug(
                    "insert_or_update_operation: using preset incident_id " +
                    str(existing_operation["incident_id"]))
                operation["incident_id"] = existing_operation["incident_id"]
        except OperationNotFoundException:
            existing_operation = None

        if existing_operation is None:
            try:
                logging.getLogger(__name__).debug(
                    "insert_or_update_operation: attempting insert")

                error = None
                try:
                    self._insert(operation)
                except DuplicateOperationException as e:
                    error = e

                try:
                    # always check if balances are ok
                    if operation["status"] == "completed":
                        self._ensure_balances(operation)
                except BalanceConcurrentException as e:
                    if error is None:
                        error = e

                if error is not None:
                    raise error
            except DuplicateOperationException as ex:
                logging.getLogger(__name__).debug(
                    "insert_or_update_operation: fallback to update")
                # could be an update to completed ...
                if operation.get("block_num"):
                    try:
                        operation.pop("status")
                        self.flag_operation_completed(operation)
                    except OperationNotFoundException:
                        raise ex
                else:
                    raise ex
        else:
            logging.getLogger(__name__).debug(
                "insert_or_update_operation: attempting update")
            if operation.get("block_num"):
                try:
                    operation.pop("status")
                    self.flag_operation_completed(operation)
                except OperationNotFoundException:
                    raise

    @retry_auto_reconnect
    def delete_operation(self, operation_or_incident_id):
        # do basics
        operation = super(AzureOperationsStorage,
                          self).delete_operation(operation_or_incident_id)

        if type(operation_or_incident_id) == str:
            operation = self.get_operation(operation_or_incident_id)
        else:
            operation = operation_or_incident_id
        self._delete(operation)

    @retry_auto_reconnect
    def get_operation_by_chain_identifier(self, status, chain_identifier):
        mapping = {
            "in_progress": "statusinprogress",
            "completed": "statuscompleted",
            "failed": "statusfailed"
        }
        try:
            operation = self._service.get_entity(
                self._operation_tables[mapping[status]],
                self._short_digit_hash(chain_identifier), chain_identifier)
            operation.pop("PartitionKey")
            operation.pop("RowKey")
            operation.pop("Timestamp")
            operation.pop("etag")
        except AzureMissingResourceHttpError:
            raise OperationNotFoundException()
        return operation

    @retry_auto_reconnect
    def get_operation(self, incident_id):
        try:
            short_hash = self._short_digit_hash(incident_id)
            logging.getLogger(__name__).debug("get_operation with " +
                                              str(incident_id) + ", hash " +
                                              str(short_hash))
            operation = self._service.get_entity(
                self._operation_tables["incident"], short_hash, incident_id)
            operation.pop("PartitionKey")
            operation.pop("RowKey")
            operation.pop("Timestamp")
            operation.pop("etag")
        except AzureMissingResourceHttpError:
            raise OperationNotFoundException()
        return operation

    @retry_auto_reconnect
    def get_balances(self,
                     take,
                     continuation=None,
                     addresses=None,
                     recalculate=False):
        if recalculate:
            raise Exception(
                "Currently not supported due to memo change on withdraw")
            return self._get_balances_recalculate(take, continuation,
                                                  addresses)
        else:
            if continuation is not None:
                try:
                    continuation_marker = json.loads(continuation)
                except TypeError:
                    raise InputInvalidException()
                except JSONDecodeError:
                    raise InputInvalidException()

                balances = self._service.query_entities(
                    self._azure_config["balances_table"],
                    num_results=take,
                    marker=continuation_marker)
            else:
                balances = self._service.query_entities(
                    self._azure_config["balances_table"], num_results=take)
            return_balances = {}
            for address_balance in balances:
                return_balances[address_balance["address"]] = {
                    "block_num": address_balance["blocknum"]
                }
                for key, value in address_balance.items():
                    if key.startswith("balance"):
                        asset_id = "1.3." + key.split("balance")[1]
                        return_balances[
                            address_balance["address"]][asset_id] = value
            return_balances["continuation"] = None
            if balances.next_marker:
                return_balances["continuation"] = json.dumps(
                    balances.next_marker)
            return return_balances

    @retry_auto_reconnect
    def _get_balances_recalculate(self,
                                  take,
                                  continuation=None,
                                  addresses=None):
        address_balances = collections.defaultdict(
            lambda: collections.defaultdict())

        if not addresses:
            if continuation is not None:
                try:
                    continuation_marker = json.loads(continuation)
                except TypeError:
                    raise InputInvalidException()
                except JSONDecodeError:
                    raise InputInvalidException()

                addresses = self._service.query_entities(
                    self._azure_config["address_table"] + "balance",
                    num_results=take,
                    marker=continuation_marker)
            else:
                addresses = self._service.query_entities(
                    self._azure_config["address_table"] + "balance",
                    num_results=take)
            if addresses.next_marker:
                address_balances["continuation"] = json.dumps(
                    addresses.next_marker)
            addresses = [x["address"] for x in addresses]

        if type(addresses) == str:
            addresses = [addresses]

        for address in addresses:
            addrs = split_unique_address(address)
            max_block_number = 0
            for operation in self.get_operations_completed(
                    filter_by={"customer_id": addrs["customer_id"]}):
                this_block_num = operation["block_num"]
                asset_id = operation["amount_asset_id"]
                if addrs["account_id"] == operation["from"]:
                    # negative
                    balance = address_balances[address].get(asset_id, 0)

                    address_balances[address][asset_id] =\
                        balance - operation["amount_value"]

                    # fee as well
                    asset_id = operation["fee_asset_id"]
                    balance = address_balances[address].get(asset_id, 0)

                    address_balances[address][asset_id] =\
                        balance - operation["fee_value"]
                elif addrs["account_id"] == operation["to"]:
                    # positive
                    balance = address_balances[address].get(asset_id, 0)

                    address_balances[address][asset_id] =\
                        balance + operation["amount_value"]
                else:
                    raise InvalidOperationException()
                max_block_number = max(max_block_number, this_block_num)
            if max_block_number > 0:
                address_balances[address]["block_num"] = max_block_number

        # do not return default dicts
        for key, value in address_balances.items():
            if type(value) == collections.defaultdict:
                address_balances[key] = dict(value)
        return dict(address_balances)

    def _parse_filter(self, filter_by):
        if filter_by:
            if filter_by.get("customer_id"):
                return {"customer_id": filter_by.pop("customer_id")}
            if filter_by.get("address"):
                addrs = split_unique_address(filter_by.pop("address"))
                return {"customer_id": addrs["customer_id"]}
            if filter_by.get("from"):
                addrs = split_unique_address(filter_by.pop("from"))
                return {"from": addrs["account_id"]}
            if filter_by.get("to"):
                addrs = split_unique_address(filter_by.pop("to"))
                return {"to": addrs["account_id"]}
            if filter_by:
                raise Exception("Filter not supported")
        return {}

    def _filter_dict_to_string(self, filter_dict, partition_key=None):
        filter_str = None
        for key, value in filter_dict.items():
            if partition_key == key:
                key = "PartitionKey"
            delimiter = " and " if filter_str is not None else ""
            filter_str = (filter_str or "") + delimiter + key + " eq '" + value + "'"
        return filter_str

    @retry_auto_reconnect
    def get_operations_in_progress(self, filter_by=None):
        mapping = {
            "in_progress": "statusinprogress",
            "completed": "statuscompleted",
            "failed": "statusfailed"
        }

        filter_dict = {}
        filter_dict.update(self._parse_filter(filter_by))

        filter_str = self._filter_dict_to_string(filter_dict, "status")

        return list(
            self._service.query_entities(
                self._operation_tables[mapping["in_progress"]], filter_str))

    @retry_auto_reconnect
    def get_operations_completed(self, filter_by=None):
        mapping = {
            "in_progress": "statusinprogress",
            "completed": "statuscompleted",
            "failed": "statusfailed"
        }

        filter_dict = {}
        filter_dict.update(self._parse_filter(filter_by))

        filter_str = self._filter_dict_to_string(filter_dict, "status")

        return list(
            self._service.query_entities(
                self._operation_tables[mapping["completed"]], filter_str))

    @retry_auto_reconnect
    def get_operations_failed(self, filter_by=None):
        mapping = {
            "in_progress": "statusinprogress",
            "completed": "statuscompleted",
            "failed": "statusfailed"
        }

        filter_dict = {}
        filter_dict.update(self._parse_filter(filter_by))

        filter_str = self._filter_dict_to_string(filter_dict, "status")

        return list(
            self._service.query_entities(
                self._operation_tables[mapping["failed"]], filter_str))

    @retry_auto_reconnect
    def get_last_head_block_num(self):
        try:
            document = self._service.get_entity(
                self._azure_config["status_table"], "head_block_num", "last")
            return document["last_head_block_num"]
        except AzureMissingResourceHttpError:
            return 0

    @retry_auto_reconnect
    def set_last_head_block_num(self, head_block_num):
        current_last = self.get_last_head_block_num()
        if current_last >= head_block_num:
            raise Exception("Marching backwards not supported! Last: " +
                            str(current_last) + " New: " + str(head_block_num))
        self._service.insert_or_replace_entity(
            self._azure_config["status_table"], {
                "PartitionKey": "head_block_num",
                "RowKey": "last",
                "last_head_block_num": head_block_num
            })
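As a rough, hedged sketch of how this storage class might be instantiated: the dictionary keys mirror the ones read in __init__, while the values and the follow-up call are placeholders.

# Illustrative construction; values are placeholders, and the table names are
# optional (they fall back to the defaults set in __init__).
azure_config = {
    "account": "mystorageaccount",
    "key": "<storage-key>",
}
storage = AzureOperationsStorage(azure_config, purge=False)
print(storage.get_last_head_block_num())  # returns 0 until a value is stored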
Example #20
class AzureTable(object):
    def __init__(self, account_name: str, account_key: str, table_name: str,
                 partition_key_field: str, clustering_key_field: str):
        self.table = TableService(account_name=account_name,
                                  account_key=account_key)
        self.table_name = table_name
        self.partition_key_field = partition_key_field
        self.clustering_key_field = clustering_key_field

    @property
    def partition_key_name(self) -> str:
        return 'PartitionKey'

    @property
    def clustering_key_name(self) -> str:
        return 'RowKey'

    def get_payload(self, payload: dict):
        item = deepcopy(payload)
        partition_key = payload.get(self.partition_key_field)
        clustering_key = payload.get(self.clustering_key_field)
        if partition_key is None:
            raise PartitionKeyNotFoundError(
                'payload={} does not have a partition key'.format(payload))
        if clustering_key is None:
            raise ClusteringKeyNotFoundError(
                'payload={} does not have a clustering key'.format(payload))

        item.update({
            self.partition_key_name: partition_key,
            self.clustering_key_name: clustering_key
        })

        return item

    def create(self):
        return self.table.create_table(self.table_name)

    def insert(self, item: dict):
        return self.table.insert_entity(self.table_name,
                                        self.get_payload(item))

    def update(self, item: dict):
        pass

    def upsert(self, item: dict):
        pass

    def delete(self, key: str):
        pass

    def read(self, key: str):
        pass

    def insert_batch(self, items: list):
        batch = TableBatch()
        for item in items:
            batch.insert_entity(self.get_payload(item))

        return self.table.commit_batch(self.table_name, batch)

    def get(self, partition_key: str, clustering_key: str):
        return self.table.get_entity(self.table_name, partition_key,
                                     clustering_key)

    def get_by_partition(self, partition_key: str) -> list:
        return self.table.query_entities(self.table_name,
                                         filter="{} eq '{}'".format(
                                             self.partition_key_name,
                                             partition_key))
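A brief, hedged usage sketch for this wrapper; the table and field names are placeholders. The payload handed to insert must contain both configured key fields, which get_payload copies into PartitionKey and RowKey.

# Hypothetical usage (placeholder names).
table = AzureTable(account_name='myaccount', account_key='<key>',
                   table_name='events', partition_key_field='device_id',
                   clustering_key_field='event_id')
table.create()
table.insert({'device_id': 'dev-1', 'event_id': 'evt-001', 'status': 'ok'})
print(table.get('dev-1', 'evt-001'))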
Example #21
table_service.commit_batch('tasktable', batch)

# alternative way to use batch, using context
print("batch insert using context...")
task006 = {'PartitionKey': 'tasksSeattle', 'RowKey': '006',
           'description': 'Go grocery shopping', 'priority': 400}
task007 = {'PartitionKey': 'tasksSeattle', 'RowKey': '007', 'newCol': 'newColVal1',
           'description': 'Clean the bathroom', 'priority': 100}

with table_service.batch('tasktable') as batch:
    batch.insert_entity(task006)
    batch.insert_entity(task007)

# Query
print("Query task 001...")
task = table_service.get_entity('tasktable', 'tasksSeattle', '001')
print(task.description)
print(task.priority)

# Query a set of entities
print("Query set of entities...")
tasks = table_service.query_entities(
    'tasktable', filter="PartitionKey eq 'tasksSeattle'")
for task in tasks:
    print(task.description)
    print(task.priority)
    try:
        print(task.newCol)
    except AttributeError:
        print("No newCol.")
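Since the entities returned by this SDK are dict subclasses as well as attribute objects, the optional column can also be read without the try/except; a small variant of the loop above, assuming the same 'tasktable':

for task in table_service.query_entities(
        'tasktable', filter="PartitionKey eq 'tasksSeattle'"):
    # Entity extends dict, so .get() covers rows that lack the extra column
    print(task['description'], task['priority'], task.get('newCol', 'No newCol.'))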
Пример #22
0
with open(upload_file_path, "rb") as data:
    blob_client.upload_blob(data)

table_service = TableService(account_name='<Name of storage account>', account_key='<TableStorage Account Key>')
#table_service.create_table('CarDetails')

#Insert Record
car1 = {'PartitionKey': 'Car', 'RowKey' : '<NumberPlate>',
        'OwnerName' : 'xxxxx', 'ContactNo' : '+91-xxxxx xxxxx', 'PollutionLevel' : 0, 'LastCheckedDate' : '2020-11-15',
        'AmountToBePaid' : 0, 'TimeStamps' : ""}
table_service.insert_entity('CarDetails', car1)

#Query
task = table_service.get_entity('CarDetails', 'Car', str(text))

mq = MQ()
perc = mq.MQPercentage()
poll = perc["CO"]
today = date.today()
l = str(task.LastCheckedDate)
l = date(int(l[0:4]), int(l[5:7]), int(l[8:]))
delta = (today - l)
if poll>500 and delta.days > 10:
  task.PollutionLevel = poll
  task.LastCheckedDate = str(today)  # keep the 'YYYY-MM-DD' string format used at insert time
  task.AmountToBePaid = task.AmountToBePaid + 100
  task.TimeStamps = task.TimeStamps + "-" + str(ts)
  #Logic App
  Message = "Hello " + str(task.OwnerName) + ", it is observed that the pollution level for your vehicle " + str(task.RowKey) + " is above the normal limit. Please pay a fine of Rs. " + str(task.AmountToBePaid) + " online or at your nearest RTO office."
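  # (Hedged follow-up: the snippet above only changes the entity in memory.
  #  Assuming the same table_service, this writes the record back to CarDetails;
  #  the entity keeps its PartitionKey/RowKey, so update_entity replaces the
  #  existing row. Sending the Logic App message is left out.)
  table_service.update_entity('CarDetails', task)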
Пример #23
0
        ser.write(bytearray([0x31]))
        time.sleep(delaytime)


if __name__ == '__main__':
    # Time marker
    memoryTime = time.time()
    openDoor = False

    # Instantiate the Azure Table service
    table_service = TableService(account_name='acc_name',
                                 account_key='acc_key')
    print(
        table_service.get_entity('testAPPs',
                                 'ComputerVision',
                                 'instruct',
                                 select='action').action)

    # Infinite loop
    while True:
        # If the table's "action" field is "open"
        if table_service.get_entity('testAPPs',
                                    'ComputerVision',
                                    'instruct',
                                    select='action').action == 'open':
            # Open the door
            if not openDoor:
                openDoor = True
                memoryTime = time.time()
                controlDoor('open')
            # Close the door three seconds later and set the "action" field back to "close"
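            # (Listing truncated here; a hedged sketch of the step described in
            #  the comment above, reusing controlDoor() and table_service.)
            elif time.time() - memoryTime > 3:
                controlDoor('close')
                openDoor = False
                table_service.merge_entity('testAPPs',
                                           {'PartitionKey': 'ComputerVision',
                                            'RowKey': 'instruct',
                                            'action': 'close'})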
Пример #24
0
class Azure_Storage():
    def __init__(self, create_new=False):
        account_name = config.STORAGE_ACCOUNT_NAME
        account_key = config.STORAGE_ACCOUNT_KEY

        self.task_queue_name = config.TASK_QUEUE_NAME
        self.table_name = config.TABLE_NAME
        self.container_name = config.BLOB_CONTAINER_NAME
        self.ImagePartitionKey = config.IMAGE_PARTITION_KEY

        self.table_service = TableService(account_name=account_name,
                                          account_key=account_key)
        self.block_blob_service = BlockBlobService(account_name=account_name,
                                                   account_key=account_key)
        self.queue_service = QueueService(account_name=account_name,
                                          account_key=account_key)

        if create_new:
            self.queue_service.create_queue(self.task_queue_name)
            self.block_blob_service.create_container(self.container_name)
            self.table_service.create_table(self.table_name)

    def put_image(self, image_uuid, image_bytes):
        ret = self.block_blob_service.create_blob_from_bytes(
            self.container_name, image_uuid, image_bytes)
        return ret

    def get_image(self, image_uuid):
        ret = self.block_blob_service.get_blob_to_bytes(
            self.container_name, image_uuid).content
        return ret

    def put_classification_result(self, image_uuid, results):
        task = Entity()
        task.PartitionKey = self.ImagePartitionKey
        task.RowKey = image_uuid
        task.results = str(results)
        ret = self.table_service.insert_or_replace_entity(
            self.table_name, task)
        return ret

    def get_classification_result(self, image_uuid):
        try:
            task = self.table_service.get_entity(self.table_name,
                                                 self.ImagePartitionKey,
                                                 image_uuid)
            return task.results
        except Exception as e:
            return None

    def put_task(self, taskmsg):
        ret = self.queue_service.put_message(self.task_queue_name, taskmsg)
        return ret

    #payload is in message.content
    def get_task(self, num_messages=16):
        messages = self.queue_service.get_messages(self.task_queue_name,
                                                   num_messages=num_messages,
                                                   visibility_timeout=1 * 60)
        return messages

    def delete_task(self, message):
        ret = self.queue_service.delete_message(self.task_queue_name,
                                                message.id,
                                                message.pop_receipt)
        return ret
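A minimal end-to-end sketch for the Azure_Storage helper above, assuming the config module supplies the referenced settings, a local test image, and that the queue, container and table already exist (or create_new=True was passed once):

storage = Azure_Storage()

# Hypothetical workflow: upload an image, enqueue its id, let a "worker"
# classify it, then read the stored result back.
with open('sample.jpg', 'rb') as f:
    storage.put_image('img-001', f.read())
storage.put_task('img-001')

for msg in storage.get_task(num_messages=1):
    image_bytes = storage.get_image(msg.content)
    storage.put_classification_result(msg.content, ['cat', 0.98])  # placeholder result
    storage.delete_task(msg)

print(storage.get_classification_result('img-001'))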
Пример #25
0
   'color': 'Green',
   'price': '15'
}
table_service.insert_entity(table_name, entity_example)
entity_example = {
   'PartitionKey': 'Bike',
   'RowKey': 'Audi',
   'text': 'Germany',
   'color': 'Green',
   'price': '5'
}
table_service.insert_entity(table_name, entity_example)

# COMMAND ----------

task = table_service.get_entity(table_name, 'Car', 'Tesla')
print(task)

# COMMAND ----------

tasks = table_service.query_entities(table_name, filter="price eq '5'")
for task in tasks:
    print(task.PartitionKey)
    print(task.RowKey)
    print(task.text)
    print("---")

# COMMAND ----------
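To round out the notebook cells above, a hedged extra cell that updates and deletes entities in the same table (same table_service and table_name):

# Merge a changed price into the existing Tesla row, then remove the Audi row.
table_service.merge_entity(table_name, {'PartitionKey': 'Car', 'RowKey': 'Tesla',
                                        'price': '16'})
table_service.delete_entity(table_name, 'Bike', 'Audi')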


Пример #26
0
class Storage_az_table():

    ##################################################################
    # BEGIN - Common methods to implement storage interface

    def __init__(self):
        connection_string = os.environ['SHKOLA_AZ_TABLE_CONN_STR']
        self.table_service = TableService(connection_string=connection_string)

        self.default_partition_key = "USER"
        self.users_table_name = 'users'
        self.responses_table_name = 'responses'
        self.sessions_table_name = 'sessions'
        self.feedbacks_table_name = 'feedbacks'

        tables = [
            self.users_table_name, self.responses_table_name,
            self.sessions_table_name, self.feedbacks_table_name
        ]

        existing_tables = list(
            map(lambda x: x.name, self.table_service.list_tables()))

        for table in tables:
            if table not in existing_tables:
                self.table_service.create_table(table)

    def get_user(self, user_id):
        partition_key = self.default_partition_key

        try:
            entity = self.table_service.get_entity(self.users_table_name,
                                                   partition_key, user_id)
        except AzureMissingResourceHttpError:
            return None

        entity["user_id"] = user_id
        return entity

    def update_user(self,
                    user_id,
                    name=None,
                    remote_ip=None,
                    user_agent=None,
                    picture=None,
                    user_language=None,
                    selected_language=None,
                    last_accessed=None):
        properties = dict()

        # Nothing better at the moment:
        properties['PartitionKey'] = self.default_partition_key
        properties['RowKey'] = user_id
        properties['user_id'] = user_id

        properties["name"] = name
        properties["remote_ip"] = remote_ip
        properties["user_agent"] = user_agent
        properties["picture"] = picture
        properties["user_language"] = user_language
        if selected_language:
            properties["selected_language"] = selected_language
        properties["last_accessed"] = last_accessed

        logging.debug("azure table update_user %s: %s", str(name),
                      str(properties))

        try:
            self.table_service.insert_or_merge_entity(self.users_table_name,
                                                      properties)
        except Exception:
            logging.exception('Error adding to table ' +
                              self.users_table_name +
                              ' record: {}'.format(properties))

    def update_selected_language(self, user_id, selected_language):
        properties = dict()

        # Nothing better at the moment:
        properties['PartitionKey'] = self.default_partition_key
        properties['RowKey'] = user_id
        properties['user_id'] = user_id

        properties["selected_language"] = selected_language
        logging.debug("azure table update_user_language %s: %s", str(user_id),
                      str(properties))

        try:
            self.table_service.insert_or_merge_entity(self.users_table_name,
                                                      properties)
        except Exception:
            logging.exception('Error adding to table ' +
                              self.users_table_name +
                              ' record: {}'.format(properties))

    def insert_user_id(self, user_id):
        self.update_user(user_id)
        return user_id

    # Get all user responses from the response table
    def get_all_user_results(self, u_id, from_date=None):

        req = "(PartitionKey eq '{}')".format(u_id)

        if from_date:
            if len(req) > 0:
                req = req + " and "
            req = req + "(Timestamp ge datetime'{}')".format(from_date)

        entries = self.table_service.query_entities(self.responses_table_name,
                                                    req)

        result = []
        for row in entries:
            result.append(row)

        return result

    # Update user info with the latest <stats>, and with
    # <stats_time> being the time of the latest stats
    def update_user_stats(self, user_id, stats, stats_time):
        properties = dict()

        # Nothing better at the moment:
        properties['PartitionKey'] = self.default_partition_key
        properties['RowKey'] = user_id
        properties['user_id'] = user_id
        properties['stats'] = encode_dict(stats)
        properties['stats_time'] = stats_time

        try:
            self.table_service.merge_entity(self.users_table_name, properties)
        except Exception:
            logging.exception('Error updating results for user ' + user_id +
                              ' record: {}'.format(properties))

    @timer_section("storage.record_response")
    def record_response(self, response):
        fb_time = int(time.time() * 1000)

        response['PartitionKey'] = response['user_id']
        response['RowKey'] = response['question_id'] + "|" + \
                            str(response['attempt']) + "|" + \
                            str(fb_time) + "|" + \
                            str(response['duration'])

        # Remove special characters not allowed in Azure PartitionKey and RowKey
        response['PartitionKey'] = re.sub(r"[ /?#]", "_",
                                          response['PartitionKey'])
        response['RowKey'] = re.sub(r"[ /?#]", "_", response['RowKey'])

        #logging.debug("*** record response: {}".format(response))

        try:
            self.table_service.insert_entity(self.responses_table_name,
                                             response)
        except Exception as err:
            logging.exception('Error adding response: {}\n\n{}'.format(
                response, err))

    @timer_section("storage.record_feedback")
    def record_feedback(self, response):
        fb_time = int(time.time() * 1000)

        response['PartitionKey'] = response['question_id']
        response['RowKey'] = response['type'] + "|" + response[
            'list_id'] + "|" + str(fb_time)

        # Remove special characters not allowed in Azure PartitionKey and RowKey
        response['PartitionKey'] = re.sub(r"[ /?#]", "_",
                                          response['PartitionKey'])
        response['RowKey'] = re.sub(r"[ /?#]", "_", response['RowKey'])

        logging.debug("*** record feedback: {}".format(response))

        try:
            self.table_service.insert_entity(self.feedbacks_table_name,
                                             response)
        except Exception as err:
            logging.exception('Error adding feedback: ' + str(err))

    @timer_section("storage.update_session")
    def update_session(self, session_id, data={}):
        assert session_id is not None
        assert data['state_id'] is not None

        properties = {
            'PartitionKey': session_id,
            'RowKey': "",
            'data': encode_dict(data['data']),
            'user_id': data['user_id'],
            'state_id': data['state_id'],
            'valid': data['valid']
        }

        logging.debug(
            "storage: updating session: {}, valid={}, state_id={}".format(
                session_id, data['valid'], data['state_id']))

        try:
            self.table_service.insert_or_replace_entity(
                self.sessions_table_name, properties)
        except Exception:
            logging.exception('Error adding to table ' +
                              self.sessions_table_name +
                              ' record: {}'.format(properties))

    @timer_section("get_session")
    def get_session(self, session_id):
        try:
            entity = self.table_service.get_entity(self.sessions_table_name,
                                                   session_id, "")
        except AzureMissingResourceHttpError:
            return None

        # Azurite simulator returns an empty entity instead of exception, so check here
        if "user_id" not in entity.keys():
            return None

        logging.debug(
            "storage: loaded session: {}, valid={}, state_id={}".format(
                session_id, entity.get('valid'), entity.get('state_id')))

        # Compatibility: old records don't have state_id, valid
        if "state_id" not in entity:
            entity['state_id'] = None

        if "valid" not in entity:
            entity["valid"] = True

        return {
            # Convert "None" to None, see above
            # "user_id": entity["user_id"] if (not entity["user_id"] == "None") else None,
            "user_id": entity["user_id"],
            "data": decode_dict(entity["data"]),
            "state_id": entity["state_id"],
            "valid": entity["valid"]
        }

    # Get all user responses from the response table
    def get_all_user_sessions(self, u_id, from_date=None):

        req = "(user_id eq '{}')".format(u_id)

        if from_date:
            if len(req) > 0:
                req = req + " and "
            req = req + "(Timestamp ge datetime'{}')".format(
                from_date.isoformat())

        entries = self.table_service.query_entities(self.sessions_table_name,
                                                    req)

        result = []
        for row in entries:
            row["data"] = decode_dict(row["data"])
            result.append(row)

        return result

    # Get direct user feedback before given date
    def get_all_user_feedback(self, from_date=None):

        # Ignore JS and Google errors, only return specific user feedback
        req = "(type ne 'JS_ERROR') and (type ne 'GOOGLE_ERROR')"

        if from_date:
            if len(req) > 0:
                req = req + " and "
            req = req + "(Timestamp ge datetime'{}')".format(
                from_date.isoformat())

        entries = self.table_service.query_entities(self.feedbacks_table_name,
                                                    req)

        result = []
        for row in entries:
            result.append(row)

        return result

    # END - Common methods to implement storage interface
    ##################################################################

    # Doesn't really work
    # def wipe_tables(self):
    #     try:
    #         self.table_service.delete_entity(self.users_table_name, "", "")
    #     except Exception as err:
    #         print('Error wiping table, ' + self.users_table_name + ': ' + str(err))

    #     try:
    #         self.table_service.delete_entity(self.responses_table_name, "", "")
    #     except Exception as err:
    #         print('Error wiping table, ' + self.responses_table_name + ':' + str(err))

    # def delete_all_tables(self):
    #     try:
    #         self.table_service.delete_table(self.users_table_name)
    #     except Exception as err:
    #         logging.exception('Error deleting table, ' + self.users_table_name + ': ' + str(err))

    #     try:
    #         self.table_service.delete_table(self.responses_table_name)
    #     except Exception as err:
    #         logging.exception('Error deleting table, ' + self.responses_table_name + ':' + str(err))

    def get_all_responses(self, user_id=None):
        if user_id is None:
            req = ""
        else:
            req = "PartitionKey eq '{}'".format(user_id)

        entries = self.table_service.query_entities(self.responses_table_name,
                                                    req)

        return entries

    def get_all_users(self):
        entries = self.table_service.query_entities(self.users_table_name, "")

        return entries

    def print_all_responses(self, user_id=None):
        entries = self.get_all_responses(user_id)

        if user_id is None:
            print("            USER_ID            ", end='')
        else:
            print("USER_ID = {}\n".format(user_id))

        print(
            "     QUESTION_ID          LIST_ID     RESPONSE_TYPE           TIME                DURATION      CORRECT   INCORRECT                QUESTIONS"
        )
        for response in entries:
            if user_id is None:
                print("{:^30} ".format(response['user_id']), end='')  # USER_ID
            print("{:^20} ".format(response['question_id']), end='')  # QUESTION_ID
            print("{:^16} ".format(response['list_id']), end='')  # List ID
            print("{:^12} ".format(response['response_type']),
                  end='')  # RESPONSE TYPE
            print("{:^26} ".format(
                time.strftime("%d-%m-%y %H:%M:%S",
                              time.localtime(int(response['time'])))),
                  end='')  # TIME
            print("{:^16} ".format(response['duration']), end='')  # DURATION
            print("{:^10} ".format(response['correct']), end='')  # CORRECT
            print("{:^10} ".format(response['incorrect']), end='')  # INCORRECT
            print("{:^38} ".format(response['questions']))  # QUESTIONS

        print("\n")

    def print_all_users(self):
        entries = self.get_all_users()

        print(
            "           USER ID                    NAME                 LAST ACCESSED          REMOTE IP                      USER AGENT             USER LANGUAGE"
        )
        for row in entries:
            print("{:^30} {:^20} {:^20} {:^20} {:^40} {:^40}".format(
                row['user_id'], row['name'],
                time.strftime("%d-%m-%y %H:%M:%S",
                              time.localtime(row['last_accessed'])),
                row['remote_ip'] if 'remote_ip' in row.keys() else "",
                row['user_agent'] if 'user_agent' in row.keys() else "",
                row['user_language'] if 'user_language' in row.keys() else ""))

        print("\n")

    def get_question_stats(self, q_id=None, from_date=None):
        req = ""

        if q_id:
            # Remove / from q_id
            # mq_id = ("".join("fractions/q00022".split("/")))
            mq_id = ("".join(q_id.split("/")))
            req = req + "((RowKey ge '{}|') and (RowKey lt '{}{}'))".format(
                mq_id, mq_id, chr(255))

        if from_date:
            if len(req) > 0:
                req = req + " and "
            req = req + "(Timestamp ge datetime'{}')".format(from_date)

        entries = self.table_service.query_entities(self.responses_table_name,
                                                    req)

        result = []
        for row in entries:
            if "user_id" not in row.keys() or \
                "UNKNOWN" in row["user_id"]:
                continue

            result.append(row)

        return result
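A minimal sketch of how Storage_az_table could be exercised, assuming SHKOLA_AZ_TABLE_CONN_STR points at a storage account (or Azurite) and that the encode_dict/decode_dict helpers used above are importable; names are illustrative only:

storage = Storage_az_table()

user_id = storage.insert_user_id('user-123')   # creates or merges the user row
storage.update_user(user_id, name='Test User', user_language='en',
                    last_accessed=int(time.time()))

storage.update_session('session-abc', {'data': {'page': 'start'},
                                       'user_id': user_id,
                                       'state_id': 's1',
                                       'valid': True})
print(storage.get_session('session-abc'))
print(storage.get_user(user_id))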
Пример #27
0
HUBS = ["movehub", "cityhub", "technichub", "primehub", "nxt"]

print("Building commits...")

try:
    pybricks = git.Repo(PYBRICKS_PATH)
except Exception as e:
    print(f"Repository not found at '{PYBRICKS_PATH}':", e)
    print("try setting the PYBRICKS_PATH environment variable")
    sys.exit(1)

assert not pybricks.bare, "Repository not found"

service = TableService(STORAGE_ACCOUNT, STORAGE_KEY)

start_hash = service.get_entity(CI_STATUS_TABLE, "build", "lastHash")["hash"]

if start_hash == pybricks.commit(PYBRICKS_BRANCH).hexsha:
    print("Already up to date.")
    sys.exit(0)

# Process the commits in the tree and log the data
for commit in pybricks.iter_commits(f"{start_hash}..{PYBRICKS_BRANCH}"):
    print(f"trying {commit.hexsha}...")
    try:
        entity = service.get_entity(FIRMWARE_SIZE_TABLE, "size", commit.hexsha)
        # if entity is found but some hubs had null size, redo only those hubs
        hubs = [h for h in HUBS if entity.get(h) is None]
    except AzureMissingResourceHttpError:
        # if there is no entry at all, build all hubs
        hubs = HUBS
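The listing breaks off inside the loop; for illustration only, a hedged sketch of how the progress marker read at the top could be advanced once every commit has been processed (same service and table constants):

# Hypothetical wrap-up step: move the lastHash marker to the new branch head
# so the next run of the script starts from there.
service.insert_or_replace_entity(CI_STATUS_TABLE, {
    "PartitionKey": "build",
    "RowKey": "lastHash",
    "hash": pybricks.commit(PYBRICKS_BRANCH).hexsha,
})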
Пример #28
0
class GetTableData:
    def __init__(self,
                 connection_string=None,
                 storage_account_name=None,
                 account_key=None,
                 table_name=None,
                 **options):
        assert (storage_account_name and account_key) or connection_string
        self.storage_account_name = storage_account_name
        self.account_key = account_key
        self.table_name = table_name or "defaulttablepython"
        self.table_service = None
        if self.storage_account_name and self.account_key:
            self.table_service = TableService(
                account_name=self.storage_account_name,
                account_key=self.account_key)
        if connection_string:
            self.connection_string = connection_string
            self.table_service = TableService(
                connection_string=self.connection_string)
        self.differential_column = options.get(
            'differential_column') or "Timestamp"
        self.previous_diff_val = None
        self.executed_at = dt.utcnow()

    def get_data_on_partition_and_row_key(self, partition_key, row_key):
        """
        Get a particular entity by passing the partition_key and the row_key of the data
        :param partition_key:
        :param row_key:
        :return: a dict object containing the data
        """
        return self.table_service.get_entity(self.table_name, partition_key,
                                             row_key)

    def get_data(self):
        """
        Get all the data from the table as a list of entities
        :return:
        """
        return [
            task for task in self.table_service.query_entities(self.table_name)
        ]

    def get_data_with_raw_filter(self, raw_query, select=None):
        """
        get data by using the filter conditions
        :param select: return the selected attributes
        :param raw_query: raw query format
        https://docs.microsoft.com/en-us/rest/api/storageservices/Querying-Tables-and-Entities?redirectedfrom=MSDN
        :return:
        """
        if isinstance(select, list):
            select = " , ".join(select)

        return [
            task for task in self.table_service.query_entities(
                self.table_name, filter=raw_query, select=select)
        ]

    @beartype
    def get_data_sql_interface(self, select: (None, str, list),
                               where: (None, str, list)):
        """
        Parse simple SQL-like syntax and convert it into table query syntax
        :param where: condition
        :param select: Column names to be selected
        :return:
        """
        select_ = None
        filter_ = None

        if isinstance(select, str):
            if select != "*":
                select_ = select

        if isinstance(select, list):
            select_ = ",".join(select)

        if isinstance(where, str):
            filter_ = sanitize_str(where)

        if isinstance(where, list):
            san_filter = [sanitize_str(f) for f in where]
            filter_ = " and ".join(san_filter)

        return self.get_data_with_raw_filter(raw_query=filter_, select=select_)

    def return_differential_data(self,
                                 differential_column=None,
                                 initially_fetch_data_greater_than_this=None,
                                 **options):
        """
        Get incremental data on the specified column. This function fetches data that was inserted
        after self.previous_diff_val, the value recorded the last time this function was called.

        :param differential_column: column on which the differential parameter will be implemented
        :param initially_fetch_data_greater_than_this: initial value to be fetched
        :param options:
        :return:
        """
        self.differential_column = differential_column if differential_column else self.differential_column

        if initially_fetch_data_greater_than_this and (self.previous_diff_val != initially_fetch_data_greater_than_this)\
                and (not self.previous_diff_val):
            self.previous_diff_val = initially_fetch_data_greater_than_this
        elif not self.previous_diff_val:
            self.previous_diff_val = f"{(dt.utcnow() - timedelta(minutes=1)).isoformat()[:-3]+'Z'}"

        if self.differential_column == "Timestamp":
            data = self.get_data_sql_interface(
                select="*",
                where=
                f"{self.differential_column} > datetime'{self.previous_diff_val}'"
            )
        else:
            data = self.get_data_sql_interface(
                select="*",
                where=f"{self.differential_column} > {self.previous_diff_val}")

        if len(data) == 0:
            return []

        pd.set_option("display.max_rows", None, "display.max_columns", None)
        pandas_df = pd.DataFrame(data)
        pandas_df.sort_values(by=self.differential_column, inplace=True)

        if self.differential_column == "Timestamp":
            self.previous_diff_val = pandas_df[
                self.differential_column].iloc[-1].isoformat()[:-9] + 'Z'
        else:
            self.previous_diff_val = pandas_df[
                self.differential_column].iloc[-1]
        return pandas_df

    @beartype
    def set_alert_on_live_data(self, parameter_name: str, threshold: int,
                               **options):
        """
        Set alert on data by giving parameter name and threshold
        :return:
        """
        alert = Alert(parameter_name=parameter_name,
                      threshold=threshold,
                      **options)
        alert.set_alert_on_live_data(
            diff_data_func=self.return_differential_data, **options)
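A minimal usage sketch for GetTableData, assuming a connection string in the environment, a hypothetical 'telemetry' table with a numeric temperature column, and the sanitize_str/Alert helpers referenced above being importable:

import os

reader = GetTableData(connection_string=os.environ['AZURE_STORAGE_CONNECTION_STRING'],
                      table_name='telemetry')

# Raw OData filter with a projection
hot = reader.get_data_with_raw_filter("temperature gt 30",
                                      select=['RowKey', 'temperature'])

# Incremental pull keyed on the service Timestamp column; returns a DataFrame,
# or [] when nothing new has arrived since the previous call.
df = reader.return_differential_data()
print(len(hot), df if isinstance(df, list) else len(df))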
Пример #29
0
    # define the bucket tgz file path
    bucket_tgz = bucket + '.tgz'

    # Check in the Azure Table whether this entity exists and whether its archiving status is "success".
    # If it does, archiving is not required: this bucket_id was already archived on this peer or another peer.
    # Otherwise, continue.

    bucket_is_archived = False
    record_found = False
    record_status = None

    try:
        with contextlib.redirect_stderr(None):
            record = table_service.get_entity(AZ_STORAGE_TABLE_NAME,
                                              AZ_BLOB_CONTAINER,
                                              bucket_id,
                                              select='status',
                                              timeout=60)
            record_status = record.status
            record_found = True
    except Exception:
        record_found = False

    if record_found and record_status == "success":
        bucket_is_archived = True
    else:
        bucket_is_archived = False

    if bucket_is_archived:
        print(
            "This bucket has been archived already, nothing to do and exit 0")
Пример #30
0
# This line is required for the script to run properly
GPIO.setwarnings(False)


# Capture SIGINT for cleanup when the script is aborted
def end_read(signal, frame):
    global continue_reading
    print("\nCtrl+C captured, ending read.")
    continue_reading = False
    GPIO.cleanup()


# Hook the SIGINT
signal.signal(signal.SIGINT, end_read)

# Welcome message
print("Press Ctrl-C to stop.")

# Keep reading the entity that says whether the door should be opened
while continue_reading:
    # Query the entity
    task = table_service.get_entity("Projects", "Door", "002")
    # Check whether this card is allowed through; if so, Value should be "1" and the LED lights up
    if task.Value == "1":
        GPIO.output(LEDPin, GPIO.HIGH)
    else:
        GPIO.output(LEDPin, GPIO.LOW)

    time.sleep(0.5)
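For completeness, a hedged sketch of the writer side that the loop above polls, assuming the same 'Projects' table and credentials:

# Grant access by flipping the flag the GPIO loop polls, then revoke it again.
table_service.merge_entity('Projects', {'PartitionKey': 'Door',
                                        'RowKey': '002',
                                        'Value': '1'})
time.sleep(5)
table_service.merge_entity('Projects', {'PartitionKey': 'Door',
                                        'RowKey': '002',
                                        'Value': '0'})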