def throttle_ip_requests(ip_entry):
    max_from_single_ip = 5
    table_service = TableService(account_name=os.environ['STORAGE_ACCOUNT'],
                                 account_key=os.environ['STORAGE_KEY'])
    table_service.create_table(
        table_name=os.environ['BILLING_TABLE'])  #create if it doesn't exist
    ip_row = None
    try:
        ip_row = table_service.get_entity(os.environ['BILLING_TABLE'],
                                          ip_entry['PartitionKey'],
                                          ip_entry['RowKey'])
    except AzureMissingResourceHttpError:
        pass  # no existing row for this IP yet
    if not ip_row:
        ip_entry['count'] = 1
        table_service.insert_entity(table_name=os.environ['BILLING_TABLE'],
                                    entity=ip_entry)
        ip_row = ip_entry
    else:
        lastdatetime = datetime.strptime(ip_row['time'], "%d/%m/%Y %H:%M:%S")
        currdatetime = datetime.strptime(ip_entry['time'], "%d/%m/%Y %H:%M:%S")
        tdelta = currdatetime - lastdatetime
        if tdelta.days < 1 and ip_row['count'] > max_from_single_ip:
            return True  # throttle this entry..
        elif tdelta.days > 0:  #over 1 day has passed, update the count to 1 and reset time
            ip_row['count'] = 1
            ip_row['time'] = currdatetime.strftime("%d/%m/%Y %H:%M:%S")
            table_service.update_entity(os.environ['BILLING_TABLE'], ip_row)
        else:  # less than 1 day but count is < max_from_single_ip, update the count
            ip_row['count'] = ip_row['count'] + 1
            table_service.update_entity(os.environ['BILLING_TABLE'], ip_row)

    # However we got here, do not throttle
    return False
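A minimal caller sketch for the function above; the partition/row key scheme and the 203.0.113.7 address are illustrative assumptions, and the same STORAGE_ACCOUNT, STORAGE_KEY and BILLING_TABLE environment variables must be set.

from datetime import datetime

client_ip = "203.0.113.7"  # example address
ip_entry = {
    'PartitionKey': 'ip',                    # assumed partitioning scheme
    'RowKey': client_ip.replace('.', '-'),   # assumed row key derived from the address
    'time': datetime.utcnow().strftime("%d/%m/%Y %H:%M:%S"),
    'count': 1,
}
if throttle_ip_requests(ip_entry):
    print("Throttling requests from", client_ip)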
Example 2
class AzureCosmosDb(TableStorage):
    def __init__(self, config: AzureCosmosDbConfig):
        self._tableService = TableService(account_name=config.account_name,
                                          account_key=config.account_key)
        self._tableName = config.table_name

    def check_entry_exists(self, entry):
        try:
            self.query(entry['PartitionKey'], entry['RowKey'])
            return True
        except AzureMissingResourceHttpError:
            return False

    def write(self, entry):
        prepared = entry_storage.EntryOperations.prepare_entry_for_insert(
            entry)

        if not self.check_entry_exists(prepared):
            self._tableService.insert_entity(self._tableName, prepared)
        else:
            self._tableService.update_entity(self._tableName, prepared)

    def query(self, partitionkey, rowkey):
        task = self._tableService.get_entity(self._tableName, partitionkey,
                                             rowkey)
        return task

    def delete(self, partitionkey, rowkey):
        self._tableService.delete_entity(self._tableName, partitionkey, rowkey)
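A hypothetical usage sketch for the wrapper above; it assumes an AzureCosmosDbConfig that exposes the account_name, account_key and table_name attributes the constructor reads, and that the entry_storage helper used by write() is importable.

config = AzureCosmosDbConfig(account_name='myaccount',   # assumed constructor signature
                             account_key='mykey',
                             table_name='entries')
storage = AzureCosmosDb(config)
storage.write({'PartitionKey': 'feed', 'RowKey': '001', 'title': 'hello'})
print(storage.query('feed', '001'))
storage.delete('feed', '001')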
Example 3
def main(myblob: func.InputStream, outputblob: func.Out[func.InputStream]):
    logging.info(f"Python blob trigger function processed blob \n"
                 f"Name: {myblob.name}\n"
                 f"Blob Size: {myblob.length} bytes")
    # Load the MNIST model from pickled weights and architecture JSON
    weights = pickle.load(open(os.environ['ModelWeightsPath'], 'rb'))
    model_json = pickle.load(open(os.environ['ModelJSONPath'], 'rb'))  # avoid shadowing the json module
    model = model_from_json(model_json)
    model.set_weights(weights)
    # Read the image stream
    imgStream = myblob.read(-1)
    # Convert to an image object (np.fromstring is deprecated, use frombuffer)
    nparr = np.frombuffer(imgStream, np.uint8)
    img = cv2.imdecode(nparr, cv2.IMREAD_GRAYSCALE)
    img = img.reshape(1, 28, 28, 1)
    logging.info("Image shape: %s", img.shape)
    #Predicting on image
    prediction = model.predict(img)
    #Maximum probability prediction
    logging.info(np.argmax(prediction))
    #Connection to table storage
    table_service = TableService(
        connection_string=os.environ['AzureWebJobsStorage'])
    #Storing in table storage (adding 1 hour to convert servertime to my timezone)
    values = [
        os.path.basename(myblob.name),
        str(datetime.datetime.now() + datetime.timedelta(hours=1)),
        str(np.argmax(prediction))
    ]
    names = ["PartitionKey", "RowKey", "Prediction"]
    dictionary = dict(zip(names, values))
    table_service.insert_entity('imagedata', dictionary)

    #Saving to output container - Shows how to save a possible processed image
    outputblob.set(imgStream)
Example 4
def add_pipeline(account_name,
                 account_key,
                 table_name,
                 partition_name,
                 filter_name,
                 filter_value,
                 name1,
                 value1,
                 name2=None,
                 value2=None,
                 name3=None,
                 value3=None):
    print("Adding a new entry")
    new_entry = {}
    new_entry["RowKey"] = generate_row_key()
    new_entry["PartitionKey"] = partition_name
    new_entry[filter_name] = filter_value
    new_entry[name1] = value1.lower()
    if name2 is not None and value2 is not None:
        new_entry[name2] = value2.lower()
    if name3 is not None and value3 is not None:
        new_entry[name3] = value3.lower()
    print(new_entry)
    table_service = TableService(account_name=account_name,
                                 account_key=account_key)
    table_service.insert_entity(table_name, new_entry)
def store_predictions_in_table(predictions,
                               times,
                               table_name="predictedSoilMoistureMessages"):

    # Connect to account
    table_service = TableService(
        account_name='soilhumiditydata293s',
        account_key=
        '4PSsEO1xBAIdq3/MppWm+t6eYHi+CWhVn6xNZ6i4mLVgm50K8+NK6lA94v8MxG0bvVEfYCvsv1suxCyCnUYd0A=='
    )

    # Delete existing table predictions
    table = table_service.query_entities(table_name)
    for entry in table:
        table_service.delete_entity(table_name, entry['PartitionKey'],
                                    entry['RowKey'])  #'tasksSeattle', '001')

    # Store values in table
    for i in range(len(predictions)):
        new_entity = Entity()
        new_entity.PartitionKey = datetime.strftime(times[i],
                                                    "%Y-%m-%d %H:%M:%S")
        new_entity.RowKey = str(i)
        new_entity['soilmoistureprediction'] = str(predictions[i])

        table_service.insert_entity(table_name, new_entity)
def main(req: func.HttpRequest) -> func.HttpResponse:
    logging.info('Python HTTP trigger function processed a request.')

    with open('config.json', 'r') as config_file:
        config_data = json.load(config_file)

    connectionstring = config_data["connectionstring"]
    table_service = TableService(connection_string=connectionstring)

    table = config_data["table"]
    tableExists = table_service.create_table(table)

    courses = readcsv()

    for item in courses:
        # print(item)
        task = Entity()
        task.PartitionKey = item.subject
        task.RowKey = item.instructor
        task.lectures = item.lectures
        task.labs = item.labs
        task.points = item.points
        task.isWeekend = item.isWeekend
        table_service.insert_entity(table, task)

    return func.HttpResponse(
        "Cosmos DB - Table API example database is created.")
class AzureTableProvider:
    def __init__(self, account_name, key, table_name):
        self.target_table = table_name

        if not account_name or not key:
            raise Exception('Account or key not specified')

        self.table_service = TableService(account_name=account_name, account_key=key)
        self.table_service.create_table(self.target_table)

    def get_all(self):
        return self.table_service.query_entities(self.target_table)

    def remove(self, item):
        query_str = "Link eq '%s'" % item
        tasks = self.table_service.query_entities(self.target_table, filter=query_str)
        # query_entities returns a generator; calling any() on it would consume the
        # first match, so iterate directly and delete the first hit.
        for task in tasks:
            self.table_service.delete_entity(self.target_table, task.PartitionKey, task.RowKey)
            return True
        return False

    def add(self, item):
        track = {
            'PartitionKey': 'MusicBotEntry',
            'RowKey': str(uuid.uuid4()),
            'Link': item
        }
        self.table_service.insert_entity(self.target_table, track)
Example 8
def update_pipeline(account_name,
                    account_key,
                    table_name,
                    partition_name,
                    filter_name,
                    filter_value,
                    name1,
                    value1,
                    name2=None,
                    value2=None,
                    name3=None,
                    value3=None,
                    name4=None,
                    value4=None):
    table_service = TableService(account_name=account_name,
                                 account_key=account_key)
    entities = table_service.query_entities(table_name,
                                            filter=filter_name + " eq '" +
                                            filter_value + "'")

    count = 0
    for entity in entities:
        count = count + 1
        add = False
        if name1 in entity and entity[name1] != value1.lower():
            add = True
        entity[name1] = value1.lower()

        if name2 is not None and value2 is not None:
            if name2 in entity and entity[name2] != value2.lower():
                add = True
            entity[name2] = value2.lower()

        if name3 is not None and value3 is not None:
            if name3 in entity and entity[name3] != value3.lower():
                add = True
            entity[name3] = value3.lower()

        if name4 is not None and value4 is not None:
            if name4 in entity and entity[name4] != value4.lower():
                add = True
            entity[name4] = value4.lower()

        if not add:
            table_service.update_entity(table_name, entity)
            print("Updating existing entry")
        else:
            guid = generate_row_key()
            entity["RowKey"] = guid
            table_service.insert_entity(table_name, entity)
            print("Adding new entry since one already existed")
        print(entity)
        break

    if count == 0:
        add_pipeline(account_name, account_key, table_name, partition_name,
                     filter_name, filter_value, name1, value1, name2, value2,
                     name3, value3)
    print("Done")
Example 9
class InputHandler:
    def __init__(self, env_vars):
        # Blob service init
        self.blob_service_client = BlobServiceClient.from_connection_string(
            env_vars.get("BLOB_CONNECTION_STRING"))
        self.blob_container_name = env_vars.get("BLOB_CONTAINER_NAME")
        # Table service init
        self.table_service = TableService(
            os.environ.get("DB_ACCOUNT_NAME"),
            account_key=env_vars.get("TABLE_KEY"))
        self.table_name = env_vars.get("DB_TABLE_NAME")

    def upload_picture_to_blob(self, img, img_name):
        # Create connection to blob storage
        blob_client = self.blob_service_client.get_blob_client(
            container=self.blob_container_name, blob=img_name)

        # Upload image to blob
        blob_client.upload_blob(img)

    def upload_json_prices(self, prices):
        error = False
        for val in prices:  # Loop through new_prices and add to database
            entry = Entity()
            try:
                entry.PartitionKey = val["county"]
                entry.RowKey = str(uuid.uuid4())  # Generate new random UUID
                entry.price = val["price"]
                entry.location = val["location"]
                if (val["fueltype"] == "diesel"
                        or val["fueltype"] == "gasoline"):
                    entry.fueltype = val["fueltype"]
                else:
                    entry.fueltype = "unknown"
                self.table_service.insert_entity(self.table_name, entry)
            except AttributeError:
                error = True
                print("Error trying to parse JSON object: " + val)

        if error:
            return "Something went wrong. Try check your syntax"
        else:
            return "Inserted successfully"

    def upload_price(self, price, fuel_type, location):
        entry = Entity()
        try:
            entry.PartitionKey = "trondelag"
            entry.RowKey = str(uuid.uuid4())  # Generate new random UUID
            entry.price = price
            entry.location = location
            entry.fueltype = fuel_type
            self.table_service.insert_entity(self.table_name, entry)
        except AttributeError:
            print("Error trying to upload: Fuel type '" + fuel_type +
                  "' Price '" + price + "'")
            return "Something went wrong. Try check your syntax"
        return "Price inserted successfully"
Example 10
def main(req: func.HttpRequest) -> func.HttpResponse:
    logging.info('Python HTTP Submit trigger received a request')

    logging.debug('Creating blob service')
    table_service = TableService(
        account_name=os.getenv('AZURE_STORAGE_ACCOUNT'),
        account_key=os.getenv('AZURE_STORAGE_ACCESS_KEY')
    )

    headers_dict = {
            "Access-Control-Allow-Credentials": "true",
            "Access-Control-Allow-Origin": "*",
            "Access-Control-Allow-Methods": "Post"
    }
    schema = getjob_schema.GetJobSchema()
    try:
        getjob_dict = schema.loads(req.get_body())
    except ValidationError:
        return func.HttpResponse(f'Failed to validate getjob schema',
                                 headers=headers_dict,
                                 status_code=400
                                 )
    if getjob_dict['num_messages'] != 1:
        return func.HttpResponse(f'Number of messages should be 1',
                                 headers=headers_dict,
                                 status_code=400
                                 )
    table_name = os.getenv('AZURE_TABLE_NAME')
    entity = None
    entities = table_service.query_entities(table_name, filter="PartitionKey eq 'await'")
    for entity in entities:
        break
    if not entity:
        return func.HttpResponse(f'No job found',
                                 headers=headers_dict,
                                 status_code=400
                                 )
    message = {}
    message['crop'] = entity.crop
    message['geometry'] = json.loads(entity.geometry)
    message['irrigated'] = entity.irrigated
    message['guid'] = entity.RowKey
    message['area_name'] = entity.area_name
    message['planting_date'] = entity.planting_date
    message['fraction'] = entity.fraction

    table_service.delete_entity(table_name, entity.PartitionKey, entity.RowKey)
    entity.PartitionKey = 'processing'

    table_service.insert_entity(table_name, entity)

    return func.HttpResponse(json.dumps(message),
                             headers=headers_dict,
                             mimetype='application/json'
                             )
def azureTable():
    # table_service = TableService(account_name='', account_key='')
    table_service = TableService(
        connection_string='DefaultEndpointsProtocol=https;AccountName=sauokgp;AccountKey=113mdwUqIiqt4K2HonK80HakIOplxYZINmQME5KB1IZfP+v3JHZK64wpoTP5NBFaG0MaO/TVqA0nW4KuCINTow==;EndpointSuffix=core.windows.net')

    #table_service.create_table('bitcoinData')
    i = 0
    for r in ref:
        bitcoin = {'PartitionKey': Bitcoinz, 'RowKey': str(r), 'Title': title[i], 'priority': 200}
        table_service.insert_entity('HelpPage', bitcoin)
        i += 1
Example 12
def tableStorage(table_name, partition_key, row_key, hins_processed, timesaved,
                 time_by_system, time_by_user, requests):

    try:
        table_service = TableService(
            account_name=config.AZURE['STORAGE_ACCOUNT_NAME'],
            account_key=config.AZURE['STORAGE_ACCOUNT_KEY'])

        entity = {
            'PartitionKey': partition_key,
            'RowKey': row_key,
            'HinsProcessed': hins_processed,
            'TimeSaved': timesaved,
            'TimeBySystem': time_by_system,
            'TimeByUser': time_by_user,
            'Requests': requests
        }

        if not table_service.exists(table_name, timeout=None):
            table_service.create_table(table_name, fail_on_exist=False)

        try:
            table_service.insert_entity(table_name, entity)
            print("Entity Doesn't Exist")
            print("Creating Entity\n")
        except Exception as e:
            print("Entity Exists")
            print("Updating entity\n")

            currentEntity = table_service.get_entity(table_name, partition_key,
                                                     row_key)
            tempHinProcessed = currentEntity.HinsProcessed + hins_processed
            tempTimeSaved = currentEntity.TimeSaved + timesaved
            tempTimeBySystem = currentEntity.TimeBySystem + time_by_system
            tempTimeByUser = currentEntity.TimeByUser + time_by_user
            tempRequest = currentEntity.Requests + requests

            entity = {
                'PartitionKey': partition_key,
                'RowKey': row_key,
                'HinsProcessed': tempHinProcessed,
                'TimeSaved': tempTimeSaved,
                'TimeBySystem': tempTimeBySystem,
                'TimeByUser': tempTimeByUser,
                'Requests': tempRequest
            }

            table_service.update_entity(table_name,
                                        entity,
                                        if_match='*',
                                        timeout=None)

    except Exception as e:
        print(e)
Example 13
def main(req: func.HttpRequest) -> func.HttpResponse:
    logging.info('Python HTTP trigger function processed a request.')

    CF.BaseUrl.set("https://emotiontrack.cognitiveservices.azure.com/face/v1.0")
    CF.Key.set("4a1e0d41a8494d71ac0b9028464d8e62")
    
    rowkey = req.params.get('rowkey')
    if not rowkey:
        logging.error("Missing parameter(s)")
        return func.HttpResponse("Missing one or more parameter.", status_code=400)
    face = req.get_json()
    face_rect = face['faceRectangle']

    table = TableService(connection_string=conn_string)
    if not table:
        logging.error("Failed to connect to the storage")
        return func.HttpResponse("Failed to connect to the storage. Please try again later.", status_code=500)

    test_img = getFaceImage(table, rowkey, face_rect)
    test_imgIO = io.BytesIO()
    test_img.save(test_imgIO, format='JPEG')  # Pillow expects 'JPEG', not 'JPG'

    entities = table.query_entities(table_name, filter=None)

    isMatch = False
    for entity in entities:
        img = getFaceImage(table, entity.RowKey, entity.rect)
        imgIO = io.BytesIO()
        img.save(imgIO, format='JPEG')

        try:
            res = CF.face.verify(test_imgIO, imgIO)
        except Exception:
            # Skip entries that fail verification instead of aborting the request
            continue

        if res['isIdentical']:
            # update entry
            entity.RowKey = rowkey
            entity.rect = face_rect
            table.update_entity(table_name, entity)

            isMatch = True
            break

    if not isMatch:
        # new entry
        
        entity = Entity()
        entity.PartitionKey = "1"
        entity.RowKey = str(uuid.uuid4())
        entity.rect = face_rect
        
        table.insert_entity(table_name, entity)

    return func.HttpResponse(entity.RowKey, status_code=200)
Example 14
def main(req: func.HttpRequest) -> func.HttpResponse:
    logging.info('Python HTTP Submit trigger received a request')

    logging.debug('Creating blob service')
    table_service = TableService(
        account_name=os.getenv('AZURE_STORAGE_ACCOUNT'),
        account_key=os.getenv('AZURE_STORAGE_ACCESS_KEY'))

    headers_dict = {
        "Access-Control-Allow-Credentials": "true",
        "Access-Control-Allow-Origin": "*",
        "Access-Control-Allow-Methods": "Post"
    }

    schema = submit_schema.SubmitMessageSchema()
    try:
        job_dict = schema.loads(req.get_body())
    except ValidationError:
        error = f'Failed to validate the submit message'
        return func.HttpResponse(error, headers=headers_dict, status_code=400)

    table_name = os.getenv('AZURE_TABLE_NAME')
    table_service.create_table(table_name)
    guid = uuid.uuid4()
    try:
        job_dict = schema.dump(job_dict)
    except ValidationError:
        error = f'Failed to submit job'
        return func.HttpResponse(error, headers=headers_dict, status_code=400)
    entity = Entity()
    entity.PartitionKey = 'await'
    entity.RowKey = str(guid)
    entity.Error = ""
    entity.area_name = job_dict['area_name']
    entity.crop = job_dict['crop']
    entity.planting_date = job_dict['planting_date']
    entity.irrigated = job_dict['irrigated']
    entity.fraction = job_dict['fraction']
    entity.geometry = json.dumps(job_dict['geometry'])
    try:
        table_service.insert_entity(table_name, entity)
    except TypeError:
        error = f'Failed to insert to table'
        return func.HttpResponse(error, headers=headers_dict, status_code=400)

    response_dict = {}
    response_dict['guid'] = guid
    schema = submit_schema.SubmitResponseSchema()
    response_message = schema.dumps(response_dict)
    return func.HttpResponse(response_message,
                             headers=headers_dict,
                             mimetype='application/json')
Example 15
def azureTable():
    # table_service = TableService(account_name='', account_key='')
    table_service = TableService(
        connection_string='')

    #table_service.create_table('bitcoinData')
    i = 0
    for t in time:
        bitcoin = {'PartitionKey': Bitcoinz, 'RowKey': str(t), 'Open': EntityProperty(EdmType.DOUBLE, openz[i]),
                   'Close': EntityProperty(EdmType.DOUBLE, close[i]), 'High': EntityProperty(EdmType.DOUBLE, high[i]),
                   'Low': EntityProperty(EdmType.DOUBLE, low[i]), 'Time': timez[i], 'priority': 200}
        table_service.insert_entity('bitcoinData', bitcoin)
        i += 1
Example 16
def main(event: func.EventHubEvent):
    handler = LoggingHandler(os.environ['APPINSIGHTS_INSTRUMENTATIONKEY'])
    logging.basicConfig(handlers=[ handler ], format='%(levelname)s: %(message)s', level=logging.DEBUG)

    tc = TelemetryClient(os.environ['APPINSIGHTS_INSTRUMENTATIONKEY'])
    tc.track_event("Incoming event")
    tc.flush()

    logging.info('Function triggered to process a message: %s', event)
    logging.info('  body: %s', event.get_body())
    logging.info('  EnqueuedTimeUtc: %s', event.enqueued_time)
    logging.info('  SequenceNumber: %s', event.sequence_number)
    logging.info('  Offset: %s', event.offset)
    logging.info('  Partition: %s', event.partition_key)
    logging.info('  Metadata: %s', event.iothub_metadata)

    table_service = TableService(connection_string=os.environ['AzureTableConnectionString'])

    for datapoint in json.loads(event.get_body()):
        # Expected data format:
        #   {"timestamp": 1564598054, "deviceid": "Node1", "scale": 2, "temperature": 1.1,"weight": 10000}
        if datapoint is not None and 'deviceid' in datapoint and \
           'timestamp' in datapoint and 'scale' in datapoint and \
           'weight' in datapoint:
            logging.debug('  datapoint: %s', (datapoint))
            # deviceid is used as partition key.
            # {timestamp}-{scale} is used as RowKey
            # timestamp and scale number are duplicated as an int columns
            # to keep them searchable.  The rest of the datapoint elements
            # are added as columns as well.
            history = {}
            history['PartitionKey'] = datapoint.pop('deviceid')
            history['RowKey'] = str(datapoint['timestamp']) + '-' + str(datapoint['scale'])
            history.update(datapoint.items())
            logging.debug('history: %s' % (history))
            table_service.insert_entity(TABLE_NAME_HISTORICAL_DATA, history)
            logging.info('Added historical table data: %s', (history))

            # Touch/create the row in the config table for each reported scale with latest weight
            configupdate = {}
            configupdate['PartitionKey'] = history['PartitionKey']
            configupdate['RowKey'] = str(history['scale'])
            configupdate['weight'] = history['weight']
            if 'temperature' in history:
                configupdate['temperature'] = history['temperature']
            logging.info('config update: %s' % (configupdate))
            logging.info('Writing to table: %s' % (TABLE_NAME_CONFIGURATION))
            table_service.insert_or_merge_entity(TABLE_NAME_CONFIGURATION, configupdate)
            logging.info('Updated configuration table entry: %s', (configupdate))
        else:
            logging.info('  Invalid datapoint: %s', (datapoint))
Example 17
def store_in_table_storage(account_name: str, account_key: str, data: dict):
    protocol: str = 'https'
    table_endpoint: str = 'https://expensely-db.table.cosmos.azure.com:443/'

    connection_string = "DefaultEndpointsProtocol={};AccountName={};AccountKey={};TableEndpoint={};".format(
        protocol, account_name, account_key, table_endpoint)

    # A single TableService built from the connection string is enough; the
    # earlier instance created from account_name/account_key was never used.
    table_service = TableService(endpoint_suffix="table.cosmos.azure.com",
                                 connection_string=connection_string)

    table_service.insert_entity('prod', data)
Example 18
def main(req: func.HttpRequest) -> func.HttpResponse:
    logging.info('Python HTTP trigger function processed a request.')
    KeyVault_DNS = os.environ["KeyVault_DNS"]
    SecretName = os.environ["SecretName"]

    table_name = req.headers.get('name')

    value = req.get_json()

    if table_name:
        try:  # Try with managed identity, otherwise fall back to a Service Principal
            creds = ManagedIdentityCredential()
            client = SecretClient(vault_url=KeyVault_DNS, credential=creds)
            retrieved_secret = client.get_secret(SecretName)
        except Exception:
            creds = ClientSecretCredential(
                client_id=os.environ["SP_ID"],
                client_secret=os.environ["SP_SECRET"],
                tenant_id=os.environ["TENANT_ID"])
            client = SecretClient(vault_url=KeyVault_DNS, credential=creds)
            retrieved_secret = client.get_secret(SecretName)

        table_service = TableService(connection_string=retrieved_secret.value)

        if table_service.exists(table_name):
            if 'PartitionKey' not in value.keys():  #This is mandatory
                value['PartitionKey'] = 'reference'

            if 'RowKey' not in value.keys():  #This is mandatory too
                value['RowKey'] = '001'
            try:
                table_service.update_entity(table_name=table_name,
                                            entity=value)
            except Exception:  # entity does not exist yet; insert it instead
                table_service.insert_entity(table_name=table_name,
                                            entity=value)
        else:
            ret = dict()
            ret['result'] = "Please create the table!"
            return func.HttpResponse(json.dumps(ret), status_code=400)
        ret = dict()
        ret['result'] = "Success"
        return func.HttpResponse(json.dumps(ret), status_code=200)

    else:
        ret = dict()
        ret['result'] = "Please pass a name!!"
        return func.HttpResponse(json.dumps(ret), status_code=400)
Example 19
def add_result(results_entity):
    """ Adds a new result to the ``rosiepi`` storage table.

    :param results_entity: A ``azure.cosmosdb.table.models.Entity`` object
                            containing the results to add to the storage
                            table. ``Entity`` object can be retrieved from
                            ``lib/result.py::Result.results_to_table_entity()``.

    :return: The entity's Etag if successful. None if failed.
    """

    response = None
    if isinstance(results_entity, Entity):
        table = TableService(connection_string=os.environ['APP_STORAGE_CONN_STR'])

        try:
            response = table.insert_entity('rosiepi', results_entity)
        except Exception as err:
            logging.info(f'Failed to add result to rosiepi table. Error: {err}\nEntity: {results_entity}')
    else:
        logging.info(
            'Result not added to rosiepi table. Supplied result was an incorrect '
            'type. Should be azure.cosmosdb.table.models.Entity. Supplied type: '
            f'{type(results_entity)}'
        )

    return response
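A hedged usage sketch; in the real pipeline the entity would come from lib/result.py::Result.results_to_table_entity() as the docstring notes, so the keys below are only placeholders.

from azure.cosmosdb.table.models import Entity

results_entity = Entity()
results_entity.PartitionKey = 'rosiepi'   # placeholder partition key
results_entity.RowKey = 'build-0001'      # placeholder row key
results_entity.outcome = 'passed'         # placeholder result field

etag = add_result(results_entity)
if etag is None:
    print('Result was not added; check the function log for details.')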
Example 20
class TableSaver:
    def __init__(self, account_name: str, sas_token: str, table_name: str):
        self.account_name = account_name
        self.table_name = table_name
        self.sas_token = sas_token
        self.table_service = TableService(account_name=account_name,
                                          sas_token=sas_token)

    def save_count(self, fn: str, count: int):
        """Save a manual count of cars for image `fn` to Azure table."""
        new_count = {
            'PartitionKey': Path(fn).name,
            'RowKey': str(time.time()),
            'count': count
        }
        self.table_service.insert_entity(self.table_name, new_count)
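A short usage sketch for TableSaver; the account name, SAS token and table name are placeholders.

saver = TableSaver(account_name='myaccount',
                   sas_token='?sv=...&sig=...',   # SAS token with table write access
                   table_name='carcounts')
saver.save_count('frames/cam1_0001.jpg', 12)      # 12 cars counted in this frame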
Example 21
def writeResults(results_df) :
    # change the type of any non-string columns to string
    for col in [col for col in results_df.columns if type(col) != str] :
        results_df[col] = results_df[col].astype(str)
    results_df["PartitionKey"] = results_df["season"] + "-" + results_df["week"]
    results_df["RowKey"] = results_df["home_team"] + "-at-" + results_df["away_team"]
    print("OK")
    table_service = TableService(account_name='predictomatic', account_key='H2LyENOrQJ+QxAHMt3eU6+n4/VMJ3wBFzL9j/eAwf6QYQfkOJKV2r+ArDKYkz1/tToztH/Wp+kDEvfhRlUqaiQ==')
    for key, value in results_df.iterrows() :
        linq = "PartitionKey eq '" + value["PartitionKey"] + "' and RowKey eq '" + value["RowKey"] + "'"
        print(linq)
        if len(list(table_service.query_entities("results", filter=linq))) == 0 :
            table_service.insert_entity("results", value.to_dict())



#with TableService(account_name='predictomatic', account_key='H2LyENOrQJ+QxAHMt3eU6+n4/VMJ3wBFzL9j/eAwf6QYQfkOJKV2r+ArDKYkz1/tToztH/Wp+kDEvfhRlUqaiQ==') as table_service :
Example 22
def handle_RSVP():
    print('User Code Is: {}'.format(request.form['userCode']))
    table_service = TableService(account_name=app.config['StorageName'],
                                 account_key=app.config['StorageKey'])
    now = datetime.now()
    time = now.strftime("%m-%d-%Y %H-%M-%S")
    rsvp = {
        'PartitionKey': 'rsvp',
        'RowKey': time,
        'GroupID': request.form['userCode'],
        'comments': request.form['comment'],
        'Status': request.form['action']
    }
    print(rsvp)
    table_service.insert_entity('weddingrsvptable', rsvp)
    redirectlink = '/Thankyou/{}'.format(request.form['userCode'])
    return redirect(redirectlink)
Example 23
def main(req: func.HttpRequest) -> func.HttpResponse:
    logging.info('Python HTTP Submit trigger received a request')

    logging.debug('Creating blob service')

    headers_dict = {
            "Access-Control-Allow-Credentials": "true",
            "Access-Control-Allow-Origin": "*",
            "Access-Control-Allow-Methods": "Post"
    }
    schema = put_schema.DoneSchema()
    try:
        done_dict = schema.loads(req.get_body())
    except ValidationError:
        error = f'Failed to validate the done message'
        return func.HttpResponse(error,
                                 headers=headers_dict,
                                 status_code=400
                                 )

    table_name = os.getenv('AZURE_TABLE_NAME')
    table_service = TableService(
        account_name=os.getenv('AZURE_STORAGE_ACCOUNT'),
        account_key=os.getenv('AZURE_STORAGE_ACCESS_KEY')
    )
    try:
        entity = table_service.get_entity(table_name, 'processing', done_dict['guid'])
    except AzureMissingResourceHttpError:
        error = f'Failed to put done message'
        return func.HttpResponse(error,
                                 headers=headers_dict,
                                 status_code=400
                                 )
    if not done_dict['error']:
        entity.Error = ""
    else:
        entity.Error = done_dict['error']
    table_service.delete_entity(table_name, entity.PartitionKey, entity.RowKey)
    entity.PartitionKey = 'done'
    table_service.insert_entity(table_name, entity)

    return func.HttpResponse('Message was successfully inserted into Done queue',
                             headers=headers_dict
                             )
Example 24
def main(msg: func.QueueMessage) -> None:
    body = msg.get_body().decode('utf-8')
    body_json = json.loads(body)
    table_service = TableService(connection_string=os.environ["TableStorage"])

    logging.info('Python queue trigger function processed a queue item: %s',
                 msg.get_body().decode('utf-8'))

    task = Entity()
    task.PartitionKey = body_json["party"]
    task.RowKey = str(uuid.uuid4())
    task.count = body_json["count"]
    task.electoralPlace = body_json["electoralPlace"]
    task.electoralUnit = body_json["electoralUnit"]

    table_service.insert_entity('votes', task)

    # datetime object containing current date and time
    now = datetime.now()
    logging.info(now.strftime("%d/%m/%Y %H:%M:%S") + ' - Processing done')
Example 25
def azureTable():
    # table_service = TableService(account_name='', account_key='')
    table_service = TableService(
        connection_string=
        'DefaultEndpointsProtocol=https;AccountName=sauokgp;AccountKey=113mdwUqIiqt4K2HonK80HakIOplxYZINmQME5KB1IZfP+v3JHZK64wpoTP5NBFaG0MaO/TVqA0nW4KuCINTow==;EndpointSuffix=core.windows.net'
    )

    #table_service.create_table('bitcoinData')
    i = 0
    for t in time:
        bitcoin = {
            'PartitionKey': Bitcoinz,
            'RowKey': str(t),
            'Open': EntityProperty(EdmType.DOUBLE, openz[i]),
            'Close': EntityProperty(EdmType.DOUBLE, close[i]),
            'High': EntityProperty(EdmType.DOUBLE, high[i]),
            'Low': EntityProperty(EdmType.DOUBLE, low[i]),
            'Time': timez[i],
            'priority': 200
        }
        table_service.insert_entity('bitcoinData', bitcoin)
        i += 1
Example 26
def main(req: func.HttpRequest) -> func.HttpResponse:
    logging.info('Starting insert row.')

    table_name = req.headers.get('name')
    if not table_name:  # If name wasn't passed as a header, search for it in the parameters
        table_name = req.params.get('name')

    value = req.get_json()

    if table_name:
        retrieved_secret = getConnectionString()

        table_service = TableService(connection_string=retrieved_secret.value)

        if table_service.exists(table_name):
            if 'PartitionKey' not in value.keys():  #This is mandatory
                value['PartitionKey'] = 'reference'

            if 'RowKey' not in value.keys():  #This is mandatory too
                value['RowKey'] = '001'
            try:
                table_service.update_entity(table_name=table_name,
                                            entity=value)
            except Exception:  # entity does not exist yet; insert it instead
                table_service.insert_entity(table_name=table_name,
                                            entity=value)
        else:
            ret = dict()
            ret['result'] = "Please create the table!"
            return func.HttpResponse(json.dumps(ret), status_code=400)
        ret = dict()
        ret['result'] = "Success"
        return func.HttpResponse(json.dumps(ret), status_code=200)

    else:
        ret = dict()
        ret['result'] = "Please pass a name!!"
        return func.HttpResponse(json.dumps(ret), status_code=400)
Example 27
class AzureTableStorageHandler(logging.Handler):
    """
    A handler class which writes formatted logging records to Azure Table Storage.
    """
    def __init__(self, account_name, account_key, table_name, *, level=logging.NOTSET):
        """
        Setup TableService and the specified table for logging.
        """
        super().__init__(level=level)
        self.table_service = TableService(account_name=account_name, account_key=account_key)
        self.table_name = table_name
        if not self.table_service.exists(self.table_name):
            self.table_service.create_table(self.table_name)
        self.formatter = logging.Formatter("%(message)s")
        self.executor = ThreadPoolExecutor(max_workers=1, thread_name_prefix="AzHndlr")
        self.epoch_max = datetime(MAXYEAR, 12, 31, 23, 59, 59, 999999, timezone.utc).timestamp()

    def insert_log(self, record):
        """
        Insert log to Azure Table Storage.
        """
        entity = {
            "PartitionKey": record.name,
            "RowKey": str(self.epoch_max - time()),
            "LocalTimestamp": self.formatter.formatTime(record),
            "LevelName": record.levelname,
            "Level": record.levelno,
            "Message": self.format(record)
        }
        self.table_service.insert_entity(self.table_name, entity)

    def emit(self, record):
        """
        Emit a record.

        This method just submits the logging task to the worker thread and returns immediately.
        """
        self.executor.submit(self.insert_log, record)
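A hypothetical wiring example for the handler above; the account, key and table names are placeholders.

import logging

logger = logging.getLogger('myapp')
logger.setLevel(logging.INFO)
logger.addHandler(AzureTableStorageHandler(account_name='myaccount',
                                           account_key='mykey',
                                           table_name='applogs',
                                           level=logging.INFO))
logger.info('Application started')  # written to the applogs table by the worker thread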
Example 28
class AzureTable():
    def __init__(self, account_name, account_key):
        self.table_service = TableService(account_name=account_name,
                                          account_key=account_key)

    def create_table(self, table_name):
        return self.table_service.create_table(table_name)

    def exists_table(self, table_name):
        return self.table_service.exists(table_name)

    def insert_or_replace_entity(self, table_name, partition_key, row_key,
                                 **kwargs):
        try:
            entity = self.table_service.get_entity(table_name, partition_key,
                                                   row_key)
        except Exception:
            # Insert a new entity
            entity = {'PartitionKey': partition_key, 'RowKey': row_key}

        for (k, v) in kwargs.items():
            entity[k] = v

        return self.table_service.insert_or_replace_entity(table_name, entity)

    def insert_or_replace_entity2(self, table_name, entity):
        return self.table_service.insert_or_replace_entity(table_name, entity)

    def insert_entity(self, table_name, entity):
        return self.table_service.insert_entity(table_name, entity)

    def update_entity(self, table_name, entity):
        return self.table_service.update_entity(table_name, entity)

    def get_entity(self, table_name, partition_key, row_key):
        return self.table_service.get_entity(table_name, partition_key,
                                             row_key)

    def delete_entity(self, table_name, partition_key, row_key):
        self.table_service.delete_entity(table_name, partition_key, row_key)

    def delete_table(self, table_name):
        return self.table_service.delete_table(table_name)

    def get_entities(self, table_name, partition_key):
        filter = "PartitionKey eq '{0}'".format(partition_key)
        return self.table_service.query_entities(table_name, filter)
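A hedged usage sketch for this wrapper; account credentials and table/entity values are placeholders.

store = AzureTable(account_name='myaccount', account_key='mykey')
if not store.exists_table('tasktable'):
    store.create_table('tasktable')
store.insert_or_replace_entity('tasktable', 'tasksSeattle', '001',
                               description='Take out the trash', priority=200)
print(store.get_entity('tasktable', 'tasksSeattle', '001'))
print(list(store.get_entities('tasktable', 'tasksSeattle')))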
Example 29
def create():
    error = None
    print(request)
    print(request.data)
    print(request.form)
    print(request.files['artPhoto'])
    if 'artPhoto' not in request.files:
        error = 'artPhoto is required.'
    elif request.files["artPhoto"].filename == "":
        error = "filename required"
    elif not allowed_file(request.files["artPhoto"].filename):
        error = "not allowed file"
    elif 'artName' not in request.form:
        error = 'artName is required.'
    elif 'artistName' not in request.form:
        error = 'artistName is required.'
    elif 'curatorName' not in request.form:
        error = 'curatorName is required.'
    elif 'curatorNotes' not in request.form:
        error = 'curatorNotes is required.'
    else:
        newId = str(uuid.uuid1())
        artPhoto = request.files["artPhoto"]
        print(artPhoto)
        fileExtension = artPhoto.filename.rsplit('.', 1)[1]
        filename = secure_filename(newId + "_photo." + fileExtension)
        print(filename)
        # artPhoto.save(os.path.join(current_app.config['UPLOAD_FOLDER'], filename))

        connect_str = "DefaultEndpointsProtocol=https;AccountName=artsharestorage;AccountKey=zCN3F1TuFjeSw8alIDF0bcvSQoLe5tJHRcavpRKZ31JUUkPuHLtVSqP9WJ3oQU7ty/ZAisWl8CDcFtZHsZ15MQ==;EndpointSuffix=core.windows.net"
        blob_service_client = BlobServiceClient.from_connection_string(connect_str)
        container_name = "artuploadphotos"
        blob_client = blob_service_client.get_blob_client(container=container_name, blob=filename)
        # print(artPhoto.read())
        # print(artPhoto.stream.read())
        blob_client.upload_blob(artPhoto.stream.read())

        print(blob_client)
        # apiObject.blobUri = "https://" + azureStorageAccountName + ".blob.core.windows.net/" + exports.blobContainer + "/" + apiObject.id + "." + fileExtension;
        print(blob_client.get_blob_properties())
        photoUrl = blob_client.url
        print(photoUrl)
        artName = request.form["artName"]
        print(artName)
        artistName = request.form["artistName"]
        print(artistName)
        curatorName = request.form["curatorName"]
        print(curatorName)
        curatorNotes = request.form["curatorNotes"]
        print(curatorNotes)
        
        table_service = TableService(account_name='artsharestorage', account_key='zCN3F1TuFjeSw8alIDF0bcvSQoLe5tJHRcavpRKZ31JUUkPuHLtVSqP9WJ3oQU7ty/ZAisWl8CDcFtZHsZ15MQ==')
        artupload = {
            'PartitionKey': PARITION_KEY,
            'RowKey': newId,
            'artName': artName,
            'artistName': artistName,
            'curatorName': curatorName,
            'curatorNotes': curatorNotes,
            'photoUrl': photoUrl
        }
        table_service.insert_entity('artuploads', artupload)

        # Your Account SID from twilio.com/console
        account_sid = "ACce268cc948742f9dcec8827d91f45965"
        # Your Auth Token from twilio.com/console
        auth_token  = "1d407a9350de8d6260cfb5ea64969299"

        client = Client(account_sid, auth_token)

        message = "New photo uploaded: " + photoUrl
        message = client.messages.create(
            to="+12245672736", 
            from_="+19382010795",
            body=message)
        
        # db = get_db()
        # cur = db.cursor()
        # cur.execute(
        #         'INSERT INTO artDisplay (upload_name, art_name, artist_name, curator_name, curator_notes)'
        #         ' VALUES (?,?,?,?,?)',
        #         (filename,artName,artistName,curatorName,curatorNotes,)
        #     )
        # createdId = cur.lastrowid
        
        # error = None
        # artDisplay = cur.execute(
        #     'SELECT * FROM artDisplay WHERE id = ?', (createdId,)
        # ).fetchone()
        # db.commit()
        # cur.close()
        if artupload is None:
            error = 'unknown'
        else:
            return to_json(artupload)
    return { 'error' : error }
Example 30
class AzureTable(object):
    def __init__(self, account_name: str, account_key: str, table_name: str,
                 partition_key_field: str, clustering_key_field: str):
        self.table = TableService(account_name=account_name,
                                  account_key=account_key)
        self.table_name = table_name
        self.partition_key_field = partition_key_field
        self.clustering_key_field = clustering_key_field

    @property
    def partition_key_name(self) -> str:
        return 'PartitionKey'

    @property
    def clustering_key_name(self) -> str:
        return 'RowKey'

    def get_payload(self, payload: dict):
        item = deepcopy(payload)
        partition_key = payload.get(self.partition_key_field)
        clustering_key = payload.get(self.clustering_key_field)
        if partition_key is None:
            raise PartitionKeyNotFoundError(
                'payload={} does not have a partition key'.format(payload))
        if clustering_key is None:
            raise ClusteringKeyNotFoundError(
                'payload={} does not have a clustering key'.format(payload))

        item.update({
            self.partition_key_name: partition_key,
            self.clustering_key_name: clustering_key
        })

        return item

    def create(self):
        return self.table.create_table(self.table_name)

    def insert(self, item: dict):
        return self.table.insert_entity(self.table_name,
                                        self.get_payload(item))

    def update(self, item: dict):
        pass

    def upsert(self, item: dict):
        pass

    def delete(self, key: str):
        pass

    def read(self, key: str):
        pass

    def insert_batch(self, items: list):
        batch = TableBatch()
        for item in items:
            batch.insert_entity(self.get_payload(item))

        return self.table.commit_batch(self.table_name, batch)

    def get(self, partition_key: str, clustering_key: str):
        return self.table.get_entity(self.table_name, partition_key,
                                     clustering_key)

    def get_by_partition(self, partition_key: str) -> list:
        return self.table.query_entities(self.table_name,
                                         filter="{} eq '{}'".format(
                                             self.partition_key_name,
                                             partition_key))
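A hypothetical usage sketch for the class above; the field names match the partition/clustering keys passed to the constructor and all values are placeholders. Note that a single TableBatch only accepts entities sharing one PartitionKey.

table = AzureTable(account_name='myaccount', account_key='mykey',
                   table_name='readings',
                   partition_key_field='device_id',
                   clustering_key_field='timestamp')
table.create()
table.insert_batch([
    {'device_id': 'node1', 'timestamp': '2020-01-01T00:00:00Z', 'value': 1.2},
    {'device_id': 'node1', 'timestamp': '2020-01-01T00:05:00Z', 'value': 1.3},
])
print(table.get_by_partition('node1'))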
# TableService
# table_service = TableService(account_name='myaccount', account_key='mykey')

# CosmosDB
table_service = TableService(endpoint_suffix="table.cosmos.azure.com", connection_string=the_connection_string)
#table_service.set_proxy("myproxy", 8888)

print("create table...")
table_service.create_table('tasktable')

# dictionary (read from JSON)
print("creating task 001...")
task = {'PartitionKey': 'tasksSeattle', 'RowKey': '001',
        'description': 'Take out the trash', 'priority': 200}
table_service.insert_entity('tasktable', task)

print("Create task through an Entity object...")
task = Entity()
task.PartitionKey = 'tasksSeattle'
task.RowKey = '002'
task.description = 'Wash the car'
task.priority = 100
table_service.insert_entity('tasktable', task)

# update
print("Update task 001...")
task = {'PartitionKey': 'tasksSeattle', 'RowKey': '001',
        'description': 'Take out the garbage', 'priority': 250}
table_service.update_entity('tasktable', task)
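A hedged continuation of the sample above, rounding out the CRUD cycle with a read and a delete against the same placeholder table.

# read
print("Read task 001...")
task = table_service.get_entity('tasktable', 'tasksSeattle', '001')
print(task.description)

# delete
print("Delete task 002...")
table_service.delete_entity('tasktable', 'tasksSeattle', '002')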