Example #1
def main(myblob: func.InputStream, outputblob: func.Out[func.InputStream]):
    logging.info(f"Python blob trigger function processed blob \n"
                 f"Name: {myblob.name}\n"
                 f"Blob Size: {myblob.length} bytes")
    # Load the MNIST model from the serialized weights and JSON architecture
    weights = pickle.load(open(os.environ['ModelWeightsPath'], 'rb'))
    model_json = pickle.load(open(os.environ['ModelJSONPath'], 'rb'))
    model = model_from_json(model_json)
    model.set_weights(weights)
    # Read the image stream from the blob
    imgStream = myblob.read(-1)
    # Convert the raw bytes to an image array (np.frombuffer replaces the deprecated np.fromstring)
    nparr = np.frombuffer(imgStream, np.uint8)
    img = cv2.imdecode(nparr, cv2.IMREAD_GRAYSCALE)
    img = img.reshape(1, 28, 28, 1)
    logging.info("Image shape: %s", img.shape)
    # Predict on the image
    prediction = model.predict(img)
    # Log the class with the highest probability
    logging.info(np.argmax(prediction))
    #Connection to table storage
    table_service = TableService(
        connection_string=os.environ['AzureWebJobsStorage'])
    #Storing in table storage (adding 1 hour to convert servertime to my timezone)
    values = [
        os.path.basename(myblob.name),
        str(datetime.datetime.now() + datetime.timedelta(hours=1)),
        str(np.argmax(prediction))
    ]
    names = ["PartitionKey", "RowKey", "Prediction"]
    dictionary = dict(zip(names, values))
    table_service.insert_entity('imagedata', dictionary)

    #Saving to output container - Shows how to save a possible processed image
    outputblob.set(imgStream)
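A minimal sketch of reading a stored prediction back out of the `imagedata` table with the same `TableService` API used above; the helper name and the blob name `digit.png` are placeholders, not part of the original function:

import logging
import os
from azure.cosmosdb.table.tableservice import TableService

def read_prediction(blob_name="digit.png"):
    # Sketch: query the 'imagedata' table written by the blob trigger above.
    table_service = TableService(connection_string=os.environ['AzureWebJobsStorage'])
    rows = table_service.query_entities(
        'imagedata', filter="PartitionKey eq '%s'" % blob_name)
    for row in rows:
        logging.info("Prediction for %s at %s: %s",
                     row.PartitionKey, row.RowKey, row.Prediction)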
Example #2
def update_pipeline(account_name, account_key, table_name, partition_name,
                    filter_name, filter_value, name1, value1,
                    name2=None, value2=None, name3=None, value3=None,
                    name4=None, value4=None, name5=None, value5=None):
    table_service = TableService(account_name=account_name, account_key=account_key)
    entities = table_service.query_entities(table_name, filter=filter_name + " eq '"+ filter_value + "'")

    count = 0
    for entity in entities:
        count = count + 1
        add = False
        if name1 in entity and entity[name1] != value1.lower():
            add = True
        entity[name1] = value1.lower()

        if name2 is not None:
            if name2 in entity and value2 is not None and entity[name2] != value2.lower():
                add = True
            entity[name2] = value2.lower() if value2 is not None else None

        if name3 is not None:
            if name3 in entity and value3 is not None and entity[name3] != value3.lower():
                add = True
            entity[name3] = value3.lower() if value3 is not None else None

        if name4 is not None:
            if name4 in entity and value4 is not None and entity[name4] != value4.lower():
                add = True
            entity[name4] = value4.lower() if value4 is not None else None

        if name5 is not None:
            if name5 in entity and value5 is not None and entity[name5] != value5.lower():
                add = True
            entity[name5] = value5.lower() if value5 is not None else None
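Example #2 modifies the queried entities in place (and tracks `add` and `count`) but never writes anything back to the table; a minimal sketch of persisting the changes with `TableService.update_entity`, assuming that is the intended follow-up (the helper below is hypothetical, not part of the original):

def persist_updates(table_service, table_name, entities):
    # Hypothetical helper: write each already-modified entity back to its table.
    for entity in entities:
        table_service.update_entity(table_name, entity)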
Example #3
    def test_update_releases(self, mock_now, mock_list_containers):
        # Mock time
        mock_now.return_value = pendulum.datetime(year=2020,
                                                  month=5,
                                                  day=1,
                                                  minute=10)

        # Mock containers
        containers_in = make_containers()
        mock_list_containers.return_value = containers_in

        # Mock fetching of containers
        client = MagArchiverClient(account_name=self.account_name,
                                   account_key=self.account_key)
        containers = client.list_containers(last_modified_thresh=1)

        try:
            # Update releases based on containers
            num_updated, num_errors = client.update_releases(containers)
            self.assertEqual(num_updated, 2)
            self.assertEqual(num_errors, 0)
        finally:
            # Clean up
            service = TableService(account_name=self.account_name,
                                   account_key=self.account_key)
            for container in containers:
                service.delete_entity(MagRelease.TABLE_NAME, 'mag',
                                      container.name.replace("mag-", ""))
Example #4
def main(req: func.HttpRequest) -> func.HttpResponse:
    logging.info("Python HTTP trigger function processed a request.")

    path = req.route_params.get("path")
    if path is None:
        return func.HttpResponse(body="The url requested does not exist!",
                                 status_code=400)
    else:

        # connect to table
        storage_account_name = os.environ["STORAGE_ACCOUNT_NAME"]
        storage_account_key = os.environ["STORAGE_ACCOUNT_KEY"]

        table_service = TableService(account_name=storage_account_name,
                                     account_key=storage_account_key)

        # get longurl
        try:
            row = table_service.get_entity("links", "links", path)
        except Exception:
            return func.HttpResponse(
                body=f"The url requested - {path} - does not exist!",
                status_code=400)
        else:
            longurl = row["LongUrl"]

            return func.HttpResponse(headers={"location": longurl},
                                     status_code=302)
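Example #4 resolves a short path by reading the 'links' table with PartitionKey 'links' and RowKey equal to the path; a minimal sketch of inserting a matching row (illustrative only; the real insert code is not part of this example, and the ShortUrl column mirrors the schema read back in a later example):

def add_short_link(table_service, short_path, long_url):
    # Hypothetical helper: insert a row in the shape the redirect function reads.
    table_service.insert_entity("links", {
        "PartitionKey": "links",
        "RowKey": short_path,
        "ShortUrl": short_path,
        "LongUrl": long_url,
    })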
Example #5
    def __init__(self, azure_config, purge=False):
        super(AzureOperationsStorage, self).__init__()

        if not azure_config:
            raise Exception("No azure table storage configuration provided!")
        self._azure_config = azure_config

        # ensure defaults
        self._azure_config["operation_table"] = self._azure_config.get(
            "operation_table", "operations")
        self._azure_config["address_table"] = self._azure_config.get(
            "address_table", "address")
        self._azure_config["status_table"] = self._azure_config.get(
            "status_table", "status")
        self._azure_config["balances_table"] = self._azure_config.get(
            "balances_table", "balances")

        if not self._azure_config["account"]:
            raise Exception(
                "Please include the azure account name in the config")
        if not self._azure_config["key"]:
            raise Exception(
                "Please include the azure account key in the config")

        self._service = TableService(
            account_name=self._azure_config["account"],
            account_key=self._azure_config["key"])

        # if the tables don't exist, create them
        self._create_operations_storage(purge)
        self._create_status_storage(purge)
        self._create_address_storage(purge)
        self._create_balances_storage(purge)
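The four `_create_*_storage` helpers called above are not shown in Example #5; a minimal sketch of what one of them might look like, assuming `purge` means dropping any existing rows (the name and semantics are assumptions, not the project's actual code):

    def _create_operations_storage(self, purge):
        # Hypothetical sketch: ensure the table exists and optionally purge its rows.
        table = self._azure_config["operation_table"]
        self._service.create_table(table)  # returns False (no error) if the table already exists
        if purge:
            for entity in self._service.query_entities(table, select="PartitionKey,RowKey"):
                self._service.delete_entity(table, entity.PartitionKey, entity.RowKey)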
Example #6
def get_data_from_table(table_name):

    # Connect to account
    table_service = TableService(
        account_name='soilhumiditydata293s',
        account_key=
        '4PSsEO1xBAIdq3/MppWm+t6eYHi+CWhVn6xNZ6i4mLVgm50K8+NK6lA94v8MxG0bvVEfYCvsv1suxCyCnUYd0A=='
    )

    # Check if table exists
    if not table_service.exists(table_name):
        print("Table does NOT exist.")
        return -1

    # Retrieve all values from table
    table = table_service.query_entities(table_name)

    data = []
    for entry in table:
        # Format timestamp
        eTime = entry['enqueuedTime']
        eTime = datetime.strptime(
            str(eTime[:10]) + " " + str(eTime[11:-8]), '%Y-%m-%d %H:%M:%S')
        entry['enqueuedTime'] = find_closest_15th_minute(
            eTime)  # Round to closest 15th minute
        entry['hour'] = float(entry['enqueuedTime'].hour)

        data.append(entry)

    # Sort by time of reading
    data = sorted(data, key=lambda k: k['enqueuedTime'])

    return data
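`find_closest_15th_minute` is referenced but not defined in Example #6; a minimal sketch of one possible implementation that rounds a datetime to the nearest quarter hour (purely illustrative, the original helper may behave differently):

from datetime import datetime, timedelta

def find_closest_15th_minute(dt: datetime) -> datetime:
    # Round dt to the nearest 15-minute boundary.
    total_minutes = dt.hour * 60 + dt.minute + dt.second / 60.0
    rounded = int(round(total_minutes / 15.0)) * 15
    midnight = dt.replace(hour=0, minute=0, second=0, microsecond=0)
    return midnight + timedelta(minutes=rounded)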
Example #7
def main(req: func.HttpRequest) -> func.HttpResponse:
    logging.info('Starting clean table.')
    ret = dict()
    name = req.headers.get('name')

    if not name:  # If the name wasn't passed as a header, look for it in the query parameters
        name = req.params.get('name')

    if name:
        retrieved_secret = getConnectionString()

        table_service = TableService(connection_string=retrieved_secret.value)

        table_service.delete_table(name)
        time.sleep(1)
        existe = False
        while not existe:
            logging.info("Trying to create the table...")
            time.sleep(5)
            existe = table_service.create_table(name)

        logging.info("Done!!")

        ret['result'] = 'Done!'
        return func.HttpResponse(json.dumps(ret), status_code=200)
    else:
        ret['result'] = 'Please pass a name on the query string or in the request body!'
        return func.HttpResponse(json.dumps(ret), status_code=400)
Example #8
def validate_user_in_template(userId, templateId):
    table_service = TableService(
        connection_string=os.environ['AZURE_STORAGE_CONNECTION_STRING'])
    try:
        table_service.get_entity('user', templateId, userId)
    except Exception:
        abort(403, description="user_id not found in template_id")
Example #9
def historyDevScale(deviceid, scale):
    # Trying to use AppInsights, but this doesn't seem to work (disabling so app.logger works)
    #if 'APPINSIGHTS_INSTRUMENTATIONKEY' in os.environ:
    #    handler = LoggingHandler(os.environ['APPINSIGHTS_INSTRUMENTATIONKEY'])
    #    logging.basicConfig(handlers=[ handler ], format='%(levelname)s: %(message)s', level=logging.DEBUG)

    #    tc = TelemetryClient(os.environ['APPINSIGHTS_INSTRUMENTATIONKEY'])
    #    tc.track_event("GET /history/%s/%d/%d" % (deviceid, scale, seconds))
    #    tc.flush()

    seconds = request.args.get('seconds', default=3600, type=int)
    timefrom = int(time.time()) - seconds
    query = "timestamp gt %d" % (timefrom)
    app.logger.debug("getting records after %d with query: %s\n" %
                     (timefrom, query))

    table_service = TableService(
        connection_string=os.environ['AzureTableConnectionString'])
    datapoints = table_service.query_entities(TABLE_NAME_HISTORICAL_DATA,
                                              filter=query)

    results = []
    for datapoint in datapoints:
        # Map PartitionKey->deviceid, drop RowKey, Timestamp & etag
        datapoint['deviceid'] = datapoint.pop('PartitionKey')
        datapoint.pop('RowKey')
        datapoint.pop('Timestamp')
        datapoint.pop('etag')
        results.append(datapoint)

    app.logger.debug("Returning %d elements: %s" % (len(results), results))
    return json.dumps(results, default=str)
Example #10
def migrate_notification_keys(table_service: TableService) -> None:
    table_name = "Notification"
    notifications = table_service.query_entities(
        table_name, select="PartitionKey,RowKey,config")

    count = 0
    for entry in notifications:
        try:
            UUID(entry.PartitionKey)
            continue
        except ValueError:
            pass

        table_service.insert_or_replace_entity(
            table_name,
            {
                "PartitionKey": entry.RowKey,
                "RowKey": entry.PartitionKey,
                "config": entry.config,
            },
        )
        table_service.delete_entity(table_name, entry.PartitionKey,
                                    entry.RowKey)
        count += 1

    print("migrated %s rows" % count)
Example #11
def listTaps():
    # Trying to use AppInsights, but this doesn't seem to work (disabling so app.logger works)
    #if 'APPINSIGHTS_INSTRUMENTATIONKEY' in os.environ:
    #    handler = LoggingHandler(os.environ['APPINSIGHTS_INSTRUMENTATIONKEY'])
    #    logging.basicConfig(handlers=[ handler ], format='%(levelname)s: %(message)s', level=logging.DEBUG)

    #    tc = TelemetryClient(os.environ['APPINSIGHTS_INSTRUMENTATIONKEY'])
    #    tc.track_event("GET /")
    #    tc.flush()

    table_service = TableService(
        connection_string=os.environ['AzureTableConnectionString'])
    taps = table_service.query_entities(TABLE_NAME_CONFIGURATION)

    results = []
    for tap in taps:
        app.logger.debug("working on (type %s) %s" % (type(tap), tap))
        # Convert PartitionKey->deviceid and RowKey->scale, drop 'etag' and copy the rest
        tapdata = {}
        tapdata['deviceid'] = tap.pop('PartitionKey')
        tapdata['scale'] = int(tap.pop('RowKey'))
        tap.pop('etag')
        tapdata.update(tap.items())
        app.logger.debug("appending %s" % tapdata)
        results.append(tapdata)

    app.logger.debug("Returning: %s" % results)
    return json.dumps(results, default=str)
Example #12
def main(req: func.HttpRequest) -> func.HttpResponse:
    logging.info('Python HTTP trigger function processed a request.')

    with open('config.json', 'r') as config_file:
        config_data = json.load(config_file)

    connectionstring = config_data["connectionstring"]
    table_service = TableService(connection_string=connectionstring)

    table = config_data["table"]
    table_service.create_table(table)  # creates the table if it does not already exist

    courses = readcsv()

    for item in courses:
        # print(item)
        task = Entity()
        task.PartitionKey = item.subject
        task.RowKey = item.instructor
        task.lectures = item.lectures
        task.labs = item.labs
        task.points = item.points
        task.isWeekend = item.isWeekend
        table_service.insert_entity(table, task)

    return func.HttpResponse(
        "Cosmos DB - Table API example database is created.")
Example #13
def main(req: func.HttpRequest) -> func.HttpResponse:
    logging.info('Unregister request received.')

    server_id = req.params.get('server-id')
    region = req.params.get('region')

    if not server_id:
        return http_utils.create_function_response(
            {'message': 'Missing required parameter: server-id'},
            status_code=400)

    if not region:
        return http_utils.create_function_response(
            {'message': 'Missing required parameter: region'}, status_code=400)

    try:
        table_name = 'servers'
        table_service = TableService(
            connection_string=os.environ['AzureWebJobsStorage'])
        table_service.delete_entity(table_name, region, server_id)

        return http_utils.create_function_response(
            {'message': f'Server {server_id} successfully unregistered.'}, 200)
    except Exception:
        return http_utils.create_function_response(
            {'message': f'Server {server_id} not found.'}, 400)
Example #14
def sync_excel_blobs_and_az_tables():
    table_service = TableService(account_name=Config.TABLE_ACCOUNT_NAME,
                                 account_key=Config.TABLE_KEY)
    if not table_service.exists(Config.PRODUCT_TABLE_NAME):
        create_table(table_service, Config.PRODUCT_TABLE_NAME)
    # TODO: Consider insert_or_merge_entity() here instead https://docs.microsoft.com/en-us/python/api/azure-cosmosdb-table/azure.cosmosdb.table.tableservice.tableservice?view=azure-python
    delete_all_entries_in_table(table_service, Config.PRODUCT_TABLE_NAME)
    batch = TableBatch()

    table_data = get_metadata_blob_data()
    products_data = get_product_blobs_data()
    for p in table_data:
        try:
            p["cumulative"] = str(products_data[p["id"]]["cumulative"])
        except KeyError:
            p["cumulative"] = None
        batch.insert_entity(p)
    try:
        table_service.commit_batch(table_name=Config.PRODUCT_TABLE_NAME,
                                   batch=batch)
        print(
            f"Uploaded products data to '{Config.PRODUCT_TABLE_NAME}' table in '{Config.TABLE_ACCOUNT_NAME}' storage account"
        )

    except HeaderParsingError as e:
        print(e)
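The TODO in Example #14 points at `insert_or_merge_entity` as an alternative to deleting every row and re-inserting a batch; a minimal sketch of that upsert approach (a sketch only, not the project's actual implementation):

def upsert_products(table_service, table_data):
    # Sketch: upsert each product row in place instead of clearing the table first.
    for p in table_data:
        table_service.insert_or_merge_entity(Config.PRODUCT_TABLE_NAME, p)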
Example #15
def main(req: func.HttpRequest) -> func.HttpResponse:
    logging.info("Python HTTP trigger function processed a request.")

    # connect to table
    storage_account_name = os.environ["STORAGE_ACCOUNT_NAME"]
    storage_account_key = os.environ["STORAGE_ACCOUNT_KEY"]

    table_service = TableService(account_name=storage_account_name,
                                 account_key=storage_account_key)

    # get all rows
    rows = table_service.query_entities("links")

    data = {"links": []}

    for row in rows:
        data["links"].append({
            "PartitionKey": row.PartitionKey,
            "RowKey": row.RowKey,
            "ShortUrl": row.ShortUrl,
            "LongUrl": row.LongUrl,
        })

    ret_json = json.dumps(data)

    return func.HttpResponse(body=ret_json, mimetype="application/json")
Example #16
def migrate_task_os(table_service: TableService) -> None:
    table_name = "Task"
    tasks = table_service.query_entities(
        table_name, select="PartitionKey,RowKey,os,config")
    partitionKey = None

    count = 0
    batch = TableBatch()
    for task in tasks:
        if partitionKey != task.PartitionKey:
            table_service.commit_batch(table_name, batch)
            batch = TableBatch()

        partitionKey = task.PartitionKey
        if "os" not in task or (not task.os):
            config = json.loads(task.config)
            print(config)
            if "windows" in config["vm"]["image"].lower():
                task["os"] = "windows"
            else:
                task["os"] = "linux"
            count = count + 1
        batch.merge_entity(task)
    table_service.commit_batch(table_name, batch)
    print("migrated %s rows" % count)
Example #17
    def __init__(self,
                 connection_string=None,
                 storage_account_name=None,
                 account_key=None,
                 identity=None,
                 table_name=None):
        assert (storage_account_name and account_key) or connection_string
        self.storage_account_name = storage_account_name
        self.account_key = account_key
        self.table_name = table_name or "defaulttablepython"

        self.create_table_instance = CreateTableInstance(
            identity=identity,
            connection_string=connection_string,
            storage_account_name=storage_account_name,
            account_key=account_key) if identity else None

        self.table_service = TableService(account_name=self.storage_account_name, account_key=self.account_key) \
            if self.storage_account_name and self.account_key else None
        if connection_string:
            self.connection_string = connection_string
            self.table_service = TableService(
                connection_string=self.connection_string)

        self.ROW_KEY_GEN = False
        self.PARTITION_KEY_GEN = False
Example #18
def main(req: func.HttpRequest) -> func.HttpResponse:
    logging.info('Get server request received.')

    region = req.params.get('region')
    if not region:
        return http_utils.create_function_response(
            {'message': 'Missing required parameter: region'}, 400)

    table_name = 'servers'
    table_service = TableService(
        connection_string=os.environ['AzureWebJobsStorage'])

    servers = list(
        table_service.query_entities(table_name,
                                     filter=f"PartitionKey eq '{region}'"))

    if len(servers) == 0:
        return http_utils.create_function_response(
            {
                'message':
                'No servers are currently available. Please try again in a few minutes.'
            }, 200)

    server = get_best_server(servers)

    return http_utils.create_function_response(
        {'server': {
            'ip': server.ip,
            'port': server.port
        }}, 200)
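`get_best_server` is not shown in Example #18; a minimal placeholder sketch (picking a random server from the query results), clearly hypothetical since the real selection logic is not in the snippet:

import random

def get_best_server(servers):
    # Hypothetical placeholder: any entity from the 'servers' query will do.
    return random.choice(servers)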
Example #19
 def __init__(self, account_name: str, account_key: str, table_name: str,
              partition_key_field: str, clustering_key_field: str):
     self.table = TableService(account_name=account_name,
                               account_key=account_key)
     self.table_name = table_name
     self.partition_key_field = partition_key_field
     self.clustering_key_field = clustering_key_field
Example #20
def add_pipeline(account_name,
                 account_key,
                 table_name,
                 partition_name,
                 filter_name,
                 filter_value,
                 name1,
                 value1,
                 name2=None,
                 value2=None,
                 name3=None,
                 value3=None):
    print("Adding a new entry")
    new_entry = {}
    new_entry["RowKey"] = generate_row_key()
    new_entry["PartitionKey"] = partition_name
    new_entry[filter_name] = filter_value
    new_entry[name1] = value1.lower()
    if name2 is not None and value2 is not None:
        new_entry[name2] = value2.lower()
    if name3 is not None and value3 is not None:
        new_entry[name3] = value3.lower()
    print(new_entry)
    table_service = TableService(account_name=account_name,
                                 account_key=account_key)
    table_service.insert_entity(table_name, new_entry)
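`generate_row_key` is not shown in Example #20; a minimal sketch, assuming a random UUID is an acceptable RowKey (RowKey values only need to be unique within a partition):

import uuid

def generate_row_key():
    # Hypothetical helper: use a random UUID string as the RowKey.
    return str(uuid.uuid4())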
Example #21
    def __init__(self, userID):
        # Input: User ID
        # Output: dict{EMR1: [v1, ...], EMR2: [v1, ...]}
        query_parsed = []
        Result = {}

        # Connect to Azure Cosmos DB (Table API)
        the_connection_string = "DefaultEndpointsProtocol=https;AccountName=grand-challenge;AccountKey=zG8AM0FVzaE0cPcQ1NMYPxjE7tSTEQSPvl0CwWlLRTn10ixYlYMF6KFU36dt4D00e66QUoF01hBx0DdNTEtnqQ==;TableEndpoint=https://grand-challenge.table.cosmosdb.azure.com:443/;"
        table_service = TableService(
            endpoint_suffix="table.cosmosdb.azure.com",
            connection_string=the_connection_string)

        # Query the EMR data of the given user
        query_filter = "id eq " + "'" + userID + "'"  # find all entities corresponding to userID
        tasks = table_service.query_entities('EMR', filter=query_filter)

        # Parse the queried data
        for task in tasks:
            entity = dict()
            entity['id'] = task.Eid  # can be edited
            entity['EMR_hash'] = task.EMR
            entity['date'] = task.datae
            Result[entity['id']] = []
            query_parsed.append(entity)

        # Group the parsed entities by id
        for entity in query_parsed:
            Result[entity['id']].append(entity)

        # Sort each entity list by date
        for Eid in Result:
            Result[Eid] = sorted(Result[Eid], key=lambda e: e['date'])

        self.Result = Result
Example #22
def update_result(results_entity):
    """ Updates a result in the ``rosiepi`` storage table.

    :param results_entity: An ``azure.cosmosdb.table.models.Entity`` object
                           containing the results to add to the storage
                           table. ``Entity`` object can be retrieved from
                           ``lib/result.py::Result.results_to_table_entity()``.

    :return: The entity's Etag if successful. None if failed.
    """

    response = None
    if isinstance(results_entity, Entity):
        table = TableService(connection_string=os.environ['APP_STORAGE_CONN_STR'])

        try:
            response = table.update_entity('rosiepi', results_entity)
        except Exception as err:
            logging.info(f'Failed to update result in rosiepi table. Error: {err}')
    else:
        logging.info(
            'Result not updated in rosiepi table. Supplied result was an incorrect '
            'type. Should be azure.cosmosdb.table.models.Entity. Supplied type: '
            f'{type(results_entity)}'
        )

    return response
Example #23
def delete_orphan_record(partition_key, short_id, instance_name, account_key):
    start = time.time()

    table_service = TableService(account_name=instance_name, account_key=account_key)
    table_service.delete_entity('orphans', partition_key, short_id)

    print("delete_orphan_record completed in {} sec".format(time.time() - start))
Example #24
 def __init__(self, table_name=None):
     self.azure_storage_name = cnf.get('credentials', 'azure_storage_name')
     self.azure_storage_key = cnf.get('credentials', 'azure_storage_key')
     self.table_service = TableService(account_name=self.azure_storage_name,
                                       account_key=self.azure_storage_key)
     self.table_name = table_name if table_name is not None else cnf.get(
         'resources', 'table_name')
Example #25
class StorageManager:
    def __init__(self, table_name=None):
        self.azure_storage_name = cnf.get('credentials', 'azure_storage_name')
        self.azure_storage_key = cnf.get('credentials', 'azure_storage_key')
        self.table_service = TableService(account_name=self.azure_storage_name,
                                          account_key=self.azure_storage_key)
        self.table_name = table_name if table_name is not None else cnf.get(
            'resources', 'table_name')

    def create_table(self):
        self.table_service.create_table(self.table_name)

    def upload_data(self, entities):
        # Count records to upload
        num_entities = len(entities)

        # Upload record by record and print info
        time_start = time.time()
        for i, entity in enumerate(entities):
            self.table_service.insert_or_replace_entity(
                self.table_name, entity)
            print_uploading_state(i + 1, num_entities, self.table_name)
        print_successful_upload_state(num_entities, self.table_name,
                                      time.time() - time_start)

    def query_entities(self, query_filter=None, query_selector=None):
        return self.table_service.query_entities(self.table_name,
                                                 filter=query_filter,
                                                 select=query_selector)
Example #26
def setup_table_service(conn_str: str, target_table: str) -> TableService:
    """Set up a Table Service for the target_table.

    Parameters
    ----------
    conn_str
        Connection string to an Azure storage account
    target_table
        Name of the table we want to create the table service for.

    Raises
    ------
    Raises an exceptions.HttpError if the table was not found in the storage
    account.
    """
    storage_params = extract_storage_parameters(conn_str)
    table_service = TableService(
        account_name=storage_params["AccountName"],
        account_key=storage_params["AccountKey"],
    )

    if not table_service.exists(target_table):
        msg = f"Table {target_table} to store request info did not exist."
        raise exceptions.HttpError(
            msg,
            func.HttpResponse(msg, status_code=500),
        )

    return table_service
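`extract_storage_parameters` is assumed but not shown in Example #26; a minimal sketch that splits an Azure storage connection string into a dict so `AccountName` and `AccountKey` can be looked up (illustrative only):

def extract_storage_parameters(conn_str: str) -> dict:
    # Split "Key=Value;Key=Value;..." into a dict; split on the first '=' only,
    # since base64 account keys can themselves contain '=' characters.
    return dict(part.split("=", 1) for part in conn_str.split(";") if part)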
Example #27
 def __init__(self):
     with open('./vardhamanbot/bot/config/app.json') as app_config_file:
         app_config = json.load(app_config_file)
     self.tableservice = TableService(app_config["ats_name"],
                                      app_config["ats_key"])
     self.tableservice.retry = ExponentialRetry(initial_backoff=30,
                                                increment_base=2,
                                                max_attempts=20).retry
Example #28
    def create_if_missing(self, table_service: TableService) -> None:
        try:
            self.disable_storage_client_logging()

            if not table_service.exists(TABLE_NAME):
                table_service.create_table(TABLE_NAME)
        finally:
            self.enable_storage_client_logging()
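The `disable_storage_client_logging` / `enable_storage_client_logging` pair used in Example #28 is not shown; a minimal sketch of one way to implement it by toggling the storage client's logger (the logger name here is an assumption, not taken from the original):

import logging

STORAGE_CLIENT_LOGGER = "azure.cosmosdb.table.common.storageclient"  # assumed logger name

def disable_storage_client_logging() -> None:
    # Silence the storage client while probing for / creating the table.
    logging.getLogger(STORAGE_CLIENT_LOGGER).disabled = True

def enable_storage_client_logging() -> None:
    logging.getLogger(STORAGE_CLIENT_LOGGER).disabled = False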
Example #29
    def __init__(self, account_name: str, account_key: str,
                 database_name: str):
        """Create a TableService for the given account and remember the target database name."""
        self.table_service = TableService(account_name=account_name,
                                          account_key=account_key)
        self.database_name = database_name
Example #30
class InputHandler:
    def __init__(self, env_vars):
        # Blob service init
        self.blob_service_client = BlobServiceClient.from_connection_string(
            env_vars.get("BLOB_CONNECTION_STRING"))
        self.blob_container_name = env_vars.get("BLOB_CONTAINER_NAME")
        # Table service init
        self.table_service = TableService(
            os.environ.get("DB_ACCOUNT_NAME"),
            account_key=env_vars.get("TABLE_KEY"))
        self.table_name = env_vars.get("DB_TABLE_NAME")

    def upload_picture_to_blob(self, img, img_name):
        # Create connection to blob storage
        blob_client = self.blob_service_client.get_blob_client(
            container=self.blob_container_name, blob=img_name)

        # Upload image to blob
        blob_client.upload_blob(img)

    def upload_json_prices(self, prices):
        error = False
        for val in prices:  # Loop through new_prices and add to database
            entry = Entity()
            try:
                entry.PartitionKey = val["county"]
                entry.RowKey = str(uuid.uuid4())  # Generate new random UUID
                entry.price = val["price"]
                entry.location = val["location"]
                if (val["fueltype"] == "diesel"
                        or val["fueltype"] == "gasoline"):
                    entry.fueltype = val["fueltype"]
                else:
                    entry.fueltype = "unknown"
                self.table_service.insert_entity(self.table_name, entry)
            except AttributeError:
                error = True
                print("Error trying to parse JSON object: " + str(val))

        if error:
            return "Something went wrong. Try check your syntax"
        else:
            return "Inserted successfully"

    def upload_price(self, price, fuel_type, location):
        entry = Entity()
        try:
            entry.PartitionKey = "trondelag"
            entry.RowKey = str(uuid.uuid4())  # Generate new random UUID
            entry.price = price
            entry.location = location
            entry.fueltype = fuel_type
            self.table_service.insert_entity(self.table_name, entry)
        except AttributeError:
            print("Error trying to upload: Fuel type '" + str(fuel_type) +
                  "' Price '" + str(price) + "'")
            return "Something went wrong. Try check your syntax"
        return "Price inserted successfully"
Example #31
class AzureTableProvider:
    def __init__(self, account_name, key, table_name):
        self.target_table = table_name

        if not account_name or not key:
            raise Exception('Account or key not specified')

        self.table_service = TableService(account_name=account_name, account_key=key)
        self.table_service.create_table(self.target_table)

    def get_all(self):
        return self.table_service.query_entities(self.target_table)

    def remove(self, item):
        query_str = "Link eq '%s'" % item
        tasks = self.table_service.query_entities(self.target_table, filter=query_str)
        # Delete the first matching entity; calling any() first would consume the generator and skip it.
        for task in tasks:
            self.table_service.delete_entity(self.target_table, task.PartitionKey, task.RowKey)
            return True
        return False

    def add(self, item):
        track = {
            'PartitionKey': 'MusicBotEntry',
            'RowKey': str(uuid.uuid4()),
            'Link': item
        }
        self.table_service.insert_entity(self.target_table, track)