class AzureTableProvider:
    """Thin wrapper around an Azure Table storing music-bot link entries.

    Entities use a fixed PartitionKey ('MusicBotEntry') and a random UUID
    RowKey; the payload lives in the 'Link' property.
    """

    def __init__(self, account_name, key, table_name):
        """Connect to the storage account and ensure the target table exists.

        Raises:
            ValueError: if account_name or key is missing (subclass of the
                Exception the original raised, so existing handlers still work).
        """
        if not account_name or not key:
            # Validate before any network call so misconfiguration fails fast.
            raise ValueError('Account or key not specified')
        self.target_table = table_name
        self.table_service = TableService(account_name=account_name,
                                          account_key=key)
        self.table_service.create_table(self.target_table)

    def get_all(self):
        """Return an iterable of all entities in the table."""
        return self.table_service.query_entities(self.target_table)

    def remove(self, item):
        """Delete every entity whose Link equals *item*.

        Returns:
            True if at least one entity was deleted, False otherwise.
        """
        query_str = "Link eq '%s'" % item
        # Materialize the result: the original called any() on the query
        # generator, which consumed (and therefore skipped deleting) the
        # first matching entity.
        tasks = list(self.table_service.query_entities(self.target_table,
                                                       filter=query_str))
        if not tasks:
            return False
        for task in tasks:
            self.table_service.delete_entity(self.target_table,
                                             task.PartitionKey, task.RowKey)
        return True

    def add(self, item):
        """Insert *item* as a new Link entity with a random RowKey."""
        track = {
            'PartitionKey': 'MusicBotEntry',
            'RowKey': str(uuid.uuid4()),
            'Link': item,
        }
        self.table_service.insert_entity(self.target_table, track)
def main(req: func.HttpRequest) -> func.HttpResponse:
    """HTTP trigger: return every row of the 'links' table as JSON.

    Response shape: {"links": [{"PartitionKey", "RowKey", "ShortUrl",
    "LongUrl"}, ...]}.
    """
    logging.info("Python HTTP trigger function processed a request.")

    # connect to table using credentials from the environment
    storage_account_name = os.environ["STORAGE_ACCOUNT_NAME"]
    storage_account_key = os.environ["STORAGE_ACCOUNT_KEY"]
    table_service = TableService(account_name=storage_account_name,
                                 account_key=storage_account_key)

    # Project each entity onto the four fields the client needs
    # (comprehension instead of the original append loop).
    data = {
        "links": [
            {
                "PartitionKey": row.PartitionKey,
                "RowKey": row.RowKey,
                "ShortUrl": row.ShortUrl,
                "LongUrl": row.LongUrl,
            }
            for row in table_service.query_entities("links")
        ]
    }
    return func.HttpResponse(body=json.dumps(data),
                             mimetype="application/json")
def historyDevScale(deviceid, scale):
    """Return historical datapoints (JSON list) recorded in the last
    ``seconds`` seconds (query arg, default 3600).

    Entities are reshaped so PartitionKey becomes 'deviceid'; RowKey,
    Timestamp and etag are dropped.
    """
    # AppInsights wiring was disabled so app.logger keeps working:
    # handler = LoggingHandler(os.environ['APPINSIGHTS_INSTRUMENTATIONKEY'])
    # logging.basicConfig(handlers=[handler], format='%(levelname)s: %(message)s', level=logging.DEBUG)
    # tc = TelemetryClient(os.environ['APPINSIGHTS_INSTRUMENTATIONKEY'])
    # tc.track_event(...); tc.flush()
    window = request.args.get('seconds', default=3600, type=int)
    cutoff = int(time.time()) - window
    # NOTE(review): 'timestamp' appears to be a custom numeric property,
    # not the built-in Timestamp — confirm against the writer side.
    query = "timestamp gt %d" % cutoff
    app.logger.debug("getting records after %d with query: %s\n" % (cutoff, query))

    table_service = TableService(
        connection_string=os.environ['AzureTableConnectionString'])
    rows = table_service.query_entities(TABLE_NAME_HISTORICAL_DATA,
                                        filter=query)

    results = []
    for row in rows:
        # Map PartitionKey->deviceid, drop RowKey, Timestamp & etag.
        row['deviceid'] = row.pop('PartitionKey')
        for unwanted in ('RowKey', 'Timestamp', 'etag'):
            row.pop(unwanted)
        results.append(row)

    app.logger.debug("Returning %d elemnts: %s" % (len(results), results))
    return json.dumps(results, default=str)
def migrate_notification_keys(table_service: TableService) -> None:
    """Swap PartitionKey/RowKey on Notification rows whose PartitionKey is
    not already a UUID.

    Each legacy row is copied with the keys swapped, then the old row is
    deleted.  Prints the number of migrated rows.

    (Fixed: removed the unused local ``partitionKey``.)
    """
    table_name = "Notification"
    notifications = table_service.query_entities(
        table_name, select="PartitionKey,RowKey,config")
    count = 0
    for entry in notifications:
        try:
            # Already migrated: PartitionKey parses as a UUID.
            UUID(entry.PartitionKey)
            continue
        except ValueError:
            pass

        table_service.insert_or_replace_entity(
            table_name,
            {
                "PartitionKey": entry.RowKey,
                "RowKey": entry.PartitionKey,
                "config": entry.config,
            },
        )
        # Delete the old row only after the swapped copy is in place.
        table_service.delete_entity(table_name, entry.PartitionKey,
                                    entry.RowKey)
        count += 1
    print("migrated %s rows" % count)
def listTaps():
    """Return all tap configuration rows as a JSON list.

    PartitionKey -> 'deviceid', RowKey -> int 'scale'; 'etag' is dropped
    and every other property is copied through.

    Fixes: the final debug call ``"Returning: " % (results)`` raised
    TypeError (no %s placeholder); the useless ``e = Entity`` binding is
    removed.
    """
    # AppInsights wiring was disabled so app.logger keeps working:
    # handler = LoggingHandler(os.environ['APPINSIGHTS_INSTRUMENTATIONKEY'])
    # logging.basicConfig(handlers=[handler], format='%(levelname)s: %(message)s', level=logging.DEBUG)
    # tc = TelemetryClient(os.environ['APPINSIGHTS_INSTRUMENTATIONKEY'])
    # tc.track_event("GET /"); tc.flush()
    table_service = TableService(
        connection_string=os.environ['AzureTableConnectionString'])
    taps = table_service.query_entities(TABLE_NAME_CONFIGURATION)

    results = []
    for tap in taps:
        app.logger.debug("working on (type %s) %s" % (type(tap), tap))
        # Convert PartitionKey->deviceid and RowKey->scale, drop 'etag'
        # and copy the rest.
        tapdata = {}
        tapdata['deviceid'] = tap.pop('PartitionKey')
        tapdata['scale'] = int(tap.pop('RowKey'))
        tap.pop('etag')
        tapdata.update(tap.items())
        app.logger.debug("appending %s" % tapdata)
        results.append(tapdata)

    app.logger.debug("Returning: %s" % results)
    return json.dumps(results, default=str)
def get_data_from_table(table_name):
    """Fetch every entity from *table_name*, snap 'enqueuedTime' to the
    closest 15-minute mark, add a float 'hour' field, and return the
    entities sorted by time.

    Returns -1 when the table does not exist (legacy sentinel kept for
    existing callers).
    """
    # NOTE(review): credentials are hard-coded; move to configuration.
    table_service = TableService(
        account_name='soilhumiditydata293s',
        account_key=
        '4PSsEO1xBAIdq3/MppWm+t6eYHi+CWhVn6xNZ6i4mLVgm50K8+NK6lA94v8MxG0bvVEfYCvsv1suxCyCnUYd0A=='
    )

    # Check if table exists.
    if not table_service.exists(table_name):
        print("Table does NOT exist.")
        return -1

    rows = []
    for entity in table_service.query_entities(table_name):
        raw = entity['enqueuedTime']
        # Date part + time part with the fractional-seconds tail sliced off,
        # parsed into a naive datetime.
        parsed = datetime.strptime(
            str(raw[:10]) + " " + str(raw[11:-8]), '%Y-%m-%d %H:%M:%S')
        # Round to closest 15th minute.
        entity['enqueuedTime'] = find_closest_15th_minute(parsed)
        entity['hour'] = float(entity['enqueuedTime'].hour)
        rows.append(entity)

    # Sort by time of reading.
    return sorted(rows, key=lambda k: k['enqueuedTime'])
def migrate_task_os(table_service: TableService) -> None:
    """Backfill the 'os' field on Task rows that lack it, batching merges
    per PartitionKey.

    The OS is inferred from the task's JSON config: an image name
    containing 'windows' maps to 'windows', anything else to 'linux'.
    Prints the number of migrated rows.
    """
    table_name = "Task"
    tasks = table_service.query_entities(
        table_name, select="PartitionKey,RowKey,os,config")
    partitionKey = None
    count = 0
    batch = TableBatch()
    for task in tasks:
        # Table batches must share one PartitionKey: flush whenever the
        # partition changes.  NOTE(review): on the first iteration this
        # commits an empty batch — presumably a no-op; confirm with the SDK.
        if partitionKey != task.PartitionKey:
            table_service.commit_batch(table_name, batch)
            batch = TableBatch()
            partitionKey = task.PartitionKey
        # Only rows with a missing or empty 'os' property are touched.
        if "os" not in task or (not task.os):
            config = json.loads(task.config)
            print(config)
            if "windows".lower() in config["vm"]["image"].lower():
                task["os"] = "windows"
            else:
                task["os"] = "linux"
            count = count + 1
            batch.merge_entity(task)
    # Flush the final (possibly partial) batch.
    table_service.commit_batch(table_name, batch)
    print("migrated %s rows" % count)
class StorageManager:
    """Uploads and queries entities in one Azure table, configured from the
    'credentials' / 'resources' config sections."""

    def __init__(self, table_name=None):
        # Credentials come from the 'credentials' section of the config.
        self.azure_storage_name = cnf.get('credentials', 'azure_storage_name')
        self.azure_storage_key = cnf.get('credentials', 'azure_storage_key')
        self.table_service = TableService(
            account_name=self.azure_storage_name,
            account_key=self.azure_storage_key)
        # Fall back to the configured table name when none is given.
        if table_name is None:
            table_name = cnf.get('resources', 'table_name')
        self.table_name = table_name

    def create_table(self):
        """Create the managed table."""
        self.table_service.create_table(self.table_name)

    def upload_data(self, entities):
        """Upsert *entities* one by one, reporting progress as we go."""
        total = len(entities)
        started = time.time()
        for index, entity in enumerate(entities, start=1):
            self.table_service.insert_or_replace_entity(self.table_name,
                                                        entity)
            print_uploading_state(index, total, self.table_name)
        print_successful_upload_state(total, self.table_name,
                                      time.time() - started)

    def query_entities(self, query_filter=None, query_selector=None):
        """Query the managed table with an optional filter and projection."""
        return self.table_service.query_entities(self.table_name,
                                                 filter=query_filter,
                                                 select=query_selector)
def main(req: func.HttpRequest) -> func.HttpResponse:
    """Return the best available server for the requested region, or an
    explanatory message when the region is missing / no server is free."""
    logging.info('Get server request recieved.')

    region = req.params.get('region')
    if not region:
        return http_utils.create_function_response(
            {'message': 'Missing required parameter: region'}, 400)

    table_service = TableService(
        connection_string=os.environ['AzureWebJobsStorage'])
    # Servers are partitioned by region.
    servers = list(
        table_service.query_entities('servers',
                                     filter=f"PartitionKey eq '{region}'"))

    if len(servers) == 0:
        return http_utils.create_function_response(
            {
                'message':
                'No servers are currently available. Please try again in a few minutes.'
            }, 200)

    best = get_best_server(servers)
    return http_utils.create_function_response(
        {'server': {
            'ip': best.ip,
            'port': best.port
        }}, 200)
def update_pipeline(account_name, account_key, table_name, partition_name,
                    filter_name, filter_value, name1, value1,
                    name2=None, value2=None, name3=None, value3=None,
                    name4=None, value4=None, name5=None, value5=None):
    """Lower-case up to five named properties on every entity matching
    ``filter_name eq 'filter_value'``, tracking whether anything changed.

    Fixed: the name4/name5 conditions were syntactically invalid
    ("... and and value4 != None entity[name4] ..." — a doubled 'and' plus
    a missing one); they now mirror the name2/name3 checks.

    NOTE(review): unlike the 4-parameter variant of this function, this one
    never persists the modified entities — confirm whether the
    update/insert tail was lost.
    """
    table_service = TableService(account_name=account_name,
                                 account_key=account_key)
    entities = table_service.query_entities(
        table_name, filter=filter_name + " eq '" + filter_value + "'")
    count = 0
    for entity in entities:
        count = count + 1
        add = False
        if name1 in entity and entity[name1] != value1.lower():
            add = True
            entity[name1] = value1.lower()
        if name2 != None:
            if name2 in entity and value2 != None and entity[name2] != value2.lower():
                add = True
                entity[name2] = value2.lower() if value2 != None else None
        if name3 != None:
            if name3 in entity and value3 != None and entity[name3] != value3.lower():
                add = True
                entity[name3] = value3.lower() if value3 != None else None
        if name4 != None:
            if name4 in entity and value4 != None and entity[name4] != value4.lower():
                add = True
                entity[name4] = value4.lower() if value4 != None else None
        if name5 != None:
            if name5 in entity and value5 != None and entity[name5] != value5.lower():
                add = True
                entity[name5] = value5.lower() if value5 != None else None
def store_predictions_in_table(predictions, times,
                               table_name="predictedSoilMoistureMessages"):
    """Replace the contents of *table_name* with fresh soil-moisture
    predictions.

    Each prediction is stored with PartitionKey = formatted timestamp and
    RowKey = its index in *predictions*.
    """
    # NOTE(review): credentials are hard-coded; move to configuration.
    table_service = TableService(
        account_name='soilhumiditydata293s',
        account_key=
        '4PSsEO1xBAIdq3/MppWm+t6eYHi+CWhVn6xNZ6i4mLVgm50K8+NK6lA94v8MxG0bvVEfYCvsv1suxCyCnUYd0A=='
    )

    # Delete whatever predictions the table currently holds.
    for existing in table_service.query_entities(table_name):
        table_service.delete_entity(table_name, existing['PartitionKey'],
                                    existing['RowKey'])

    # Insert the new predictions, one row per (time, value) pair.
    for index, prediction in enumerate(predictions):
        row = Entity()
        row.PartitionKey = datetime.strftime(times[index],
                                             "%Y-%m-%d %H:%M:%S")
        row.RowKey = str(index)
        row['soilmoistureprediction'] = str(prediction)
        table_service.insert_entity(table_name, row)
def __init__(self, userID):
    # Input: User ID
    # Output: dict{EMR1:[v1, ...],EMR2:[v1, ...]}
    #
    # NOTE(review): this method references an ambient name 'Result' that is
    # not defined in this view — confirm where it comes from (likely a
    # module-level dict).
    query_parsed = []

    # Connect to Azure Cosmos DB (Table API endpoint).
    the_connection_string = "DefaultEndpointsProtocol=https;AccountName=grand-challenge;AccountKey=zG8AM0FVzaE0cPcQ1NMYPxjE7tSTEQSPvl0CwWlLRTn10ixYlYMF6KFU36dt4D00e66QUoF01hBx0DdNTEtnqQ==;TableEndpoint=https://grand-challenge.table.cosmosdb.azure.com:443/;"
    table_service = TableService(
        endpoint_suffix="table.cosmosdb.azure.com",
        connection_string=the_connection_string)

    # Query EMR data of certain user
    query_filter = "id eq " + "'" + userID + "'"

    # find all entities correspoding userID
    tasks = table_service.query_entities('EMR', filter=query_filter)

    # Parse qeuried data into plain dicts.
    for task in tasks:
        entity = dict()
        entity['id'] = task.Eid  # can be editted
        entity['EMR_hash'] = task.EMR
        # NOTE(review): 'task.datae' looks like a typo for 'task.date' —
        # confirm the actual property name on the EMR entity.
        entity['date'] = task.datae
        Result[entity['id']] = []
        query_parsed.append(entity)

    # Store data into dictionary
    # NOTE(review): keyed by 'EMR_id', but the dicts built above only have
    # 'id', 'EMR_hash' and 'date' — this would raise KeyError; confirm the
    # intended key.
    for entity in query_parsed:
        Result[entity['EMR_id']].append(entity)

    # Sort the entity list
    # NOTE(review): sort key reads 'commit_date', which is never set above —
    # confirm against the stored schema.
    for Eid in Result:
        Result[Eid] = sorted(Result[Eid],
                             key=lambda date: date['commit_date'])
def update_pipeline(account_name, account_key, table_name, partition_name,
                    filter_name, filter_value, name1, value1,
                    name2=None, value2=None, name3=None, value3=None,
                    name4=None, value4=None):
    """Update (or clone) the first entity matching
    ``filter_name eq 'filter_value'``.

    Up to four named properties are compared case-insensitively; when none
    changed the entity is updated in place, otherwise a copy with a fresh
    RowKey is inserted.  When no entity matches at all, add_pipeline()
    creates the row.
    """
    table_service = TableService(account_name=account_name,
                                 account_key=account_key)
    entities = table_service.query_entities(
        table_name, filter=filter_name + " eq '" + filter_value + "'")

    count = 0
    for entity in entities:
        count = count + 1
        changed = False

        # Compare/assign each provided (name, value) pair; values are
        # normalised to lower case.  name1/value1 are mandatory, the rest
        # only apply when both the name and value were supplied.
        if name1 in entity and entity[name1] != value1.lower():
            changed = True
            entity[name1] = value1.lower()
        for prop, val in ((name2, value2), (name3, value3), (name4, value4)):
            if prop is not None and val is not None:
                if prop in entity and entity[prop] != val.lower():
                    changed = True
                    entity[prop] = val.lower()

        if not changed:
            table_service.update_entity(table_name, entity)
            print("Updating existing entry")
        else:
            entity["RowKey"] = generate_row_key()
            table_service.insert_entity(table_name, entity)
            print("Adding new entry since one already existed")
        print(entity)
        # Only the first matching entity is processed.
        break

    if count == 0:
        add_pipeline(account_name, account_key, table_name, partition_name,
                     filter_name, filter_value, name1, value1,
                     name2, value2, name3, value3)
    print("Done")
def get_leaf(msg):
    """Look up the wizard's task-success flag for a conversation.

    Returns int('wizardSurveyTaskSuccessful') for the matching row, or
    None when no row (or no value) exists.
    """
    conversation_id = msg['ConversationId']
    msg = msg['Messages']

    # Create connection to Azure Table Storage.
    connection_string = os.environ['conversationalkm_STORAGE']
    table_service = TableService(connection_string=connection_string)

    # One row per conversation under the 'convs-quality' partition.
    query_fiter = f"PartitionKey eq 'convs-quality' and RowKey eq '{conversation_id}'"
    evaluation = list(
        table_service.query_entities(os.environ.get('table_sample_data'),
                                     filter=query_fiter))

    if len(evaluation) == 0:
        return None
    flag = evaluation[0].get('wizardSurveyTaskSuccessful')
    return int(flag) if flag != None else None
def main(req: func.HttpRequest) -> func.HttpResponse:
    """Pop one awaiting job from the table and hand it to the caller.

    The job entity is removed from the 'await' partition, re-inserted
    under 'processing', and its fields are returned as JSON.
    """
    logging.info('Python HTTP Submit trigger received a request')
    logging.debug('Creating blob service')

    table_service = TableService(
        account_name=os.getenv('AZURE_STORAGE_ACCOUNT'),
        account_key=os.getenv('AZURE_STORAGE_ACCESS_KEY')
    )

    headers_dict = {
        "Access-Control-Allow-Credentials": "true",
        "Access-Control-Allow-Origin": "*",
        "Access-Control-Allow-Methods": "Post"
    }

    # Validate the request body against the get-job schema.
    schema = getjob_schema.GetJobSchema()
    try:
        getjob_dict = schema.loads(req.get_body())
    except ValidationError:
        return func.HttpResponse(f'Failed to validate getjob schema',
                                 headers=headers_dict,
                                 status_code=400
                                 )

    if not getjob_dict['num_messages'] == 1:
        return func.HttpResponse(f'Number of messages should be 1',
                                 headers=headers_dict,
                                 status_code=400
                                 )

    # Take the first entity (if any) from the 'await' partition.
    table_name = os.getenv('AZURE_TABLE_NAME')
    entity = next(
        iter(table_service.query_entities(table_name,
                                          filter="PartitionKey eq 'await'")),
        None)
    if not entity:
        return func.HttpResponse(f'No job found',
                                 headers=headers_dict,
                                 status_code=400
                                 )

    # Shape the job message for the caller.
    message = {}
    message['crop'] = entity.crop
    message['geometry'] = json.loads(entity.geometry)
    message['irrigated'] = entity.irrigated
    message['guid'] = entity.RowKey
    message['area_name'] = entity.area_name
    message['planting_date'] = entity.planting_date
    message['fraction'] = entity.fraction

    # Move the entity from 'await' to 'processing'.
    table_service.delete_entity(table_name, entity.PartitionKey,
                                entity.RowKey)
    entity.PartitionKey = 'processing'
    table_service.insert_entity(table_name, entity)

    return func.HttpResponse(json.dumps(message),
                             headers=headers_dict,
                             mimetype='application/json'
                             )
def get_entities(table_service: TableService, accountId: str,
                 tripName: str) -> list:
    """Return all 'prod' entities for *accountId* with the given trip name.

    Fixed: the return annotation was the literal ``[]`` (an empty-list
    expression, meaningless as a type) — it is now ``list``.
    """
    query_string = ("PartitionKey eq '{accountId}' and "
                    "trip_name eq '{tripName}'").format(
                        accountId=accountId, tripName=tripName)
    results = table_service.query_entities(
        table_name='prod', filter=query_string)
    # Materialize the query generator so callers get a plain list.
    return list(results)
def main(req: func.HttpRequest) -> func.HttpResponse:
    """Match an uploaded face against stored faces; update the matching
    entry or create a new one, returning the entry's RowKey.

    Fixes: the per-entity ``try`` had no ``except`` clause (syntax error),
    and images were saved with format='JPG', which PIL rejects — the
    correct PIL format name is 'JPEG'.
    """
    logging.info('Python HTTP trigger function processed a request.')

    CF.BaseUrl.set("https://emotiontrack.cognitiveservices.azure.com/face/v1.0")
    CF.Key.set("4a1e0d41a8494d71ac0b9028464d8e62")

    rowkey = req.params.get('rowkey')
    if not rowkey:
        logging.error("Missing parameter(s)")
        return func.HttpResponse("Missing one or more parameter.",
                                 status_code=400)

    face = req.get_json()
    face_rect = face['faceRectangle']

    table = TableService(connection_string=conn_string)
    # NOTE(review): a TableService instance is always truthy, so this guard
    # never fires — connection failures surface as exceptions instead.
    if not table:
        logging.error("Failed to connect to the storage")
        return func.HttpResponse(
            "Failed to connect to the storage. Please try again later.",
            status_code=500)

    test_img = getFaceImage(table, rowkey, face_rect)
    test_imgIO = io.BytesIO()
    test_img.save(test_imgIO, format='JPEG')

    entities = table.query_entities(table_name, filter=None)
    isMatch = False
    for entity in entities:
        img = getFaceImage(table, entity.RowKey, entity.rect)
        imgIO = io.BytesIO()
        img.save(imgIO, format='JPEG')
        try:
            res = CF.face.verify(test_imgIO, imgIO)
        except Exception:
            # Verification is best-effort per candidate: log and move on.
            logging.exception("Face verification failed for %s",
                              entity.RowKey)
            continue
        if res['isIdentical']:
            # update entry
            # NOTE(review): update_entity cannot change an entity's RowKey
            # (it is part of the key) — confirm the intended update here.
            entity.RowKey = rowkey
            entity.rect = face_rect
            table.update_entity(table_name, entity)
            isMatch = True
            break

    if not isMatch:
        # new entry
        entity = Entity()
        entity.PartitionKey = "1"
        entity.RowKey = str(uuid.uuid4())
        entity.rect = face_rect
        table.insert_entity(table_name, entity)

    return func.HttpResponse(entity.RowKey, status_code=200)
def getAll():
    """Return all art uploads in our partition as a JSON array.

    (Removed the dead commented-out sqlite implementation that preceded
    the table-storage code.)
    """
    # NOTE(review): the storage key is hard-coded; move to configuration.
    table_service = TableService(
        account_name='artsharestorage',
        account_key='zCN3F1TuFjeSw8alIDF0bcvSQoLe5tJHRcavpRKZ31JUUkPuHLtVSqP9WJ3oQU7ty/ZAisWl8CDcFtZHsZ15MQ==')
    artuploads = table_service.query_entities(
        'artuploads', filter="PartitionKey eq '" + PARITION_KEY + "'")
    return jsonify([to_json(artupload) for artupload in artuploads])
def get_data(n_points=1000):
    """Fetch up to *n_points* rows from the 'timevssignal' table as a
    DataFrame indexed by RowKey.

    Fixed: ``DataFrame.append`` (deprecated and removed in pandas 2.0, and
    quadratic when called in a loop) is replaced by building the frame
    from a list of Series in one shot.
    """
    # NOTE(review): credentials are hard-coded; move to configuration.
    accName = 'storagebigbyte'
    accKey = '9kFGj5tyjdHdnxY4LfkmcLaAeygcjaaEm9rS+5UvUB741t4lnJ0zrqznx/hi2P3ptAZF1c20YKN6tMnWuFXayA=='
    table_service = TableService(account_name=accName, account_key=accKey)
    tasks = table_service.query_entities('timevssignal',
                                         num_results=n_points)
    rows = [pd.Series(task, name=task.RowKey) for task in tasks]
    return pd.DataFrame(rows)
def update_releases(self, containers: List[MagContainer]):
    """ Update the releases in the MagReleases table based on a list of containers containing MAG releases.

    :param containers: the containers containing MAG releases.
    :return: the number of releases created and the number of errors.
    """
    min_date = pendulum.datetime(1601, 1, 1)
    discovered_date = pendulum.now()

    # Fetch the release rows already recorded in the table.
    table_service = TableService(account_name=self.account_name,
                                 account_key=self.account_key)
    entities = table_service.query_entities(MagRelease.TABLE_NAME)

    # Containers whose names are not yet present in the MagReleases table.
    known = set([entity['SourceContainer'] for entity in entities])
    new_container_index = dict.fromkeys(
        set([container.name for container in containers]) - known, 0)
    num_new_containers = len(new_container_index)
    logging.info(f"Num new containers discovered: {num_new_containers}")

    # Create a release row for each newly discovered container.
    num_created = 0
    num_errors = 0
    for container in containers:
        if container.name not in new_container_index:
            continue
        release = MagRelease('mag',
                             container.release_date.strftime("%Y-%m-%d"),
                             MagState.discovered, MagTask.not_started,
                             container.release_date, container.name,
                             container.last_modified, '', '',
                             discovered_date, min_date, min_date,
                             account_name=self.account_name,
                             account_key=self.account_key)
        if release.create():
            num_created += 1
        else:
            num_errors += 1
    return num_created, num_errors
def main():
    """Print AppClick for every 'Chinese-iOS' telemetry row across all
    dated SystemTelementry tables."""
    dates = date()
    # print(len(dates))

    from azure.cosmosdb.table.tableservice import TableService
    from azure.cosmosdb.table.models import Entity

    # NOTE(review): connection string is hard-coded; move to configuration.
    table_service = TableService(
        connection_string=
        'DefaultEndpointsProtocol=https;AccountName=mtutorlog;AccountKey=5g/uyxepfdZxjrHETQZ0jDj6y6nsU/t6HAMZdPIr16X5BmBCaLlgb1ybsmpwY6jhNHXP6CXrHYdzS0QLTtyXFw==;EndpointSuffix=core.windows.net'
    )

    # One table per date, named 'SystemTelementry<date>' (sic).
    for day in dates:
        # print(day)
        rows = table_service.query_entities('SystemTelementry' + day,
                                            filter="AppName eq 'Chinese-iOS'")
        for row in rows:
            print(row.AppClick)
def getFromTable(wanted_keys, num_results):
    """Fetch up to *num_results* 'Fishlanding' rows and project each onto
    *wanted_keys*, returning a list of plain dicts."""
    connection_string, container_name = get_credentials()
    table_service = TableService(connection_string=connection_string)
    tasks = list(
        table_service.query_entities(table_name=fixed_tablename,
                                     filter="PartitionKey eq 'Fishlanding'",
                                     num_results=num_results))

    # Keep only the wanted keys from each entity.
    keep = set(wanted_keys)
    return [{key: task[key] for key in task if key in keep}
            for task in tasks]
def main(req: func.HttpRequest) -> func.HttpResponse:
    """Mark the listed teams as player-controlled ("p1") in the Teams
    table.

    Teams must arrive as a JSON list in the request body; a 'teams' query
    parameter is rejected with 400 (original behavior preserved).

    Fixed: the filter string is built with ``" or ".join`` instead of the
    original manual counter loop (which also left a dangling
    ``"Name eq '"`` prefix for an empty list).
    """
    logging.info('Python HTTP trigger function processed a request.')

    teams = req.params.get('teams')
    if not teams:
        try:
            req_body = req.get_json()
        except ValueError:
            logging.info('Value error retrieving request body')
        else:
            teams = req_body
            logging.info('Request body information retrieved')

        if isinstance(teams, list):
            logging.info('Valid list passed to function in body')
            # get the table details
            accountkey = os.environ["CupstoreKeyId"]
            logging.info('Table storage account key retrieved from key vault')
            accountname = 'thecupstore'

            # connect to the table and update the player controlled teams
            table_service = TableService(account_name=accountname,
                                         account_key=accountkey)
            # Build "Name eq 'A' or Name eq 'B' ..." in one pass.
            query_string = " or ".join("Name eq '" + team + "'"
                                       for team in teams)
            logging.info('query string: ' + query_string)

            returned_teams = table_service.query_entities(
                'Teams', filter=query_string)
            for team in returned_teams:
                logging.info('editing team: ' + team.Name)
                team.Controlled = "p1"
                table_service.update_entity('Teams', team)
            return func.HttpResponse("", status_code=200)
        else:
            logging.info('Invalid list passed to function in body.')
            return func.HttpResponse("Please provide teams in a list",
                                     status_code=422)
    else:
        return func.HttpResponse("teams argument invalid", status_code=400)
def get_data():
    """Populate x_train/y_train from rows with a positive 'Actual' value;
    for a row without one, build x_test as the next 10 RowKeys.

    Fixed: the bare ``except:`` (which swallowed even KeyboardInterrupt)
    is narrowed to the conversion/attribute errors this block can raise.

    NOTE(review): x_train, y_train and x_test appear to be module-level
    variables defined elsewhere — confirm; also note x_test is a plain
    assignment, so only the last such row wins.
    """
    table_service = TableService(
        connection_string=tablestorageconnectionstring)
    entities = table_service.query_entities('PredictionData', filter="")
    for e in entities:
        try:
            if (float(e.Actual) > 0.0):
                x_train.append(int(e.RowKey))
                y_train.append(float(e.Actual))
        except (AttributeError, TypeError, ValueError):
            # Row has no usable 'Actual': treat it as the prediction window.
            temp = [int(e.RowKey) + x for x in range(10)]
            x_test = temp
            print(temp)
class AzureTable():
    """Convenience facade over TableService for one storage account."""

    def __init__(self, account_name, account_key):
        self.table_service = TableService(account_name=account_name,
                                          account_key=account_key)

    def create_table(self, table_name):
        """Create *table_name* and return the service result."""
        return self.table_service.create_table(table_name)

    def exists_table(self, table_name):
        """True when *table_name* exists."""
        return self.table_service.exists(table_name)

    def insert_or_replace_entity(self, table_name, partition_key, row_key,
                                 **kwargs):
        """Upsert the (partition_key, row_key) entity, merging **kwargs
        into the existing entity when one is found."""
        try:
            entity = self.table_service.get_entity(table_name,
                                                   partition_key, row_key)
        except Exception:
            # Not found (or fetch failed): start a fresh entity.
            entity = {'PartitionKey': partition_key, 'RowKey': row_key}
        for key, value in kwargs.items():
            entity[key] = value
        return self.table_service.insert_or_replace_entity(table_name,
                                                           entity)

    def insert_or_replace_entity2(self, table_name, entity):
        """Upsert a pre-built entity."""
        return self.table_service.insert_or_replace_entity(table_name,
                                                           entity)

    def insert_entity(self, table_name, entity):
        """Insert a new entity (fails when it already exists)."""
        return self.table_service.insert_entity(table_name, entity)

    def update_entity(self, table_name, entity):
        """Replace an existing entity."""
        return self.table_service.update_entity(table_name, entity)

    def get_entity(self, table_name, partition_key, row_key):
        """Fetch one entity by its keys."""
        return self.table_service.get_entity(table_name, partition_key,
                                             row_key)

    def delete_entity(self, table_name, partition_key, row_key):
        """Delete one entity by its keys (returns None, as before)."""
        self.table_service.delete_entity(table_name, partition_key, row_key)

    def delete_table(self, table_name):
        """Drop the whole table."""
        return self.table_service.delete_table(table_name)

    def get_entities(self, table_name, partition_key):
        """Return all entities in *partition_key*."""
        partition_filter = "PartitionKey eq '{0}'".format(partition_key)
        return self.table_service.query_entities(table_name,
                                                 partition_filter)
def main():
    """Join firmware-size rows from Azure Table Storage onto the GitHub
    commit history and write the result to BUILD_DIR/commits.json."""
    # grab size data from Azure: RowKey (a commit oid) -> {hub: size}
    service = TableService(STORAGE_ACCOUNT, STORAGE_KEY)
    sizes = {
        item["RowKey"]: {k: v for k, v in item.items() if k in HUBS}
        for item in service.query_entities(
            FIRMWARE_SIZE_TABLE,
            filter="PartitionKey eq 'size'",
            select=",".join(["RowKey"] + HUBS),
        )
    }

    # merge cplushub into technichub (cplushub was renamed; prefer the
    # technichub value when both are present), then drop the old key
    for v in sizes.values():
        if v["technichub"] is None:
            v["technichub"] = v["cplushub"]
        del v["cplushub"]

    # grab commit data from GitHub via the GraphQL API
    transport = RequestsHTTPTransport(
        url="https://api.github.com/graphql",
        headers={"Authorization": f"bearer {GITHUB_TOKEN}"},
    )
    client = Client(transport=transport, fetch_schema_from_transport=True)
    query = gql(QUERY % "first:100")
    commits = []
    # Placeholder size map for commits with no recorded measurements.
    no_sizes = {h: None for h in HUBS if h != "cplushub"}
    # Page through the commit history 100 at a time using the end cursor.
    while True:
        result = client.execute(query)
        history = result["repository"]["ref"]["target"]["history"]
        for edge in history["edges"]:
            node = edge["node"]
            # Attach the measured sizes (or the placeholder) by commit oid.
            node["firmwareSize"] = sizes.get(node["oid"], no_sizes)
            commits.append(node)
        if not history["pageInfo"]["hasNextPage"]:
            break
        cursor = history["pageInfo"]["endCursor"]
        query = gql(QUERY % f'first:100, after:"{cursor}"')

    # Write the merged data for the static site build.
    Path(BUILD_DIR).mkdir(parents=True, exist_ok=True)
    with open(Path(BUILD_DIR, "commits.json"), "w") as f:
        json.dump(commits, f, indent=4)
def get_trip_from_table_storage(account_name: str, account_key: str,
                                protocol: str, table_endpoint: str,
                                accountId: str) -> list:
    """Query the 'prod' table for all non-archived trips of an account.

    Fixes: a first TableService built from account_name/account_key was
    immediately overwritten by the connection-string one — the dead
    construction is removed; the ``-> [dict]`` annotation (a list literal,
    not a type) is replaced with ``list``.

    NOTE(review): query_entities returns a lazy result set, not a plain
    list — confirm whether callers rely on list semantics.
    """
    connection_string = ("DefaultEndpointsProtocol={};AccountName={};"
                         "AccountKey={};TableEndpoint={};").format(
                             protocol, account_name, account_key,
                             table_endpoint)
    table_service = TableService(endpoint_suffix="table.cosmos.azure.com",
                                 connection_string=connection_string)

    query_string = "PartitionKey eq '{accountId}' and status ne 'archived'".format(
        accountId=accountId)
    return table_service.query_entities(table_name='prod',
                                        filter=query_string)
def writeResults(results_df):
    """Insert game-result rows into the 'results' table, skipping rows
    that already exist.

    Fixes:
      * the dtype-conversion loop iterated as ``i`` but used ``col``
        (NameError in Python 3, where comprehension variables don't leak);
      * the existence filter was mis-quoted
        (``...\\ and RowKey eq \\'...``) so it could never match — it now
        produces ``PartitionKey eq '<pk>' and RowKey eq '<rk>'``.
    """
    # change the type of any non-string columns to string
    for col in [c for c in results_df.columns if type(c) != str]:
        results_df[col] = results_df[col].astype(str)

    results_df["PartitionKey"] = results_df["season"] + "-" + results_df["week"]
    results_df["RowKey"] = results_df["home_team"] + "-at-" + results_df["away_team"]
    print("OK")

    # NOTE(review): account key is hard-coded; move to configuration.
    table_service = TableService(account_name='predictomatic',
                                 account_key='H2LyENOrQJ+QxAHMt3eU6+n4/VMJ3wBFzL9j/eAwf6QYQfkOJKV2r+ArDKYkz1/tToztH/Wp+kDEvfhRlUqaiQ==')

    for key, value in results_df.iterrows():
        # Only insert rows not already present in the table.
        linq = ("PartitionKey eq '" + value["PartitionKey"] +
                "' and RowKey eq '" + value["RowKey"] + "'")
        print(linq)
        if len(list(table_service.query_entities("results",
                                                 filter=linq))) == 0:
            table_service.insert_entity("results", value.to_dict())
class AzureTableData:
    """
    This class handles the functionality of getting data from Azure Table
    Storage and cleaning it.
    """

    def __init__(self, args):
        # args.connection_str carries the storage connection string.
        connect_str = args.connection_str
        self.table_service = TableService(connection_string=connect_str)

    def clean_up_table(self, args):
        """
        Fetches all the images from Azure Table Storage.
        Marks the images as deleted if the image is not present in Azure
        Marketplace.

        Fixes: the image-list file is now closed via a context manager;
        ``l == None or len(l) is 0`` (identity comparison with an int,
        undefined behavior / SyntaxWarning) is replaced by a truthiness
        check.
        """
        with open(args.all_image_list, 'r') as allimages:
            images_in_marketplace = allimages.readlines()

        imagequeryresult = self.table_service.query_entities(
            args.table_name,
            filter="IsDeleted eq 0",
            accept='application/json;odata=minimalmetadata')

        print("Creating list of images")
        list_of_images_to_clean_up = []
        for image in imagequeryresult:
            # The disk version is the trailing '-'-separated token of the key.
            disk_version = image.PartitionKey.split('-')[-1]
            matches = [image_name for image_name in images_in_marketplace
                       if disk_version in image_name]
            if not matches:
                list_of_images_to_clean_up.append(image)

        print("Updating", len(list_of_images_to_clean_up))
        self.mark_deleted(list_of_images_to_clean_up, args.table_name)

    def mark_deleted(self, images, table_name):
        """
        Updates Azure Table Storage record by marking it as deleted
        """
        for i, image in enumerate(images, start=1):
            image.IsDeleted = 1
            self.table_service.update_entity(table_name, image)
            print(i)
def query_azure_db(filterQuery, selectQuery, tableName, rowKeyColumnName):
    """Query a storage table and return the rows as a DataFrame indexed by
    RowKey (renamed to *rowKeyColumnName*); the etag column is dropped."""
    ## Setup the table service (SAS-token auth)
    table_service = TableService(account_name=storage_account_name,
                                 sas_token=sas_token)

    ## Get the data from the Storage Account
    transactions = list(
        table_service.query_entities(tableName,
                                     filter=filterQuery,
                                     select=selectQuery))

    ## Store data in a Pandas dataframe in a neat way
    frame = pd.DataFrame(transactions)
    frame.rename(columns={'RowKey': rowKeyColumnName}, inplace=True)
    frame.set_index(rowKeyColumnName, inplace=True)
    frame.drop(['etag'], axis=1, inplace=True)
    return frame
def main(mytimer: func.TimerRequest, msg: func.Out[List[str]]) -> func.HttpResponse:
    """Timer trigger: copy every alert row into the output queue as JSON
    strings."""
    STORAGE_CONNECTION_STRING = os.environ.get('AzureWebJobsStorage')
    service = TableService(connection_string=STORAGE_CONNECTION_STRING)

    alerts = []
    for item in service.query_entities('alerts'):
        # Project just the alert fields the queue consumer expects.
        payload = {
            'max': item['max'],
            'min': item['min'],
            'coordinates': item['coordinates'],
            'location': item['location'],
            'number': item['number'],
        }
        alerts.append(json.dumps(payload))

    # write queue
    msg.set(alerts)
    return