def insert_recommendation_entry(self, entries):
    """Insert one 'recommendations' row per entry, updating on key conflict.

    Each entry supplies subreddit, query_word, keyword, post_id/comment_id
    lists, sentiment and count.  PartitionKey is "<subreddit>_<query_word>"
    and RowKey is the keyword.
    """
    for entry in entries:
        recommendation = Entity()
        recommendation.PartitionKey = "{0}_{1}".format(
            entry.subreddit, entry.query_word)
        recommendation.RowKey = entry.keyword
        recommendation.subreddit = entry.subreddit
        recommendation.query_word = entry.query_word
        # id lists are flattened to comma-separated strings for table storage
        recommendation.post_id = ','.join(map(str, entry.post_id))
        recommendation.comment_id = ','.join(map(str, entry.comment_id))
        recommendation.sentiment = entry.sentiment
        recommendation.count = entry.count
        try:
            self.table_service.insert_entity('recommendations', recommendation)
        except AzureConflictHttpError:
            # Fix: report the original entry fields directly instead of
            # re-splitting PartitionKey on '_', which mis-parsed subreddits
            # whose names themselves contain an underscore.
            print(
                "The recommendation entry with subreddit = '{0}', search term = '{1}', and keyword = '{2}' already exists in the database. Updating it..."
                .format(entry.subreddit, entry.query_word,
                        recommendation.RowKey))
            self.table_service.update_entity('recommendations', recommendation)
def store_predictions_in_table(predictions, times, table_name="predictedSoilMoistureMessages"):
    """Replace all rows of *table_name* with the given soil-moisture predictions.

    predictions -- sequence of predicted values, one row each
    times       -- matching sequence of datetimes used as PartitionKeys
    """
    # SECURITY NOTE(review): the storage account key is hard-coded here; it
    # should be moved to configuration/environment variables and rotated.
    table_service = TableService(
        account_name='soilhumiditydata293s',
        account_key=
        '4PSsEO1xBAIdq3/MppWm+t6eYHi+CWhVn6xNZ6i4mLVgm50K8+NK6lA94v8MxG0bvVEfYCvsv1suxCyCnUYd0A=='
    )
    # Delete every existing prediction row before storing the new batch.
    for entry in table_service.query_entities(table_name):
        table_service.delete_entity(table_name, entry['PartitionKey'],
                                    entry['RowKey'])
    # Store the new values; RowKey is the index, PartitionKey the timestamp.
    for i, prediction in enumerate(predictions):
        new_entity = Entity()
        new_entity.PartitionKey = datetime.strftime(times[i],
                                                    "%Y-%m-%d %H:%M:%S")
        new_entity.RowKey = str(i)
        new_entity['soilmoistureprediction'] = str(prediction)
        table_service.insert_entity(table_name, new_entity)
def set_stock_min(self, stockdf):
    """Upsert minute-level OHLCV rows into the 'stockM' Azure table.

    PartitionKey is the stock code; RowKey is date + zero-padded HHMM so
    rows sort chronologically within a day.
    """
    stockdf_table = stockdf.rename(columns={"code": "PartitionKey",
                                            "date": "RowKey"})
    stockdf_table = stockdf_table.astype({"time": str, "RowKey": str})
    # Fix: zero-pad each row's time to 4 digits.  The original compared
    # len(str(Series)) -- the length of the Series *repr*, not of any row's
    # value -- so short times like "930" were never padded and RowKeys
    # collided or sorted incorrectly.
    stockdf_table["time"] = stockdf_table["time"].str.zfill(4)
    print(stockdf_table.head())
    stockdf_table["RowKey"] = stockdf_table["RowKey"] + stockdf_table["time"]
    print(stockdf_table.head())
    stockdf_last = stockdf_table[
        ['PartitionKey', 'RowKey', 'time', 'open', 'high', 'low', 'close',
         'volume']]
    print(stockdf_last)
    for _, row in stockdf_last.iterrows():
        values = row.to_dict()  # hoisted: one dict conversion per row
        task = Entity()
        task.PartitionKey = values['PartitionKey']
        task.RowKey = str(values['RowKey'])
        task.time = values['time']
        task.open = values['open']
        task.high = values['high']
        task.low = values['low']
        task.close = values['close']
        task.volume = values['volume']
        self.table_service.insert_or_merge_entity('stockM', task, timeout=None)
def post(self, mail):
    '''
    Adds a message for the user identified by *mail*.

    Verifies the user exists in the 'users' table (404 otherwise), builds a
    protobuf Message from the JSON body, and inserts it into the 'messages'
    table.  Returns 201 on success, 404 for an unknown user, 400 when the
    JSON body is missing required keys.
    '''
    if request.is_json:
        try:
            try:
                # Raises Azure404 when no user row exists for this address.
                table_service.get_entity('users', mail, '')
                message = Entity()
                details = message_pb2.Message()
                details.title = request.json["title"]
                details.content = request.json["content"]
                details.magic_number = request.json["magic_number"]
                message.PartitionKey = mail
                # NOTE(review): RowKey = current row count + 1 is racy under
                # concurrent posts -- two writers can compute the same key.
                message.RowKey = str(
                    len(list(table_service.query_entities('messages'))) + 1)
                # Message payload is stored as serialised JSON bytes.
                message.details = EntityProperty(EdmType.BINARY,
                                                 MessageToJson(details))
                table_service.insert_entity('messages', message)
                return None, 201
            except (Azure404):
                return None, 404
        except (KeyError):
            return 'Please provide a json object conforming to the \
following pattern: {\"title\": \"Message title\", \
\"content\":\"Message content\", \
\"magic_number\": a number}', 400
def put_classification_result(self, image_uuid, results):
    """Upsert the classification result for one image.

    The row is keyed by the image partition key and the image UUID; the
    result object is stored stringified.  Returns the table-service response.
    """
    entity = Entity()
    entity.PartitionKey = self.ImagePartitionKey
    entity.RowKey = image_uuid
    entity.results = str(results)
    return self.table_service.insert_or_replace_entity(self.table_name,
                                                       entity)
def set_stock_day(self, targettable, stockdf):
    """Upsert daily OHLCV rows from *stockdf* into *targettable*.

    Azure tables key on exactly two columns, so the dataframe's 'code'
    column becomes PartitionKey and 'date' becomes RowKey.
    """
    stockdf_table = stockdf.rename(columns={"code": "PartitionKey",
                                            "date": "RowKey"})
    for _, row in stockdf_table.iterrows():
        # Hoisted: the original called row.to_dict() once per field.
        values = row.to_dict()
        task = Entity()
        task.PartitionKey = values['PartitionKey']
        task.RowKey = str(values['RowKey'])
        task.open = values['open']
        task.high = values['high']
        task.low = values['low']
        task.close = values['close']
        task.volume = values['volume']
        self.table_service.insert_or_merge_entity(targettable, task,
                                                  timeout=None)
def main(req: func.HttpRequest) -> func.HttpResponse:
    """Match an uploaded face against stored faces; insert a new row if none match.

    Expects a 'rowkey' query parameter and a JSON body with 'faceRectangle'.
    Returns the matched (or newly created) entity's RowKey, 400 on missing
    parameters, 500 when storage is unreachable.
    """
    logging.info('Python HTTP trigger function processed a request.')
    CF.BaseUrl.set("https://emotiontrack.cognitiveservices.azure.com/face/v1.0")
    # NOTE(review): the Cognitive Services key should live in app settings,
    # not in source.
    CF.Key.set("4a1e0d41a8494d71ac0b9028464d8e62")
    rowkey = req.params.get('rowkey')
    if not rowkey:
        logging.error("Missing parameter(s)")
        return func.HttpResponse("Missing one or more parameter.",
                                 status_code=400)
    face = req.get_json()
    face_rect = face['faceRectangle']
    table = TableService(connection_string=conn_string)
    if not table:
        logging.error("Failed to connect to the storage")
        return func.HttpResponse(
            "Failed to connect to the storage. Please try again later.",
            status_code=500)
    test_img = getFaceImage(table, rowkey, face_rect)
    test_imgIO = io.BytesIO()
    # Fix: Pillow's format identifier is 'JPEG'; format='JPG' raises KeyError.
    test_img.save(test_imgIO, format='JPEG')
    entities = table.query_entities(table_name, filter=None)
    isMatch = False
    for entity in entities:
        img = getFaceImage(table, entity.RowKey, entity.rect)
        imgIO = io.BytesIO()
        img.save(imgIO, format='JPEG')
        try:
            res = CF.face.verify(test_imgIO, imgIO)
        except Exception:
            # Best effort: skip candidates the verify call cannot process
            # instead of failing the whole request.
            logging.exception("Face verification failed for %s", entity.RowKey)
            continue
        if res['isIdentical']:
            # Update the matching entry to the caller-supplied key/rect.
            entity.RowKey = rowkey
            entity.rect = face_rect
            table.update_entity(table_name, entity)
            isMatch = True
            break
    if not isMatch:
        # No match: register this face as a new entry.
        entity = Entity()
        entity.PartitionKey = "1"
        entity.RowKey = str(uuid.uuid4())
        entity.rect = face_rect
        table.insert_entity(table_name, entity)
    return func.HttpResponse(entity.RowKey, status_code=200)
def main(req: func.HttpRequest) -> func.HttpResponse:
    """Create the configured table and load course rows from CSV into it.

    Reads the connection string and table name from config.json, ensures the
    table exists, then inserts one entity per course keyed by
    (subject, instructor).
    """
    logging.info('Python HTTP trigger function processed a request.')
    with open('config.json', 'r') as config_file:
        config_data = json.load(config_file)
    connectionstring = config_data["connectionstring"]
    table_service = TableService(connection_string=connectionstring)
    table = config_data["table"]
    # Ensure the table exists (removed the unused 'tableExists' binding).
    table_service.create_table(table)
    courses = readcsv()
    for item in courses:
        task = Entity()
        task.PartitionKey = item.subject
        task.RowKey = item.instructor
        task.lectures = item.lectures
        task.labs = item.labs
        task.points = item.points
        task.isWeekend = item.isWeekend
        table_service.insert_entity(table, task)
    return func.HttpResponse(
        "Cosmos DB - Table API example database is created.")
def data_load(self):
    """Reset the policy table to the built-in rule data set.

    Wipes every existing row, then inserts one entity per (category, value)
    pair.  Returns True when repopulation completes.
    """
    data_set = {
        "required-modules": [
            "custom-vnet|azurerm", "custom-sg|azurerm", "custom-blob|azurerm",
            "custom-vpc|aws", "custom-sg|aws"
        ],
        "approved-instances": [
            "Standard_A1_v2|azurerm", "Standard_A2_v2|azurerm",
            "Standard_A4_v2|azurerm", "Standard_A8_v2|azurerm",
            "t3.micro|aws", "t3.small|aws", "t3.medium|aws", "t3.large|aws"
        ],
        "prohibited-resources": [
            "azurerm_resource_group|azurerm",
            "azurerm_virtual_network|azurerm",
            "azurerm_network_security_group|azurerm",
            "azurerm_subnet_network_security_group_association|azurerm",
            "aws_internet_gateway|aws", "aws_route|aws",
            "aws_route_table|aws", "aws_route_table_association|aws",
            "aws_subnet|aws", "aws_vpc|aws", "aws_security_group|aws"
        ],
        "allowed-resources": [
            "azurerm_virtual_machine|azurerm",
            "azurerm_network_interface|azurerm", "azurerm_public_ip|azurerm",
            "azurerm_storage_account|azurerm", "aws_instance|aws",
            "aws_s3_bucket|aws", "aws_s3_bucket_policy|aws"
        ],
        "prevent-deletion": ["true"],
        "default-provider": ["azurerm"],
        "mandatory-tags": ["Department", "Environment"],
        "max-cost": ["100"],
        "ddb-encryption": ["true"],
        "no-star-access": ["true"]
    }
    # Wipe the table's current contents.
    for existing in self.table_service.query_entities(self.table_name):
        self.table_service.delete_entity(self.table_name,
                                         existing.PartitionKey,
                                         existing.RowKey)
    # Repopulate: category -> PartitionKey, value -> RowKey.
    for category, values in data_set.items():
        for value in values:
            entity = Entity()
            entity.PartitionKey = category
            entity.RowKey = value
            self.table_service.insert_entity(self.table_name, entity)
    return True
def update_storage_cluster_table(**kwargs):
    """Record one table row per container group, inserted concurrently.

    Expects keyword arguments 'storage_table_service', 'cluster_id' and
    'container_groups'.  Each row captures the group's name, IP, and the
    first container's CPU/memory requests.
    """
    import uuid
    storage_table_service = kwargs.get('storage_table_service')
    cluster_id = kwargs.get('cluster_id')
    container_groups = kwargs.get('container_groups')

    entities = []
    for group in container_groups:
        entity = Entity()
        entity.PartitionKey = 'id'
        entity.RowKey = str(uuid.uuid4())
        entity.id = group.name
        entity.ip = group.ip_address.ip
        first_container = group.containers[0]
        entity.cpu = first_container.resources.requests.cpu
        entity.memory_in_gb = first_container.resources.requests.memory_in_gb
        entities.append(entity)

    # Fan the inserts out over a thread pool; wait at most 30s for all.
    with concurrent.futures.ThreadPoolExecutor() as executor:
        futures = {
            executor.submit(insert_entity_to_table, storage_table_service,
                            cluster_id, entity): entity
            for entity in entities
        }
        concurrent.futures.wait(futures, timeout=30,
                                return_when=concurrent.futures.ALL_COMPLETED)
def add_to_list(self, list_name, value, provider=None):
    """Append *value* to the named list in table storage.

    When *provider* is given, the stored RowKey is suffixed "|<provider>".
    Returns the (possibly updated) in-memory list; a duplicate insert
    (ValueError) leaves the list unchanged.
    """
    suffix = "" if provider is None else "|" + provider
    item = Entity()
    item.PartitionKey = list_name
    item.RowKey = value + suffix
    main_list = self.get_list(list_name, provider)
    try:
        self.table_service.insert_entity(self.table_name, item)
    except ValueError:
        pass  # already present: keep list as-is
    else:
        main_list.append(value)
    return main_list
def create_table_entity(details):
    """Build an Azure Table entity from a person record.

    PartitionKey is "python-<FirstName>", RowKey is Surname plus the numeric
    Id; the name fields are copied onto the entity verbatim.
    """
    entity = Entity()
    entity.PartitionKey = "python-" + details.FirstName
    entity.RowKey = details.Surname + str(details.Id)
    for attr in ("FirstName", "Surname", "FullName"):
        setattr(entity, attr, getattr(details, attr))
    return entity
def post(self):
    """
    Inserts user into an Azure table.

    The request body must be JSON with an 'email' key; the full payload is
    stored as binary JSON in the 'users' table.  Returns 201 on success,
    400 for a non-JSON request or a body missing 'email'.
    """
    if not request.is_json:
        return 'Please supply a json object in order to add a user.', 400
    try:
        user = Entity()
        user.PartitionKey = request.json["email"]
        user.RowKey = ''
        user.info = EntityProperty(EdmType.BINARY, dumps(request.json))
        table_service.insert_or_replace_entity('users', user)
    except KeyError:
        return 'Please provide a json object conforming to \
the following pattern: {\"email\":\"[email protected]\",\
\"password\":\"xxx\", \"full_name\":\"Foo Bar\"}', 400
    return None, 201
def set_value(self, key, value):
    """Store key -> value in the table, replacing any existing row(s) for key.

    PartitionKey holds the key and RowKey the value.  Returns True on
    success, False when the table operation raised ValueError.
    """
    item = Entity()
    item.PartitionKey = key
    item.RowKey = value
    retval = False
    try:
        # Escape single quotes so a key containing "'" cannot break (or
        # inject into) the OData filter expression.
        safe_key = key.replace("'", "''")
        entries = self.table_service.query_entities(
            self.table_name, filter="PartitionKey eq '" + safe_key + "'")
        # Delete each row actually found -- fixes attempting to delete a
        # bogus placeholder RowKey when the key had no prior entry.
        for entry in entries:
            self.table_service.delete_entity(self.table_name, key,
                                             entry.RowKey)
        self.table_service.insert_entity(self.table_name, item)
    except ValueError:
        pass
    else:
        retval = True
    return retval
def computeFaceDetectionAmazon(self, face_image_url, file_name, gender):
    """Run AWS Rekognition face detection and log the outcome to table 'FFS3'.

    Downloads the image, detects faces, and marks success when the first
    face's predicted gender matches *gender* case-insensitively.  Returns
    (success, faces, faceDetected, genderPrediction.lower()).
    """
    table_name = 'FFS3'
    partition_name = 'Amazon'
    urllib.request.urlretrieve(face_image_url, file_name)
    with open(file_name, 'rb') as image:
        response = self.amazon_client.detect_faces(
            Image={'Bytes': image.read()}, Attributes=['ALL'])
    faces = response['FaceDetails']
    faceDetected = bool(faces)
    genderPrediction = 'None'
    success = False
    if faceDetected:
        genderPrediction = faces[0]["Gender"]["Value"]
        success = gender.lower() == genderPrediction.lower()
    face_entry = Entity()
    face_entry.PartitionKey = partition_name
    face_entry.RowKey = file_name
    face_entry.Result = json.dumps(faces)
    face_entry.DetectionSuccess = success
    self.table_service.insert_entity(table_name, face_entry)
    return success, faces, faceDetected, genderPrediction.lower()
def computeFaceDetectionIBM(self, face_image_url, file_name, gender):
    """Run IBM Watson face detection and log the outcome to table 'IBM'.

    Downloads the image, detects faces, and marks success when the first
    face's predicted gender matches *gender* case-insensitively.  Returns
    (success, faces, faceDetected, genderPrediction.lower()).
    """
    table_name = 'IBM'
    partition_name = 'IBM'
    urllib.request.urlretrieve(face_image_url, file_name)
    with open(file_name, 'rb') as image_file:
        response = self.IBM_visual_recognition.detect_faces(image_file)
    faces = response['images'][0]['faces']
    faceDetected = bool(faces)
    genderPrediction = 'None'
    success = False
    if faceDetected:
        genderPrediction = faces[0]["gender"]["gender"]
        success = gender.lower() == genderPrediction.lower()
    face_entry = Entity()
    face_entry.PartitionKey = partition_name
    face_entry.RowKey = file_name
    # Unlike the Amazon variant, the IBM row stores the full API response.
    face_entry.Result = json.dumps(response)
    face_entry.DetectionSuccess = success
    self.table_service.insert_entity(table_name, face_entry)
    return success, faces, faceDetected, genderPrediction.lower()
def computeFaceDetectionMicrosoft(self, face_image_url, file_name, gender):
    """Run Microsoft Face API detection and log the outcome to table 'Microsoft'.

    Posts the image URL to the Face API and marks success when the first
    face's predicted gender matches *gender*.  Returns
    (success, faces, faceDetected, genderPrediction.lower()).
    """
    table_name = 'Microsoft'
    partition_name = 'Microsoft'
    data = {'url': face_image_url}
    response = requests.post(self.msft_face_detection_url,
                             params=self.msft_params,
                             headers=self.msft_headers, json=data)
    faces = response.json()
    success = False
    genderPrediction = 'None'
    faceDetected = False
    if len(faces) > 0:
        faceDetected = True
        genderPrediction = faces[0]["faceAttributes"]["gender"]
        # Fix: compare case-insensitively, consistent with the Amazon, IBM
        # and Face++ detectors; the raw comparison failed for inputs such
        # as "Male" vs the API's lowercase "male".
        if gender.lower() == genderPrediction.lower():
            success = True
    face_entry = Entity()
    face_entry.PartitionKey = partition_name
    face_entry.RowKey = file_name
    face_entry.Result = json.dumps(faces)
    face_entry.DetectionSuccess = success
    self.table_service.insert_entity(table_name, face_entry)
    return success, faces, faceDetected, genderPrediction.lower()
def results_to_table_entity(self):
    """
    Format the results into an Azure Storage Table entity.

    PartitionKey is the node name; RowKey is the check-run id zero-padded
    to 50 characters so keys sort lexicographically by run id.

    :return: azure.cosmodb.table.models.Entity or None when there are no
        results.
    """
    if not self.results:
        return None
    temp_results = copy.deepcopy(self.results)
    entity = Entity()
    entity.PartitionKey = temp_results.get('node_name')
    run_id = temp_results.get('check_run_id')
    entity.RowKey = run_id.zfill(50)
    # Keys would collide with the entity's own keys -- strip them.
    for item in ('PartitionKey', 'RowKey'):
        temp_results.pop(item, None)
    try:
        json_data = json.dumps(temp_results)
        entity.update({'json_data': json_data})
    except TypeError:
        # Fix: json.dumps raises TypeError for unserialisable values;
        # the original caught JSONDecodeError, which only json.loads
        # raises, so the handler could never fire.
        logging.warning(
            'Failed to JSONify results_to_table_entity value. '
            f'Original value: {temp_results}')
        raise
    return entity
def store_feedback_skills(job_id, hm_id, cand_id, skills_scores, recommendation, org):
    """Upsert a hiring manager's skill-score feedback for one candidate.

    PartitionKey is a fresh UUID and RowKey the candidate id; the score
    mapping is stored as JSON with a last-updated timestamp.
    Note: 'recommendation' and 'org' are accepted but not stored -- kept
    for interface compatibility with callers.
    """
    task = Entity()
    task.PartitionKey = str(uuid.uuid4())
    task.HmId = hm_id
    task.RowKey = str(cand_id)
    task.LastUpdated = str(datetime.now())
    task.SkillScores = json.dumps(skills_scores)
    task.JobId = str(job_id)
    # Removed the no-op `try: ... except: raise` wrapper (bare except that
    # immediately re-raised); exceptions propagate exactly as before.
    table_service.insert_or_replace_entity(table_name, task)
def LogMessage(self, msg):
    """log a message into DB for backup"""
    # NOTE(review): message fields are interpolated straight into the SQL
    # text -- content containing quotes can break or inject into this
    # statement; __ExecNonQuery should take parameterized queries instead.
    cmd = "INSERT INTO dbo.RawMsg (TimeStamp, RawMsg, FromUser, ToUser, MsgType) VALUES "+\
        "('%s',N'%s','%s','%s', '%s')" % (msg.TimeStamp, msg.RawContent, msg.FromUser, msg.ToUser, msg.MsgType)
    self.__ExecNonQuery(cmd)
    if self.save_to_azuretable:
        # Mirror the message into Azure Table storage as a JSON blob.
        json_string = json.dumps([{'time' : msg.TimeStamp.strftime("%Y-%m-%d %H:%M:%S"), \
            'content' : msg.RawContent,'from' : msg.FromUser,'to' : msg.ToUser,'type' : msg.MsgType}], ensure_ascii=False)
        task = Entity()
        # PartitionKey groups rows by month; RowKey has second resolution.
        task.PartitionKey = msg.TimeStamp.strftime("%Y%m")
        task.RowKey = msg.TimeStamp.strftime("%Y%m%d%H%M%S")
        task.description = json_string
        iRetry = 1
        iRetryMax = 10
        while iRetry <= iRetryMax:  #retry for 10 times
            try:
                self.azuretable_service.insert_entity(
                    'robertlograwmsg', task)
            # NOTE(review): bare except assumes a RowKey collision -- it
            # bumps the key by iRetry seconds and retries, but this also
            # silently retries on unrelated failures.
            except:
                task.RowKey = (msg.TimeStamp + datetime.timedelta(
                    seconds=iRetry)).strftime("%Y%m%d%H%M%S")
                iRetry += 1
            else:
                # Success: force the loop to terminate.
                iRetry = iRetryMax + 1
def AppendAction(self, act):
    """log a user action into DB for futhure query"""
    # NOTE(review): action fields are interpolated straight into the SQL
    # text -- detail strings containing quotes can break or inject into
    # this statement; prefer parameterized queries via __ExecNonQuery.
    cmdstr = "INSERT INTO dbo.Actions (CreateTime, ActionType, FromUser, ActionDetail, ActionStatus) VALUES "+\
        "('%s','%s','%s',N'%s', '%s')" % (act.TimeStamp, act.Type, act.FromUser, act.Detail, act.Status)
    self.__ExecNonQuery(cmdstr)
    if self.save_to_azuretable:
        # Mirror the action into Azure Table storage as a JSON blob.
        json_string = json.dumps([{'time' : act.TimeStamp.strftime("%Y-%m-%d %H:%M:%S"),\
            'type' : act.Type, 'from' : act.FromUser, 'detail' : act.Detail, 'status' : act.Status}], ensure_ascii=False)
        task = Entity()
        # PartitionKey groups rows by month; RowKey has second resolution.
        task.PartitionKey = act.TimeStamp.strftime("%Y%m")
        task.RowKey = act.TimeStamp.strftime("%Y%m%d%H%M%S")
        task.description = json_string
        iRetry = 1
        while iRetry <= 10:  #retry for 10 times
            try:
                self.azuretable_service.insert_entity(
                    'robertlogaction', task)
            # NOTE(review): bare except assumes a RowKey collision -- it
            # bumps the key by iRetry seconds and retries, but this also
            # silently retries on unrelated failures.
            except:
                task.RowKey = (act.TimeStamp + datetime.timedelta(
                    seconds=iRetry)).strftime("%Y%m%d%H%M%S")
                iRetry += 1
            else:
                # Success: force the loop to terminate.
                iRetry = 11
def __convert_task_to_entity(partition_key, task):
    """Map one task object onto an Azure Table Entity.

    The entity is keyed by (partition_key, task.id) and carries the task's
    scheduling state, command line, exit code and timing fields.
    """
    fields = {
        'PartitionKey': partition_key,
        'RowKey': task.id,
        'node_id': task.node_id,
        'state': task.state.value,
        'state_transition_time': task.state_transition_time,
        'command_line': task.command_line,
        'exit_code': task.exit_code,
        'start_time': task.start_time,
        'end_time': task.end_time,
        'failure_info': task.failure_info,
    }
    return Entity(**fields)
def table():
    """Smoke-test Azure Table storage: insert, read back, then list entities.

    Inserts a throwaway 'dlws' row into the configured table, fetches it
    by key, and prints every entity in 'tasktable'.
    """
    table_service = TableService(account_name=config.STORAGE_ACCOUNT_NAME,
                                 account_key=config.STORAGE_ACCOUNT_KEY)
    table_name = config.TABLE_NAME
    imageId = str(uuid.uuid4())

    task = Entity()
    task.PartitionKey = 'dlws'
    task.RowKey = imageId
    task.description = 'test'
    table_service.insert_or_replace_entity(table_name, task)

    fetched = table_service.get_entity(table_name, 'dlws', imageId)
    print(fetched.description)

    for entry in table_service.query_entities('tasktable'):
        print(entry.description)
        print(entry.RowKey)
def save_answer(answer_id, question_id, user_id, result):
    """Persist one answer row in the 'fsanswers' table.

    Keyed by (answer_id, question_id); stores the result value and the
    authoring user's id.
    """
    entity = Entity()
    entity.PartitionKey = answer_id
    entity.RowKey = question_id
    entity.result = result
    entity.created_by = user_id
    table_service.insert_entity('fsanswers', entity)
def computeFaceDetectionFacePlusPlus(self, face_image_url, file_name, gender):
    """Run Face++ detection on an image URL and log the outcome.

    Builds a multipart/form-data request by hand, marks success when the
    first face's predicted gender matches *gender* case-insensitively, and
    stores the raw API response in the 'FacePlusPlus' table.  Returns
    (success, faces, faceDetected, genderPrediction.lower()), or
    (None, None, None, None) on an HTTP error.
    """
    table_name = 'FacePlusPlus'
    partition_name = 'FacePlusPlus'
    # Hand-rolled multipart/form-data body.
    boundary = '----------%s' % hex(int(time.time() * 1000))
    form_fields = [
        ('api_key', self.faceplusplus_key),
        ('api_secret', self.faceplusplus_secret),
        ('return_attributes', 'gender'),
        ('image_url', face_image_url),
    ]
    data = []
    for field_name, field_value in form_fields:
        data.append('--%s' % boundary)
        data.append('Content-Disposition: form-data; name="%s"\r\n' % field_name)
        data.append(field_value)
    data.append('--%s--\r\n' % boundary)
    http_body = '\r\n'.join(data)
    req = urllib.request.Request(self.faceplusplus_http_url)
    req.add_header('Content-Type',
                   'multipart/form-data; boundary=%s' % boundary)
    req.data = str.encode(http_body)
    try:
        resp = urllib.request.urlopen(req, timeout=5)
        qrcont = resp.read().decode("utf-8")
        faces = json.loads(qrcont)
        success = False
        faceDetected = False
        genderPrediction = 'None'
        # Fix: require a non-empty face list -- the original indexed
        # faces["faces"][0] and raised IndexError when the API returned
        # "faces": [].
        if faces.get('faces'):
            faceDetected = True
            genderPrediction = faces["faces"][0]["attributes"]["gender"]["value"]
            if gender.lower() == genderPrediction.lower():
                success = True
        elif 'faces' not in faces:
            # API-level error payload (e.g. rate limiting): record the
            # outcome as unknown and back off briefly.
            success = None
            faceDetected = None
            time.sleep(2)
        face_entry = Entity()
        face_entry.PartitionKey = partition_name
        face_entry.RowKey = file_name
        face_entry.Result = json.dumps(faces)
        face_entry.DetectionSuccess = success
        self.table_service.insert_entity(table_name, face_entry)
        return success, faces, faceDetected, genderPrediction.lower()
    except urllib.request.HTTPError:
        return None, None, None, None
def set_container_location():
    """Upsert a shipping container's reported location in the 'emerson' table.

    The JSON body must carry 'DeviceId' (400 otherwise); latitude, longitude
    and timestamp come from numbered keys of its 'Properties' object.
    Returns the stored entity as JSON with status 201.
    """
    payload = request.json
    # Reject non-JSON bodies and bodies without a tracker id.
    if not payload or 'DeviceId' not in payload:
        abort(400)
    e = Entity()
    e.PartitionKey = 'container'
    e.RowKey = payload['DeviceId']
    e.device_id = payload['DeviceId']
    props = payload['Properties']
    e.latitude = props['1012']
    e.longitude = props['1013']
    e.time = props['5018']
    table_service.insert_or_replace_entity('emerson', e)
    # Acknowledge with the submitted shipping location.
    return jsonify(e), 201
def upload_price(self, price, fuel_type, location):
    """Insert one fuel-price observation into the table.

    Rows live in the fixed 'trondelag' partition with a random UUID RowKey.
    Returns a human-readable status string.
    """
    entry = Entity()
    try:
        entry.PartitionKey = "trondelag"
        entry.RowKey = str(uuid.uuid4())  # Generate new random UUID
        entry.price = price
        entry.location = location
        entry.fueltype = fuel_type
        self.table_service.insert_entity(self.table_name, entry)
    except AttributeError:
        # Fix: use .format() rather than '+' concatenation, which itself
        # raised TypeError inside the handler when price/fuel_type were
        # not strings.
        print("Error trying to upload: Fuel type '{0}' Price '{1}'".format(
            fuel_type, price))
        return "Something went wrong. Try check your syntax"
    return "Price inserted successfully"
def main(req: func.HttpRequest) -> func.HttpResponse:
    """HTTP-triggered insert of one measurement row.

    Reads 'pid', 'ts' (integer) and 'value' from the query string plus a
    JSON body, stores them keyed by a fresh UUID, and returns that RowKey.
    Returns 400 when a parameter is missing or 'ts' is not an integer.
    """
    logging.info('Python HTTP trigger function processed a request.')
    pid = req.params.get('pid')
    ts_raw = req.params.get('ts')
    value = req.params.get('value')
    # Fix: int(None) previously raised an unhandled TypeError (HTTP 500)
    # when 'ts' was absent; validate and answer 400 instead, matching the
    # parameter handling of the other HTTP triggers in this file.
    if pid is None or ts_raw is None or value is None:
        return func.HttpResponse("Missing one or more parameter.",
                                 status_code=400)
    try:
        ts = int(ts_raw)
    except ValueError:
        return func.HttpResponse("Parameter 'ts' must be an integer.",
                                 status_code=400)
    data = req.get_json()
    new_es = Entity()
    new_es.PartitionKey = "1"
    new_es.RowKey = str(uuid.uuid4())
    new_es.pid = pid
    new_es.ts = ts
    new_es.value = value
    new_es.data = data
    table.insert_entity(table_name, new_es)
    return func.HttpResponse(str(new_es.RowKey), status_code=200)
def set_target_stock(self, df_target):
    """Upsert target-stock rows into the 'stocktarget' table.

    Expects columns ['price', 'volume', 'per', 'eps']; PartitionKey is
    today's date (YYYYMMDD) and RowKey the stock code.
    """
    df_target["date"] = time.strftime('%Y%m%d')
    stockdf_table = df_target.rename(columns={"date": "PartitionKey",
                                              "code": "RowKey"})
    for _, row in stockdf_table.iterrows():
        # Hoisted: the original called row.to_dict() once per field.
        values = row.to_dict()
        task = Entity()
        task.PartitionKey = values['PartitionKey']
        task.RowKey = str(values['RowKey'])
        task.price = values['price']
        task.volume = values['volume']
        task.per = values['per']
        task.eps = values['eps']
        self.table_service.insert_or_merge_entity('stocktarget', task)
        print(">> set target stock..." + str(values['RowKey']))
def main(req: func.HttpRequest) -> func.HttpResponse:
    """Insert a (name, school) row keyed by a fresh UUID and return the RowKey."""
    logging.info('Python HTTP trigger function processed a request.')
    person = Entity()
    person.PartitionKey = "1"
    person.RowKey = str(uuid.uuid4())
    person.name = req.params.get('name')
    person.school = req.params.get('school')
    table.insert_entity(table_name, person)
    return func.HttpResponse(str(person.RowKey), status_code=200)