def LogMessage(self, msg):
    """Log a raw message into the SQL DB and, optionally, an Azure table backup.

    ``msg`` must carry ``TimeStamp`` (datetime), ``RawContent``, ``FromUser``,
    ``ToUser`` and ``MsgType`` attributes.
    """
    # SECURITY(review): SQL is built by string interpolation and is
    # injection-prone; parameterize in __ExecNonQuery if the driver allows.
    cmd = "INSERT INTO dbo.RawMsg (TimeStamp, RawMsg, FromUser, ToUser, MsgType) VALUES " + \
          "('%s',N'%s','%s','%s', '%s')" % (msg.TimeStamp, msg.RawContent,
                                            msg.FromUser, msg.ToUser, msg.MsgType)
    self.__ExecNonQuery(cmd)
    if self.save_to_azuretable:
        json_string = json.dumps(
            [{'time': msg.TimeStamp.strftime("%Y-%m-%d %H:%M:%S"),
              'content': msg.RawContent,
              'from': msg.FromUser,
              'to': msg.ToUser,
              'type': msg.MsgType}],
            ensure_ascii=False)
        task = Entity()
        task.PartitionKey = msg.TimeStamp.strftime("%Y%m")
        task.RowKey = msg.TimeStamp.strftime("%Y%m%d%H%M%S")
        task.description = json_string
        # RowKey collides when two messages land in the same second; retry
        # with the timestamp bumped by +retry seconds, up to 10 attempts.
        for retry in range(1, 11):
            try:
                self.azuretable_service.insert_entity('robertlograwmsg', task)
            except Exception:  # was a bare except; narrowed to Exception
                task.RowKey = (msg.TimeStamp + datetime.timedelta(
                    seconds=retry)).strftime("%Y%m%d%H%M%S")
            else:
                break
def update_storage_cluster_table(**kwargs):
    """Mirror container-group metadata into a cluster's storage table.

    Expected kwargs: ``storage_table_service``, ``cluster_id``,
    ``container_groups``.  Inserts are submitted concurrently and awaited
    for at most 30 seconds.
    """
    import uuid

    svc = kwargs.get('storage_table_service')
    cluster = kwargs.get('cluster_id')
    groups = kwargs.get('container_groups')

    rows = []
    for group in groups:
        row = Entity()
        row.PartitionKey = 'id'
        row.RowKey = str(uuid.uuid4())
        row.id = group.name
        row.ip = group.ip_address.ip
        requested = group.containers[0].resources.requests
        row.cpu = requested.cpu
        row.memory_in_gb = requested.memory_in_gb
        rows.append(row)

    with concurrent.futures.ThreadPoolExecutor() as pool:
        pending = {
            pool.submit(insert_entity_to_table, svc, cluster, row): row
            for row in rows
        }
        concurrent.futures.wait(pending, timeout=30,
                                return_when=concurrent.futures.ALL_COMPLETED)
def set_stock_day(self, targettable, stockdf):
    """Upsert daily OHLCV rows from a DataFrame into an Azure table.

    Azure tables allow only two key columns, so the 'code' column becomes
    PartitionKey and 'date' becomes RowKey.
    """
    stockdf_table = stockdf.rename(columns={"code": "PartitionKey",
                                            "date": "RowKey"})
    for _, row in stockdf_table.iterrows():
        # Convert once per row instead of once per field (was 7x to_dict()).
        values = row.to_dict()
        task = Entity()
        task.PartitionKey = values['PartitionKey']
        task.RowKey = str(values['RowKey'])
        task.open = values['open']
        task.high = values['high']
        task.low = values['low']
        task.close = values['close']
        task.volume = values['volume']
        self.table_service.insert_or_merge_entity(targettable, task,
                                                  timeout=None)
def computeFaceDetectionIBM(self, face_image_url, file_name, gender):
    """Run IBM Visual Recognition face detection and log the result.

    Returns (success, faces, faceDetected, predicted_gender_lowercase).
    """
    table_name = 'IBM'
    partition_name = 'IBM'
    # Download the image locally, then submit it to the IBM API.
    urllib.request.urlretrieve(face_image_url, file_name)
    with open(file_name, 'rb') as image_file:
        response = self.IBM_visual_recognition.detect_faces(image_file)
    faces = response['images'][0]['faces']

    success = False
    genderPrediction = 'None'
    faceDetected = bool(faces)
    if faces:
        genderPrediction = faces[0]["gender"]["gender"]
        success = gender.lower() == genderPrediction.lower()

    face_entry = Entity()
    face_entry.PartitionKey = partition_name
    face_entry.RowKey = file_name
    face_entry.Result = json.dumps(response)
    face_entry.DetectionSuccess = success
    self.table_service.insert_entity(table_name, face_entry)
    return success, faces, faceDetected, genderPrediction.lower()
def computeFaceDetectionAmazon(self, face_image_url, file_name, gender):
    """Run AWS Rekognition face detection and log the result.

    Returns (success, faces, faceDetected, predicted_gender_lowercase).
    """
    table_name = 'FFS3'
    partition_name = 'Amazon'
    # Download the image locally, then submit its bytes to Rekognition.
    urllib.request.urlretrieve(face_image_url, file_name)
    with open(file_name, 'rb') as image:
        response = self.amazon_client.detect_faces(
            Image={'Bytes': image.read()}, Attributes=['ALL'])
    faces = response['FaceDetails']

    genderPrediction = 'None'
    faceDetected = bool(faces)
    success = False
    if faces:
        genderPrediction = faces[0]["Gender"]["Value"]
        success = gender.lower() == genderPrediction.lower()

    face_entry = Entity()
    face_entry.PartitionKey = partition_name
    face_entry.RowKey = file_name
    face_entry.Result = json.dumps(faces)
    face_entry.DetectionSuccess = success
    self.table_service.insert_entity(table_name, face_entry)
    return success, faces, faceDetected, genderPrediction.lower()
def insert_recommendation_entry(self, entries):
    """Insert recommendation rows; on key conflict, update the existing row."""
    for entry in entries:
        rec = Entity()
        rec.PartitionKey = "{0}_{1}".format(entry.subreddit, entry.query_word)
        rec.RowKey = entry.keyword
        rec.subreddit = entry.subreddit
        rec.query_word = entry.query_word
        rec.post_id = ','.join(map(str, entry.post_id))
        rec.comment_id = ','.join(map(str, entry.comment_id))
        rec.sentiment = entry.sentiment
        rec.count = entry.count
        try:
            self.table_service.insert_entity('recommendations', rec)
        except AzureConflictHttpError as error:
            # Row already exists for this (subreddit, query, keyword): merge.
            subreddit_query_word = rec.PartitionKey.split('_')
            print(
                "The recommendation entry with subreddit = '{0}', search term = '{1}', and keyword = '{2}' already exists in the database. Updating it..."
                .format(subreddit_query_word[0], subreddit_query_word[1],
                        rec.RowKey))
            self.table_service.update_entity('recommendations', rec)
def results_to_table_entity(self):
    """
    Format the results into an Azure Storage Table entity.

    :return: azure.cosmodb.table.models.Entity or None
    """
    if self.results:
        temp_results = copy.deepcopy(self.results)
        entity = Entity()
        entity.PartitionKey = temp_results.get('node_name')
        run_id = temp_results.get('check_run_id')
        # Zero-pad to 50 chars so RowKeys sort lexicographically by run id.
        entity.RowKey = run_id.zfill(50)
        for item in ('PartitionKey', 'RowKey'):
            if item in temp_results:
                del temp_results[item]
        try:
            json_data = json.dumps(temp_results)
            entity.update({'json_data': json_data})
        except TypeError:
            # BUG FIX: json.dumps raises TypeError on unserializable values;
            # json.JSONDecodeError is a loads-side error, so the old handler
            # could never fire.
            logging.warning(
                'Failed to JSONify results_to_table_entity value. '
                f'Original value: {temp_results}')
            raise
        return entity
def post(self, mail):
    ''' Adds a message '''
    if request.is_json:
        try:
            try:
                # Verify the recipient exists before accepting the message.
                table_service.get_entity('users', mail, '')
                message = Entity()
                details = message_pb2.Message()
                details.title = request.json["title"]
                details.content = request.json["content"]
                details.magic_number = request.json["magic_number"]
                message.PartitionKey = mail
                # NOTE(review): deriving RowKey from a table count is
                # race-prone under concurrent posts.
                message.RowKey = str(
                    len(list(table_service.query_entities('messages'))) + 1)
                message.details = EntityProperty(EdmType.BINARY,
                                                 MessageToJson(details))
                table_service.insert_entity('messages', message)
                return None, 201
            except (Azure404):
                return None, 404
        except (KeyError):
            return ('Please provide a json object conforming to the '
                    'following pattern: {"title": "Message title", '
                    '"content":"Message content", '
                    '"magic_number": a number}', 400)
def AppendAction(self, act):
    """Log a user action into the SQL DB (and optionally an Azure table) for future query.

    ``act`` must carry ``TimeStamp`` (datetime), ``Type``, ``FromUser``,
    ``Detail`` and ``Status`` attributes.
    """
    # SECURITY(review): SQL is built by string interpolation and is
    # injection-prone; parameterize in __ExecNonQuery if the driver allows.
    cmdstr = "INSERT INTO dbo.Actions (CreateTime, ActionType, FromUser, ActionDetail, ActionStatus) VALUES " + \
             "('%s','%s','%s',N'%s', '%s')" % (act.TimeStamp, act.Type,
                                               act.FromUser, act.Detail, act.Status)
    self.__ExecNonQuery(cmdstr)
    if self.save_to_azuretable:
        json_string = json.dumps(
            [{'time': act.TimeStamp.strftime("%Y-%m-%d %H:%M:%S"),
              'type': act.Type,
              'from': act.FromUser,
              'detail': act.Detail,
              'status': act.Status}],
            ensure_ascii=False)
        task = Entity()
        task.PartitionKey = act.TimeStamp.strftime("%Y%m")
        task.RowKey = act.TimeStamp.strftime("%Y%m%d%H%M%S")
        task.description = json_string
        # RowKey collides when two actions land in the same second; retry
        # with the timestamp bumped by +retry seconds, up to 10 attempts.
        for retry in range(1, 11):
            try:
                self.azuretable_service.insert_entity('robertlogaction', task)
            except Exception:  # was a bare except; narrowed to Exception
                task.RowKey = (act.TimeStamp + datetime.timedelta(
                    seconds=retry)).strftime("%Y%m%d%H%M%S")
            else:
                break
def store_predictions_in_table(predictions, times,
                               table_name="predictedSoilMoistureMessages"):
    """Replace the contents of the predictions table with fresh values.

    ``predictions[i]`` is stored keyed by its timestamp ``times[i]``.
    """
    # SECURITY(review): storage credentials are hard-coded in source;
    # move them to configuration/environment and rotate this key.
    table_service = TableService(
        account_name='soilhumiditydata293s',
        account_key=
        '4PSsEO1xBAIdq3/MppWm+t6eYHi+CWhVn6xNZ6i4mLVgm50K8+NK6lA94v8MxG0bvVEfYCvsv1suxCyCnUYd0A=='
    )

    # Delete existing table predictions first.
    for entry in table_service.query_entities(table_name):
        table_service.delete_entity(table_name, entry['PartitionKey'],
                                    entry['RowKey'])

    # Store the new values (enumerate/zip instead of range(len(...))).
    for i, (when, prediction) in enumerate(zip(times, predictions)):
        new_entity = Entity()
        new_entity.PartitionKey = datetime.strftime(when, "%Y-%m-%d %H:%M:%S")
        new_entity.RowKey = str(i)
        new_entity['soilmoistureprediction'] = str(prediction)
        table_service.insert_entity(table_name, new_entity)
def create_db_entity(self, spoke, vm_details):
    """Insert one VM record into the VMSS table.

    Returns True on success, False if the insert fails.
    """
    vm = Entity()
    vm.PartitionKey = spoke                  # spoke name is the partition
    vm.RowKey = vm_details['hostname']       # VM name is the row key
    vm.name = vm_details['name']
    vm.serial_no = vm_details['serial']
    vm.ip_addr = vm_details['ip-address']
    vm.connected = vm_details['connected']
    vm.deactivated = vm_details['deactivated']
    vm.subs_id = self.subscription_id
    vm.delicensed_on = 'not applicable'
    vm.is_delicensed = 'No'
    try:
        self.table_service.insert_entity(self.vmss_table_name, vm)
        self.logger.info("VM %s with serial no. %s in db" %
                         (vm_details['hostname'], vm_details['serial']))
    except Exception as e:
        self.logger.info("Insert entry to db for %s failed with error %s" %
                         (vm_details['hostname'], e))
        return False
    return True
def main(req: func.HttpRequest) -> func.HttpResponse:
    """Create the configured table and load course rows from the bundled CSV."""
    logging.info('Python HTTP trigger function processed a request.')

    with open('config.json', 'r') as config_file:
        config_data = json.load(config_file)

    table_service = TableService(
        connection_string=config_data["connectionstring"])
    table = config_data["table"]
    table_service.create_table(table)

    for course in readcsv():
        task = Entity()
        task.PartitionKey = course.subject
        task.RowKey = course.instructor
        task.lectures = course.lectures
        task.labs = course.labs
        task.points = course.points
        task.isWeekend = course.isWeekend
        table_service.insert_entity(table, task)

    return func.HttpResponse(
        "Cosmos DB - Table API example database is created.")
def set_stock_min(self, stockdf):
    """Upsert minute-level OHLCV rows into the 'stockM' Azure table.

    RowKey is built as date + zero-padded HHMM time so rows sort
    chronologically within each stock-code partition.
    """
    stockdf_table = stockdf.rename(columns={"code": "PartitionKey",
                                            "date": "RowKey"})
    stockdf_table = stockdf_table.astype({"time": str, "RowKey": str})
    # BUG FIX: the old code tested len(str(Series)) -- the repr of the whole
    # column -- so short times such as '930' were never padded. Pad per row.
    stockdf_table["time"] = stockdf_table["time"].str.zfill(4)
    stockdf_table["RowKey"] = stockdf_table["RowKey"] + stockdf_table["time"]
    stockdf_last = stockdf_table[['PartitionKey', 'RowKey', 'time', 'open',
                                  'high', 'low', 'close', 'volume']]
    for _, row in stockdf_last.iterrows():
        values = row.to_dict()
        task = Entity()
        task.PartitionKey = values['PartitionKey']
        task.RowKey = str(values['RowKey'])
        task.time = values['time']
        task.open = values['open']
        task.high = values['high']
        task.low = values['low']
        task.close = values['close']
        task.volume = values['volume']
        self.table_service.insert_or_merge_entity('stockM', task,
                                                  timeout=None)
def computeFaceDetectionMicrosoft(self, face_image_url, file_name, gender):
    """Run Microsoft Face API detection and log the result.

    Returns (success, faces, faceDetected, predicted_gender_lowercase).
    """
    table_name = 'Microsoft'
    partition_name = 'Microsoft'
    data = {'url': face_image_url}
    response = requests.post(self.msft_face_detection_url,
                             params=self.msft_params,
                             headers=self.msft_headers, json=data)
    faces = response.json()

    success = False
    genderPrediction = 'None'
    faceDetected = False
    if len(faces) > 0:
        faceDetected = True
        genderPrediction = faces[0]["faceAttributes"]["gender"]
        # BUG FIX: compare case-insensitively, consistent with the IBM and
        # Amazon detectors in this file; the old exact comparison failed
        # whenever the caller's casing differed from the API's.
        if gender.lower() == genderPrediction.lower():
            success = True

    face_entry = Entity()
    face_entry.PartitionKey = partition_name
    face_entry.RowKey = file_name
    face_entry.Result = json.dumps(faces)
    face_entry.DetectionSuccess = success
    self.table_service.insert_entity(table_name, face_entry)
    return success, faces, faceDetected, genderPrediction.lower()
def create_table_entity(details):
    """Build a table Entity from a person-details record.

    PartitionKey is 'python-' + first name; RowKey is surname + id.
    """
    row = Entity()
    row.PartitionKey = "python-" + details.FirstName
    row.RowKey = details.Surname + str(details.Id)
    row.FirstName = details.FirstName
    row.Surname = details.Surname
    row.FullName = details.FullName
    return row
def put_classification_result(self, image_uuid, results):
    """Store (or overwrite) a classification result keyed by image UUID."""
    entry = Entity()
    entry.PartitionKey = self.ImagePartitionKey
    entry.RowKey = image_uuid
    entry.results = str(results)
    return self.table_service.insert_or_replace_entity(self.table_name, entry)
def save_answer(answer_id, question_id, user_id, result):
    """Persist a single answer into the 'fsanswers' table."""
    row = Entity()
    row.PartitionKey = answer_id
    row.RowKey = question_id
    row.result = result
    row.created_by = user_id
    table_service.insert_entity('fsanswers', row)
def computeFaceDetectionFacePlusPlus(self,face_image_url, file_name, gender):
    """Detect faces via the Face++ HTTP API and record the outcome.

    Builds a multipart/form-data POST by hand (API key, secret, requested
    attributes, image URL), stores the raw JSON response in the
    'FacePlusPlus' table, and returns
    (success, faces, faceDetected, genderPrediction.lower()).
    On an HTTP error it returns (None, None, None, None).
    """
    table_name = 'FacePlusPlus'
    partition_name = 'FacePlusPlus'
    # Multipart boundary derived from the current time in milliseconds.
    boundary = '----------%s' % hex(int(time.time() * 1000))
    data = []
    data.append('--%s' % boundary)
    data.append('Content-Disposition: form-data; name="%s"\r\n' % 'api_key')
    data.append(self.faceplusplus_key)
    data.append('--%s' % boundary)
    data.append('Content-Disposition: form-data; name="%s"\r\n' % 'api_secret')
    data.append(self.faceplusplus_secret)
    data.append('--%s' % boundary)
    data.append('Content-Disposition: form-data; name="%s"\r\n' % 'return_attributes')
    data.append('gender')
    data.append('--%s' % boundary)
    data.append('Content-Disposition: form-data; name="%s"\r\n' % 'image_url')
    data.append(face_image_url)
    data.append('--%s--\r\n' % boundary)
    http_body='\r\n'.join(data)
    req=urllib.request.Request(self.faceplusplus_http_url)
    req.add_header('Content-Type', 'multipart/form-data; boundary=%s' % boundary)
    req.data = str.encode(http_body)
    try:
        resp = urllib.request.urlopen(req, timeout=5)
        qrcont=resp.read().decode("utf-8")
        faces = json.loads(qrcont)
        success = False
        faceDetected = False
        genderPrediction = 'None'
        if 'faces' in faces.keys():
            faceDetected = True
            genderPrediction = faces["faces"][0]["attributes"]["gender"]["value"]
            if gender.lower()==genderPrediction.lower():
                success = True
        else:
            # No face found: None (distinct from False) marks "no result".
            success = None
            faceDetected = None
        # NOTE(review): presumably a rate-limit throttle for the Face++
        # API -- confirm before removing.
        time.sleep(2)
        face_entry = Entity()
        face_entry.PartitionKey = partition_name
        face_entry.RowKey = file_name
        face_entry.Result = json.dumps(faces)
        face_entry.DetectionSuccess = success
        self.table_service.insert_entity(table_name, face_entry)
        return success, faces, faceDetected, genderPrediction.lower()
    except urllib.request.HTTPError as e:
        return None, None, None, None
def add_order(table_service):
    """Insert a fixed sample order (row '002') into 'ordertable'."""
    sample = Entity()
    sample.PartitionKey = 'ordersSTC'
    sample.RowKey = '002'
    sample.customer = 'Bismark'
    sample.po = '200'
    sample.podate = '05/20/2020'
    sample.deldate = '05/24/2020'
    sample.qty = '800'
    sample.presentation = 'Flobin'
    sample.order = '8500687926'
    table_service.insert_entity('ordertable', sample)
def main(req: func.HttpRequest) -> func.HttpResponse:
    """Validate a submit request, queue it in table storage, return its GUID."""
    logging.info('Python HTTP Submit trigger received a request')
    logging.debug('Creating blob service')
    table_service = TableService(
        account_name=os.getenv('AZURE_STORAGE_ACCOUNT'),
        account_key=os.getenv('AZURE_STORAGE_ACCESS_KEY'))
    headers_dict = {
        "Access-Control-Allow-Credentials": "true",
        "Access-Control-Allow-Origin": "*",
        "Access-Control-Allow-Methods": "Post"
    }

    # Parse and validate the request body.
    schema = submit_schema.SubmitMessageSchema()
    try:
        job_dict = schema.loads(req.get_body())
    except ValidationError:
        error = f'Failed to validate the submit message'
        return func.HttpResponse(error, headers=headers_dict, status_code=400)

    table_name = os.getenv('AZURE_TABLE_NAME')
    table_service.create_table(table_name)
    guid = uuid.uuid4()

    try:
        job_dict = schema.dump(job_dict)
    except ValidationError:
        error = f'Failed to submit job'
        return func.HttpResponse(error, headers=headers_dict, status_code=400)

    # Queue the job as an 'await' entity keyed by the new GUID.
    entity = Entity()
    entity.PartitionKey = 'await'
    entity.RowKey = str(guid)
    entity.Error = ""
    entity.area_name = job_dict['area_name']
    entity.crop = job_dict['crop']
    entity.planting_date = job_dict['planting_date']
    entity.irrigated = job_dict['irrigated']
    entity.fraction = job_dict['fraction']
    entity.geometry = json.dumps(job_dict['geometry'])
    try:
        table_service.insert_entity(table_name, entity)
    except TypeError:
        error = f'Failed to insert to table'
        return func.HttpResponse(error, headers=headers_dict, status_code=400)

    # Echo the GUID back to the caller.
    response_dict = {'guid': guid}
    schema = submit_schema.SubmitResponseSchema()
    response_message = schema.dumps(response_dict)
    return func.HttpResponse(response_message,
                             headers=headers_dict,
                             mimetype='application/json')
def main(req: func.HttpRequest) -> func.HttpResponse:
    """Match an uploaded face against stored faces; insert a new entry if none match.

    Returns the RowKey of the matched (updated) or newly created entity.
    """
    logging.info('Python HTTP trigger function processed a request.')
    CF.BaseUrl.set("https://emotiontrack.cognitiveservices.azure.com/face/v1.0")
    # SECURITY(review): API key hard-coded in source; move to app settings.
    CF.Key.set("4a1e0d41a8494d71ac0b9028464d8e62")
    rowkey = req.params.get('rowkey')
    if not rowkey:
        logging.error("Missing parameter(s)")
        return func.HttpResponse("Missing one or more parameter.",
                                 status_code=400)
    face = req.get_json()
    face_rect = face['faceRectangle']
    table = TableService(connection_string=conn_string)
    if not table:
        logging.error("Failed to connect to the storage")
        return func.HttpResponse(
            "Failed to connect to the storage. Please try again later.",
            status_code=500)
    test_img = getFaceImage(table, rowkey, face_rect)
    test_imgIO = io.BytesIO()
    # BUG FIX: Pillow's format name is 'JPEG'; save(format='JPG') raises.
    test_img.save(test_imgIO, format='JPEG')
    entities = table.query_entities(table_name, filter=None)
    isMatch = False
    for entity in entities:
        img = getFaceImage(table, entity.RowKey, entity.rect)
        imgIO = io.BytesIO()
        img.save(imgIO, format='JPEG')
        try:
            res = CF.face.verify(test_imgIO, imgIO)
        except Exception:
            # NOTE(review): the original handler was lost in the collapsed
            # source; skipping entities the verify call rejects -- confirm.
            continue
        if res['isIdentical']:
            # update entry
            entity.RowKey = rowkey
            entity.rect = face_rect
            table.update_entity(table_name, entity)
            isMatch = True
            break
    if not isMatch:
        # new entry
        entity = Entity()
        entity.PartitionKey = "1"
        entity.RowKey = str(uuid.uuid4())
        entity.rect = face_rect
        table.insert_entity(table_name, entity)
    return func.HttpResponse(entity.RowKey, status_code=200)
def upload_price(self, price, fuel_type, location):
    """Insert one fuel-price observation; returns a human-readable status."""
    entry = Entity()
    try:
        entry.PartitionKey = "trondelag"
        entry.RowKey = str(uuid.uuid4())  # Generate new random UUID
        entry.price = price
        entry.location = location
        entry.fueltype = fuel_type
        self.table_service.insert_entity(self.table_name, entry)
    except AttributeError:
        print("Error trying to upload: Fuel type '" + fuel_type +
              "' Price '" + price + "'")
        return "Something went wrong. Try check your syntax"
    return "Price inserted successfully"
def data_load(self):
    """Reset the policy table: wipe all rows, then load the canonical data set.

    Rows are keyed by category (PartitionKey) and 'value|provider' (RowKey).
    Always returns True.
    """
    data_set = {
        "required-modules": [
            "custom-vnet|azurerm", "custom-sg|azurerm", "custom-blob|azurerm",
            "custom-vpc|aws", "custom-sg|aws"
        ],
        "approved-instances": [
            "Standard_A1_v2|azurerm", "Standard_A2_v2|azurerm",
            "Standard_A4_v2|azurerm", "Standard_A8_v2|azurerm",
            "t3.micro|aws", "t3.small|aws", "t3.medium|aws", "t3.large|aws"
        ],
        "prohibited-resources": [
            "azurerm_resource_group|azurerm",
            "azurerm_virtual_network|azurerm",
            "azurerm_network_security_group|azurerm",
            "azurerm_subnet_network_security_group_association|azurerm",
            "aws_internet_gateway|aws", "aws_route|aws",
            "aws_route_table|aws", "aws_route_table_association|aws",
            "aws_subnet|aws", "aws_vpc|aws", "aws_security_group|aws"
        ],
        "allowed-resources": [
            "azurerm_virtual_machine|azurerm",
            "azurerm_network_interface|azurerm",
            "azurerm_public_ip|azurerm",
            "azurerm_storage_account|azurerm",
            "aws_instance|aws", "aws_s3_bucket|aws",
            "aws_s3_bucket_policy|aws"
        ],
        "prevent-deletion": ["true"],
        "default-provider": ["azurerm"],
        "mandatory-tags": ["Department", "Environment"],
        "max-cost": ["100"],
        "ddb-encryption": ["true"],
        "no-star-access": ["true"]
    }

    # Delete every existing row before reloading.
    for existing in self.table_service.query_entities(self.table_name):
        self.table_service.delete_entity(self.table_name,
                                         existing.PartitionKey,
                                         existing.RowKey)

    # Insert the canonical rows.
    for category, values in data_set.items():
        for value in values:
            item = Entity()
            item.PartitionKey = category
            item.RowKey = value
            self.table_service.insert_entity(self.table_name, item)
    return True
def add_order(table_service, req_body):
    """Create an order row from a request payload; RowKey is '00' + next row number."""
    next_row = get_rows(table_service) + 1
    print(next_row)
    order = Entity()
    order.PartitionKey = req_body.get('PartitionKey')
    order.RowKey = '00' + str(next_row)
    order.customer = req_body.get('customer')
    order.po = req_body.get('po')
    order.podate = req_body.get('poDate')
    order.deldate = req_body.get('delDate')
    order.qty = req_body.get('qty')
    order.presentation = req_body.get('presentation')
    order.order = req_body.get('order')
    table_service.insert_entity('ordertable', order)
def store_feedback_skills(job_id, hm_id, cand_id, skills_scores,
                          recommendation, org):
    """Upsert a feedback-skills row keyed by a fresh UUID partition.

    NOTE(review): ``recommendation`` and ``org`` are accepted but never
    stored -- kept for interface compatibility; confirm intent.
    """
    # The old `try: ... except: raise` wrapper was a no-op and was removed.
    task = Entity()
    task.PartitionKey = str(uuid.uuid4())
    task.HmId = hm_id
    task.RowKey = str(cand_id)
    task.LastUpdated = str(datetime.now())
    task.SkillScores = json.dumps(skills_scores)
    task.JobId = str(job_id)
    table_service.insert_or_replace_entity(table_name, task)
def main(req: func.HttpRequest) -> func.HttpResponse:
    """Insert a person (name, school) and echo the generated RowKey."""
    logging.info('Python HTTP trigger function processed a request.')
    person = Entity()
    person.PartitionKey = "1"
    person.RowKey = str(uuid.uuid4())
    person.name = req.params.get('name')
    person.school = req.params.get('school')
    table.insert_entity(table_name, person)
    return func.HttpResponse(str(person.RowKey), status_code=200)
def insert_sub_date_entry(self, entry):
    """Upsert the most-recent-post marker for a subreddit."""
    marker = Entity()
    marker.PartitionKey = entry.subreddit
    marker.RowKey = entry.title
    marker.created_utc = entry.created_utc
    marker.post_id = entry.post_id
    try:
        self.table_service.insert_or_replace_entity('mostrecentsubdate',
                                                    marker)
    except TypeError as error:
        # A field had a type the table client cannot serialize.
        print(error)
        print(
            f"The mostrecentsubdate object is formatted incorrectly and was not updated. One of the parameters is not an int, str, bool or datetime, or defined custom EntityProperty. Continuing..."
        )
def add_to_list(self, list_name, value, provider=None):
    """Add a value to a named list; RowKey gets a '|provider' suffix if given.

    Returns the (possibly updated) in-memory list for ``list_name``.
    """
    suffix = "" if provider is None else "|" + provider
    item = Entity()
    item.PartitionKey = list_name
    item.RowKey = value + suffix
    main_list = self.get_list(list_name, provider)
    try:
        self.table_service.insert_entity(self.table_name, item)
    except ValueError:
        pass  # row already present; leave the cached list unchanged
    else:
        main_list.append(value)
    return main_list
def set_container_location():
    """Upsert a container's reported location; expects JSON with a DeviceId."""
    # if POST is not JSON or does not contain a trackerId abort
    if not request.json or 'DeviceId' not in request.json:
        abort(400)
    payload = request.json
    e = Entity()
    e.PartitionKey = 'container'
    e.RowKey = payload['DeviceId']
    e.device_id = payload['DeviceId']
    e.latitude = payload['Properties']['1012']
    e.longitude = payload['Properties']['1013']
    e.time = payload['Properties']['5018']
    table_service.insert_or_replace_entity('emerson', e)
    # return success with submitted shipping location
    return jsonify(e), 201
def add_entity(StoreName, Address, MaxCapacity, Action, Value, ZipCode, IP,
               DateTime):
    """Append an occupancy event row for a store.

    Address is the partition key and DateTime the row key.  The first row
    for a store must supply MaxCapacity/ZipCode/IP; later rows inherit any
    omitted values from the most recent entity.  ``Action`` must be 'inc'
    or 'dec'.  Returns True on success, False on invalid input.
    """
    row = Entity()
    row.PartitionKey = Address  # store location serves as the partition key
    row.RowKey = DateTime       # timestamp serves as the row key
    previous = get_most_recent_entity(StoreName, Address)

    if previous is None:  # was `== None`; identity test is the Python idiom
        # First entity for this store: all metadata must be supplied.
        if MaxCapacity is None or ZipCode is None or IP is None:
            return False
        row.MaxCapacity = MaxCapacity
        row.ZipCode = ZipCode
        row.IP = IP
        base_occupancy = 0
    else:
        # Inherit any omitted metadata from the previous entity.
        row.MaxCapacity = previous.MaxCapacity if MaxCapacity is None else MaxCapacity
        row.ZipCode = previous.ZipCode if ZipCode is None else ZipCode
        row.IP = previous.IP if IP is None else IP
        base_occupancy = int(previous.CurrentOccupancy)

    # Apply the occupancy delta; reject unknown actions.
    if Action == 'inc':
        delta = int(Value)
    elif Action == 'dec':
        delta = -int(Value)
    else:
        return False
    row.CurrentOccupancy = str(base_occupancy + delta)

    # insert entity into table for the given StoreName
    table_service.insert_entity(StoreName, row)
    return True