def start(self, user_job_request):
    """Start every job listed in the request's properties.

    For each (name, version) pair: verify the job name and version exist in
    DynamoDB, reload the stored job definition from S3, record the "start"
    state in DynamoDB, then forward the definition to the connector lambda.
    Returns "" on the first failure.
    """
    for prop in user_job_request["job"]["properties"]:
        job_name = prop["name"]
        job_version = prop["version"]
        if not check_job_name_exists(job_name, job_version):
            lib.post_to_user(
                job_name, job_version, "error",
                msg.ERR_MSG_JOB_NAME % (job_name, reference_table))
            logger.info(msg.ERR_MSG_JOB_NAME % (job_name, reference_table))
            return ""
        if not check_job_version_exists(job_name, job_version):
            lib.post_to_user(
                job_name, job_version, "error",
                msg.ERR_MSG_JOB_VERSION %
                (job_name, job_version, reference_table))
            logger.info(msg.ERR_MSG_JOB_VERSION %
                        (job_name, job_version, reference_table))
            return ""
        checked_job_request = read_from_s3(job_name, job_version)
        if not checked_job_request:
            return ""
        if not update_ddb(job_name, job_version, "start"):
            return ""
        checked_job_request["job"]["control"] = "start"
        post_to_lambda(job_name, checked_job_request)
def create_group_version(self, gg_group_id, job_name, job_version, resource_created):
    """Create a new Greengrass group version from the cached definitions.

    Uses the group's existing core definition, the freshly created function
    and subscription definition versions, and either the new resource
    definition version (when resource_created is truthy) or the group's
    existing one. Returns 1 on success; on failure posts an error to the
    user and returns self.remove().
    """
    definition = self.get_group_version_response["Definition"]
    core_arn = definition["CoreDefinitionVersionArn"]
    function_arn = self.resource_gg_function_definition_version["Arn"]
    subscription_arn = self.resource_gg_subscription_definition_version["Arn"]
    if resource_created:
        resource_arn = self.resource_gg_resource_definition_version["Arn"]
    else:
        resource_arn = definition["ResourceDefinitionVersionArn"]
    try:
        self.resource_gg_group_version = gg_client.create_group_version(
            CoreDefinitionVersionArn=core_arn,
            FunctionDefinitionVersionArn=function_arn,
            GroupId=gg_group_id,
            ResourceDefinitionVersionArn=resource_arn,
            SubscriptionDefinitionVersionArn=subscription_arn)
    except Exception as err:
        logger.info("traceback:" + str(err))
        lib.post_to_user(job_name, job_version, "error",
                         msg.ERR_MSG_GREENGRASS_CREATE_GROUP_VERSION)
        return self.remove()
    return 1
def deploy(self, user_job_request):
    """Deploy a brand-new job: create Greengrass resources, persist the job
    to S3 and DynamoDB, and prompt the user to deploy the group manually.

    Rejects the request when the job name already exists. Returns "" on
    failure; posts an informational message on success.
    """
    global enable_metrics
    first_property = user_job_request["job"]["properties"][0]
    job_name = first_property["name"]
    job_version = first_property["version"]
    gg_group_id = user_job_request["job"].get("gg_group_id", "")
    if check_job_name_exists(job_name, job_version):
        lib.post_to_user(job_name, job_version, "error",
                         msg.ERR_MSG_DEPLOY_JOB_NAME_ALREAD_EXIST)
        logger.info(msg.ERR_MSG_DEPLOY_JOB_NAME_ALREAD_EXIST)
        return ""
    protocol = user_job_request["job"]["machine-details"][
        "connectivity-parameters"]["protocol"]
    new_resource = resources.generate_greengrass_resources(
        job_name, job_version, gg_group_id, protocol)
    if not new_resource.create(job_name, job_version, gg_group_id, protocol):
        return ""
    user_job_request = lib.create_job(user_job_request)
    if not user_job_request:
        return ""
    if not write_to_s3(user_job_request):
        return ""
    if not write_to_ddb(user_job_request):
        return ""
    if enable_metrics.lower() == "true":
        lib.get_metrics(user_job_request)
    lib.post_to_user(job_name, job_version, "info",
                     msg.INF_MSG_MANUAL_DEPLOY_REQUIRED)
def stop(self, user_job_request):
    """Stop every job listed in the request's properties.

    Builds a minimal per-job request, verifies the job name and version
    exist in DynamoDB, records the "stop" state, and forwards the request to
    the connector lambda. Returns "" on the first failure.
    """
    for prop in user_job_request["job"]["properties"]:
        job_name = prop["name"]
        job_version = prop["version"]
        checked_job_request = {
            "job": {
                "properties": [{
                    "name": job_name,
                    "version": job_version
                }],
                "control": user_job_request["job"]["control"]
            }
        }
        if not check_job_name_exists(job_name, job_version):
            lib.post_to_user(
                job_name, job_version, "error",
                msg.ERR_MSG_JOB_NAME % (job_name, reference_table))
            logger.info(msg.ERR_MSG_JOB_NAME % (job_name, reference_table))
            return ""
        if not check_job_version_exists(job_name, job_version):
            lib.post_to_user(
                job_name, job_version, "error",
                msg.ERR_MSG_JOB_VERSION %
                (job_name, job_version, reference_table))
            logger.info(msg.ERR_MSG_JOB_VERSION %
                        (job_name, job_version, reference_table))
            return ""
        if not update_ddb(job_name, job_version, "stop"):
            return ""
        post_to_lambda(job_name, checked_job_request)
def get_existing_subscription_definitions(self, job_name, job_version):
    """Return the subscriptions of the subscription definition currently
    attached to the Greengrass group.

    Pages through every subscription definition in the account and keeps the
    one whose LatestVersionArn matches the group's
    SubscriptionDefinitionVersionArn (from self.get_group_version_response).
    On any API failure, posts an error to the user and returns the result of
    self.remove().
    """
    try:
        list_subscription_definitions = []
        list_subscription_definitions_temp_array = gg_client.list_subscription_definitions(
            MaxResults='30')
        list_subscription_definitions = list_subscription_definitions_temp_array[
            'Definitions']
        # List all available subscription definition
        while ('NextToken' in list_subscription_definitions_temp_array):
            tokenValue = list_subscription_definitions_temp_array[
                "NextToken"]
            list_subscription_definitions_temp_array = gg_client.list_subscription_definitions(
                NextToken=tokenValue)
            for i in range(
                    0,
                    len(list_subscription_definitions_temp_array[
                        'Definitions'])):
                list_subscription_definitions.append(
                    list_subscription_definitions_temp_array['Definitions']
                    [i])
        # Only keep the subscription definition that are relevant to the current GreenGrass Group Id
        subscription_list = []
        for i in range(0, len(list_subscription_definitions)):
            # NOTE(review): j is reset on every iteration, so it can only be
            # 0 or 1 below; the `elif j > 1` branch looks unreachable —
            # confirm whether j was meant to persist across iterations.
            j = 0
            if ('LatestVersionArn' in list_subscription_definitions[i]):
                j = j + 1
                if ('SubscriptionDefinitionVersionArn' in
                        self.get_group_version_response["Definition"]):
                    if (self.get_group_version_response["Definition"]
                            ["SubscriptionDefinitionVersionArn"] ==
                            list_subscription_definitions[i]
                            ["LatestVersionArn"]):
                        subscription = gg_client.get_subscription_definition_version(
                            SubscriptionDefinitionId=
                            list_subscription_definitions[i]["Id"],
                            SubscriptionDefinitionVersionId=
                            list_subscription_definitions[i]
                            ["LatestVersion"])
                        if j == 1:
                            subscription_list = subscription['Definition'][
                                'Subscriptions']
                        elif j > 1:
                            subscription_list.append(
                                subscription['Definition']
                                ['Subscriptions'])
                    else:
                        # definition is not in the group. move to next
                        pass
        return subscription_list
    except Exception as err:
        logger.info("traceback:" + str(err))
        lib.post_to_user(job_name, job_version, "error",
                         msg.ERR_MSG_GREENGRASS_RETRIEVE_SUSCRIPTIONS)
        return self.remove()
def check_job_name_exists(job_name, job_version):
    """Return the number of DynamoDB entries stored under job_name.

    Returns "" (falsy) when the query itself fails, after notifying the
    user; a count of 0 is also falsy, so callers may treat any falsy result
    as "name not found".
    """
    try:
        query_result = ddb_table.query(
            KeyConditionExpression=Key("jobid").eq(job_name))
    except Exception as err:
        logger.info("traceback:" + str(err))
        lib.post_to_user(job_name, job_version, "error",
                         msg.ERR_MSG_DDB_QUERY_FAILURE)
        return ""
    return query_result["Count"]
def update_ddb(job_name, job_version, job_control):
    """Update the job's control state and timestamp in DynamoDB.

    For "start", any other running version of the same job name is first
    flipped to "stop". For "stop", rejects the request when the entry is
    already stopped. Finally writes job_control and a fresh timestamp to the
    requested (jobid, version) entry. Returns 1 on success, "" on failure.
    """
    # Update key items control and timestamp everytime an action on the job is performed.
    if job_control == "start":
        # Starting one version implicitly stops any other running version
        # of the same job name.
        job_in_ddb = ddb_table.query(
            KeyConditionExpression=Key("jobid").eq(job_name))
        for i in range(0, job_in_ddb["Count"]):
            if (job_in_ddb["Items"][i]["control"] == "start") or (
                    job_in_ddb["Items"][i]["control"] == "update"):
                try:
                    ddb_table.update_item(
                        Key={
                            'jobid': job_name,
                            'version': job_in_ddb["Items"][i]['version']
                        },
                        UpdateExpression="set #t =:t, control =:c",
                        ExpressionAttributeValues={
                            ':t':
                            datetime.now().strftime("%d/%m/%Y,%H:%M:%S.%f"),
                            ':c': "stop"
                        },
                        # "timestamp" is a DynamoDB reserved word, hence the alias.
                        ExpressionAttributeNames={"#t": "timestamp"})
                except Exception as err:
                    # Best-effort: keep going if one entry cannot be updated.
                    logger.info(err)
    if job_control == "stop":
        job_in_ddb = ddb_table.query(
            KeyConditionExpression=Key("jobid").eq(job_name) &
            Key("version").eq(int(float(str(job_version)))))
        # BUG FIX: guard against an empty result before indexing Items[0]
        # (previously raised IndexError when the entry was missing).
        if job_in_ddb["Count"] and (
                job_in_ddb["Items"][0]["control"] == "stop"):
            lib.post_to_user(job_name, job_version, "error",
                             msg.ERR_MSG_FAIL_ALREADY_STOP)
            logger.info(msg.ERR_MSG_FAIL_ALREADY_STOP)
            return ""
    try:
        ddb_table.update_item(
            Key={
                'jobid': job_name,
                'version': int(float(str(job_version)))
            },
            UpdateExpression="set #t =:t, control =:c",
            ExpressionAttributeValues={
                ':t': datetime.now().strftime("%d/%m/%Y,%H:%M:%S.%f"),
                ':c': job_control
            },
            ExpressionAttributeNames={"#t": "timestamp"})
        return 1
    except Exception:
        # BUG FIX: narrowed bare `except:` so system-exiting exceptions
        # (KeyboardInterrupt/SystemExit) are not swallowed.
        lib.post_to_user(job_name, job_version, "error",
                         msg.ERR_MSG_DDB_UPDATE_FAILURE % (reference_table))
        logger.info(msg.ERR_MSG_DDB_UPDATE_FAILURE % (reference_table))
        return ""
def get_existing_resource_definitions(self, job_name, job_version):
    """Return the resources of the resource definition currently attached to
    the Greengrass group.

    Pages through every resource definition in the account and keeps the one
    whose LatestVersionArn matches the group's ResourceDefinitionVersionArn
    (from self.get_group_version_response). On any API failure, posts an
    error to the user and returns the result of self.remove().
    """
    try:
        list_resource_definitions = []
        list_resource_definitions_temp_array = gg_client.list_resource_definitions(
            MaxResults='30')
        list_resource_definitions = list_resource_definitions_temp_array[
            'Definitions']
        # List all available local resource definition
        while ('NextToken' in list_resource_definitions_temp_array):
            tokenValue = list_resource_definitions_temp_array["NextToken"]
            list_resource_definitions_temp_array = gg_client.list_resource_definitions(
                NextToken=tokenValue)
            for i in range(
                    0,
                    len(list_resource_definitions_temp_array['Definitions'
                                                             ])):
                list_resource_definitions.append(
                    list_resource_definitions_temp_array['Definitions'][i])
        # Only keep the local resource definition that are relevant to the current GreenGrass Group Id
        resource_list = []
        for i in range(0, len(list_resource_definitions)):
            if ('LatestVersionArn' in list_resource_definitions[i]):
                # NOTE(review): j is reset for every definition, so it can
                # only reach 1; the `elif j > 1` branch looks unreachable.
                j = 0
                if ('ResourceDefinitionVersionArn' in
                        self.get_group_version_response["Definition"]):
                    if self.get_group_version_response["Definition"]["ResourceDefinitionVersionArn"] == \
                            list_resource_definitions[i]["LatestVersionArn"]:
                        j = j + 1
                        resource = gg_client.get_resource_definition_version(
                            ResourceDefinitionId=list_resource_definitions[
                                i]["Id"],
                            ResourceDefinitionVersionId=
                            list_resource_definitions[i]["LatestVersion"])
                        if j == 1:
                            resource_list = resource["Definition"][
                                "Resources"]
                        elif j > 1:
                            resource_list.append(
                                resource["Definition"]["Resources"])
                    else:
                        # definition is not in the group. move to next
                        pass
            else:
                # NOTE(review): this aborts the whole scan at the first
                # definition lacking LatestVersionArn, while the analogous
                # subscription scan uses `pass` — confirm `break` is intended.
                break
        return resource_list
    except Exception as err:
        logger.info("traceback:" + str(err))
        lib.post_to_user(job_name, job_version, "error",
                         msg.ERR_MSG_GREENGRASS_RETRIEVE_RESOURCES)
        return self.remove()
def create_function_definition(self, function_list, gg_resource_id, job_name, job_version):
    """Create a Greengrass function definition plus a version of it that
    appends the OPC-DA connector lambda to the group's existing functions.

    Returns 1 on success; on any API failure posts an error to the user and
    returns self.remove().
    """
    try:
        self.resource_gg_function_definition = gg_client.create_function_definition(
            Name='m2c2-opcda-greengrass-function-definition' + job_name)
        logger.info("create function definition: " +
                    str(self.resource_gg_function_definition))
    except Exception as err:
        logger.info("traceback:" + str(err))
        lib.post_to_user(job_name, job_version, "error",
                         msg.ERR_MSG_GREENGRASS_CREATE_FUNCTION_DEFINITION)
        return self.remove()
    # Append the connector lambda to the group's existing function list.
    connector_function = {
        'FunctionArn': self.resource_connector_lambda_alias["AliasArn"],
        'FunctionConfiguration': {
            'EncodingType': 'json',
            'Environment': {
                'AccessSysfs': False,
                'ResourceAccessPolicies': [{
                    'Permission': 'rw',
                    'ResourceId': gg_resource_id
                }],
            },
            'Executable': 'm2c2-opcda-connector.function_handler',
            'MemorySize': 128000,
            'Pinned': True,
            'Timeout': 10
        },
        'Id': 'Function-id-' + job_name
    }
    function_list.append(connector_function)
    try:
        self.resource_gg_function_definition_version = gg_client.create_function_definition_version(
            FunctionDefinitionId=self.resource_gg_function_definition["Id"],
            Functions=function_list)
        logger.info("create function definition version: " +
                    str(self.resource_gg_function_definition_version))
    except Exception as err:
        logger.info("traceback:" + str(err))
        lib.post_to_user(
            job_name, job_version, "error",
            msg.ERR_MSG_GREENGRASS_CREATE_FUNCTION_DEFINITION_VERSION)
        return self.remove()
    return 1
def __init__(self, job_request):
    """Validate the incoming job request and dispatch it.

    Rejects unknown control verbs, runs the request through lib.json_check,
    then hands the validated request to self.run().
    """
    if job_request["job"]["control"].lower() not in [
            "start", "stop", "push", "pull", "update", "deploy"
    ]:
        lib.post_to_user(
            "", "", "error",
            msg.ERR_MSG_UNKNOWN_CONTROL % (job_request["job"]["control"]))
        logger.info(msg.ERR_MSG_UNKNOWN_CONTROL %
                    (job_request["job"]["control"]))
        # BUG FIX: __init__ must return None. The previous `return ""`
        # raised "TypeError: __init__() should return None, not 'str'"
        # on every unknown-control request.
        return
    validated_job_request = lib.json_check(job_request)
    if not validated_job_request:
        return
    self.run(validated_job_request)
def read_from_s3(job_name, job_version):
    """Fetch the stored job definition from S3 and return it as a dict.

    The object key is <reference_key><job_name>#v<job_version>.json.
    Returns "" on failure (after notifying the user).
    """
    global reference_bucket, reference_key
    s3object = s3_client.Object(
        reference_bucket,
        reference_key + job_name + "#v" + job_version + ".json")
    try:
        return json.loads(s3object.get()['Body'].read().decode('utf-8'))
    except Exception as err:
        logger.info("traceback:" + str(err))
        # BUG FIX: this is a read path; report the READ failure message.
        # It previously posted ERR_MSG_S3_WRITE_FAILURE (swapped with
        # write_to_s3).
        lib.post_to_user(
            job_name, job_version, "error",
            msg.ERR_MSG_S3_READ_FAILURE %
            (reference_key + job_name + "#v" + job_version + ".json",
             reference_bucket))
        return ""
def create_resource_definition(self, resource_list, job_name, job_version, gg_resource_id):
    """Create a Greengrass resource definition plus a version of it that
    includes the connector's local volume resource.

    The volume is only appended when no resource with the same id already
    exists in the group's list. Returns 1 on success; on API failure posts
    an error to the user and returns self.remove().
    """
    try:
        self.resource_gg_resource_definition = gg_client.create_resource_definition(
            Name="M2C2LocalResource" + job_name)
        logger.info("create resource definition: " +
                    str(self.resource_gg_resource_definition))
    except Exception as err:
        logger.info("traceback:" + str(err))
        lib.post_to_user(job_name, job_version, "error",
                         msg.ERR_MSG_GREENGRASS_CREATE_RESOURCE_DEFINITION)
        return self.remove()
    # OPCDA connector local resource added to the existing greegrass group
    # resource definitions, unless it is already present.
    if not any(entry["Id"] == gg_resource_id for entry in resource_list):
        resource_list.append({
            "Name": gg_resource_id,
            "Id": gg_resource_id,
            "ResourceDataContainer": {
                "LocalVolumeResourceData": {
                    "SourcePath": "/m2c2/job",
                    "DestinationPath": "/m2c2/job",
                    "GroupOwnerSetting": {
                        "AutoAddGroupOwner": True
                    }
                }
            }
        })
    try:
        self.resource_gg_resource_definition_version = gg_client.create_resource_definition_version(
            ResourceDefinitionId=self.resource_gg_resource_definition["Id"],
            Resources=resource_list)
        logger.info("create resource definition version: " +
                    str(self.resource_gg_resource_definition_version))
    except Exception as err:
        logger.info("traceback:" + str(err))
        lib.post_to_user(
            job_name, job_version, "error",
            msg.ERR_MSG_GREENGRASS_CREATE_RESOURCE_DEFINITION_VERSION)
        return self.remove()
    return 1
def create_subscription_definition(self, subscription_list, job_name, job_version):
    """Create a Greengrass subscription definition plus a version of it that
    adds two-way routing between the cloud and the connector lambda.

    Returns 1 on success; on API failure posts an error to the user and
    returns self.remove().
    """
    try:
        self.resource_gg_subscription_definition = gg_client.create_subscription_definition(
            Name='m2c2-opcda-greengrass-subscription-definition-' + job_name)
        logger.info("create subscription definition: " +
                    str(self.resource_gg_subscription_definition))
    except Exception as err:
        logger.info("traceback:" + str(err))
        lib.post_to_user(
            job_name, job_version, "error",
            msg.ERR_MSG_GREENGRASS_CREATE_SUBSCRIPTION_DEFINITION)
        return self.remove()
    # Route job submissions from the cloud to the connector lambda, and all
    # job topics from the lambda back to the cloud.
    alias_arn = self.resource_connector_lambda_alias["AliasArn"]
    subscription_list.extend([
        {
            'Id': 'from-cloud-subscription-id' + job_name,
            'Source': 'cloud',
            'Subject': "m2c2/job/" + job_name + "/submit",
            'Target': alias_arn,
        },
        {
            'Id': 'from-lambda-subscription-id' + job_name,
            'Source': alias_arn,
            'Subject': 'm2c2/job/#',
            'Target': 'cloud'
        },
    ])
    try:
        self.resource_gg_subscription_definition_version = gg_client.create_subscription_definition_version(
            SubscriptionDefinitionId=self.
            resource_gg_subscription_definition["Id"],
            Subscriptions=subscription_list)
        logger.info("create subscription definition version: " +
                    str(self.resource_gg_subscription_definition_version))
    except Exception as err:
        logger.info("traceback:" + str(err))
        lib.post_to_user(
            job_name, job_version, "error",
            msg.ERR_MSG_GREENGRASS_CREATE_SUBSCRIPTION_DEFINITION_VERSION)
        return self.remove()
    return 1
def write_to_s3(user_job_request):
    """Persist the job request JSON to S3.

    The object key is <reference_key><job_name>#v<job_version>.json.
    Returns 1 on success, "" on failure (after notifying the user).
    """
    job_name = user_job_request["job"]["properties"][0]["name"]
    job_version = user_job_request["job"]["properties"][0]["version"]
    global reference_bucket, reference_key
    s3object = s3_client.Object(
        reference_bucket,
        reference_key + job_name + "#v" + job_version + ".json")
    try:
        s3object.put(
            Body=(bytes(json.dumps(user_job_request).encode('UTF-8'))))
        return 1
    except Exception as err:
        logger.info("traceback:" + str(err))
        # BUG FIX: this is a write path; report the WRITE failure message.
        # It previously posted ERR_MSG_S3_READ_FAILURE (swapped with
        # read_from_s3).
        lib.post_to_user(
            job_name, job_version, "error",
            msg.ERR_MSG_S3_WRITE_FAILURE %
            (reference_key + job_name + "#v" + job_version + ".json",
             reference_bucket))
        return ""
def pull(self, user_job_request):
    """Forward a "pull" request to the connector lambda for every job listed
    in the request's properties.

    Each job name is checked for existence in DynamoDB first; returns "" as
    soon as one is missing (after notifying the user).
    """
    for prop in user_job_request["job"]["properties"]:
        # BUG FIX: the existence check previously ran before job_name was
        # updated for the current iteration, so from the second property on
        # it validated the *previous* job's name.
        job_name = prop["name"]
        if check_job_name_exists(job_name, ""):
            checked_job_request = {
                "job": {
                    "properties": [{
                        "name": job_name
                    }],
                    "control": user_job_request["job"]["control"]
                }
            }
            post_to_lambda(job_name, checked_job_request)
        else:
            lib.post_to_user(
                job_name, "", "error",
                msg.ERR_MSG_PULL_JOB_DOES_NOT_EXISTS % (job_name))
            logger.info(msg.ERR_MSG_PULL_JOB_DOES_NOT_EXISTS % (job_name))
            return ""
def create_lambda(self, connector_lambda, connector_lambda_role, job_name, job_version, protocol):
    """Create the connector lambda from the packaged zip in S3 and publish
    an alias for it (both named with a fresh unique suffix).

    Returns 1 on success, "" when the function itself cannot be created, and
    self.remove() when only the alias creation fails.
    """
    suffix = lib.unique_id()
    # The function and its alias deliberately share the same name.
    resource_name = "m2c2-" + protocol + "-connector-" + job_name + "-" + suffix
    try:
        self.resource_connector_lambda = lambda_client.create_function(
            FunctionName=resource_name,
            Runtime="python2.7",
            Role=connector_lambda_role,
            Handler="m2c2-" + protocol + "-connector.function_handler",
            Code={
                "S3Bucket": reference_bucket,
                "S3Key": connector_lambda + "/m2c2-" + protocol +
                "-connector.zip",
            },
            Description="m2c2-" + protocol + "-connector-lambda-" + job_name,
            Timeout=5,
            MemorySize=128,
            Publish=True)
        logger.info("create lambda function: " +
                    str(self.resource_connector_lambda))
    except Exception as err:
        logger.info("traceback:" + str(err))
        lib.post_to_user(job_name, job_version, "error",
                         msg.ERR_MSG_GREENGRASS_LAMBDA_CREATE)
        return ""
    try:
        self.resource_connector_lambda_alias = lambda_client.create_alias(
            FunctionName=self.resource_connector_lambda["FunctionArn"],
            Name=resource_name,
            FunctionVersion='1')
    except Exception as err:
        logger.info("traceback:" + str(err))
        lib.post_to_user(job_name, job_version, "error",
                         msg.ERR_MSG_GREENGRASS_LAMBDA_CREATE_ALIAS)
        return self.remove()
    return 1
def check_job_version_exists(job_name, job_version):
    """Check whether exactly one (job_name, job_version) entry exists.

    Returns True when one entry exists, False when none does, and ""
    (after notifying the user) when the query fails or multiple entries are
    found — multiple matches indicate a corrupted table.
    """
    try:
        query_result = ddb_table.query(
            KeyConditionExpression=Key("jobid").eq(job_name) &
            Key("version").eq(int(float(str(job_version)))))
    except Exception as err:
        logger.info("traceback:" + str(err))
        lib.post_to_user(job_name, job_version, "error",
                         msg.ERR_MSG_DDB_QUERY_FAILURE)
        return ""
    match_count = query_result["Count"]
    if match_count == 0:
        return False
    if match_count == 1:
        return True
    lib.post_to_user(job_name, job_version, "error",
                     msg.ERR_MSG_DDB_MULTIPLE_ENTRIES_FOUND)
    logger.info(msg.ERR_MSG_DDB_MULTIPLE_ENTRIES_FOUND)
    return ""
def get_group_definition(self, gg_group_id, job_name, job_version):
    """Fetch the Greengrass group and its latest group version, caching the
    responses on self.get_group_response / self.get_group_version_response.

    Returns 1 on success; on failure posts an error to the user and returns
    self.remove().
    """
    try:
        self.get_group_response = gg_client.get_group(GroupId=gg_group_id)
        logger.info("get group: " + str(self.get_group_response))
    except Exception as err:
        logger.info("traceback:" + str(err))
        lib.post_to_user(job_name, job_version, "error",
                         msg.ERR_MSG_GREENGRASS_GET_GROUP)
        return self.remove()
    try:
        self.get_group_version_response = gg_client.get_group_version(
            GroupId=self.get_group_response["Id"],
            GroupVersionId=self.get_group_response["LatestVersion"])
        logger.info("get group version: " +
                    str(self.get_group_version_response))
    except Exception as err:
        logger.info("traceback:" + str(err))
        lib.post_to_user(job_name, job_version, "error",
                         msg.ERR_MSG_GREENGRASS_GET_GROUP_VERSION)
        return self.remove()
    return 1
def push(self, user_job_request):
    """Push the stored job definition to the device.

    Verifies the job name and version exist in DynamoDB, reloads the job
    from S3, records the "push" state, and forwards the definition to the
    connector lambda. Returns "" on failure.
    """
    job_name = user_job_request["job"]["properties"][0]["name"]
    job_version = user_job_request["job"]["properties"][0]["version"]
    if not check_job_name_exists(job_name, job_version):
        lib.post_to_user(
            job_name, job_version, "error",
            msg.ERR_MSG_JOB_NAME % (job_name, reference_table))
        logger.info(msg.ERR_MSG_JOB_NAME % (job_name, reference_table))
        return ""
    if not check_job_version_exists(job_name, job_version):
        lib.post_to_user(
            job_name, job_version, "error",
            msg.ERR_MSG_JOB_VERSION %
            (job_name, job_version, reference_table))
        logger.info(msg.ERR_MSG_JOB_VERSION %
                    (job_name, job_version, reference_table))
        return ""
    user_job_request = read_from_s3(job_name, job_version)
    if not user_job_request:
        return ""
    if not update_ddb(job_name, job_version, "push"):
        return ""
    user_job_request["job"]["control"] = "push"
    post_to_lambda(job_name, user_job_request)
def update(self, user_job_request):
    """Update an existing job with a new version.

    The job name must already exist but the requested version must not.
    Persists the rebuilt job to S3 and DynamoDB and forwards it to the
    connector lambda. Returns "" on failure.
    """
    job_name = user_job_request["job"]["properties"][0]["name"]
    job_version = user_job_request["job"]["properties"][0]["version"]
    if check_job_name_exists(job_name, job_version):
        if check_job_version_exists(job_name, job_version):
            lib.post_to_user(
                job_name, job_version, "error",
                msg.ERR_MSG_UPDATE_JOB_ALREADY_EXISTS %
                (job_name, job_version))
            logger.info(msg.ERR_MSG_UPDATE_JOB_ALREADY_EXISTS %
                        (job_name, job_version))
            return ""
        else:
            user_job_request["job"]["control"] = "update"
            user_job_request = lib.create_job(user_job_request)
            # BUG FIX (consistency with deploy()): bail out when
            # lib.create_job fails instead of passing a falsy value to
            # write_to_s3/write_to_ddb.
            if not user_job_request:
                return ""
            if not write_to_s3(user_job_request):
                return ""
            if not write_to_ddb(user_job_request):
                return ""
            post_to_lambda(job_name, user_job_request)
    else:
        lib.post_to_user(
            job_name, job_version, "error",
            msg.ERR_MSG_UPDATE_JOB_DOES_NOT_EXISTS % (job_name))
        logger.info(msg.ERR_MSG_UPDATE_JOB_DOES_NOT_EXISTS % (job_name))
        return ""
def write_to_ddb(user_job_request):
    """Record the job in DynamoDB.

    For "update" requests, any previously running version of the same job
    name is first flipped to "stop". The new entry stores the job's
    metadata (site/area/process, S3 location, Greengrass group id, machine
    name and protocol). Returns 1 on success, "" on failure (after
    notifying the user).
    """
    job_name = user_job_request["job"]["properties"][0]["name"]
    job_version = user_job_request["job"]["properties"][0]["version"]
    job_control = user_job_request["job"]["control"]
    if job_control == "update":
        # Change any entries entries for this job name to "stop"
        job_in_ddb = ddb_table.query(
            KeyConditionExpression=Key("jobid").eq(job_name))
        for i in range(0, job_in_ddb["Count"]):
            if (job_in_ddb["Items"][i]["control"] == "start") or (
                    job_in_ddb["Items"][i]["control"] == "update"):
                try:
                    ddb_table.update_item(
                        Key={
                            'jobid': job_in_ddb["Items"][i]["jobid"],
                            'version': job_in_ddb["Items"][i]["version"]
                        },
                        UpdateExpression="set #t =:t, control =:c",
                        ExpressionAttributeValues={
                            ':t':
                            datetime.now().strftime("%d/%m/%Y,%H:%M:%S.%f"),
                            ':c': "stop"
                        },
                        # "timestamp" is a DynamoDB reserved word, hence the alias.
                        ExpressionAttributeNames={"#t": "timestamp"})
                except Exception:
                    # BUG FIX: narrowed bare `except:` so system-exiting
                    # exceptions are not swallowed.
                    lib.post_to_user(
                        job_name, job_version, "error",
                        msg.ERR_MSG_DDB_UPDATE_FAILURE % (reference_table))
                    logger.info(msg.ERR_MSG_DDB_UPDATE_FAILURE %
                                (reference_table))
                    return ""
    # get greengrass group id to also store in the ddb
    gg_group_id = ""
    if job_control == "deploy":
        if "gg_group_id" in user_job_request["job"]:
            gg_group_id = user_job_request["job"]["gg_group_id"]
        else:
            gg_group_id = os.environ["GGG_ID"]
    elif job_control == "update":
        # Reuse the group id already recorded for this job, if any.
        for i in range(0, job_in_ddb["Count"]):
            if "gggroupid" in job_in_ddb["Items"][i]:
                if job_in_ddb["Items"][i]["gggroupid"] != "":
                    gg_group_id = job_in_ddb["Items"][i]["gggroupid"]
    try:
        ddb_table.put_item(
            Item={
                "jobid": job_name,
                "timestamp":
                datetime.now().strftime("%d/%m/%Y,%H:%M:%S.%f"),
                "site":
                user_job_request["job"]["machine-details"]["site-name"],
                "area": user_job_request["job"]["machine-details"]["area"],
                "process":
                user_job_request["job"]["machine-details"]["process"],
                "version": int(float(str(job_version))),
                "s3 bucket": "https://" + reference_bucket + ".s3-" +
                os.environ["AWS_REGION"] + ".amazonaws.com" + " / " +
                reference_key + job_name + "#v" + job_version + ".json",
                "control": job_control,
                "gggroupid": gg_group_id,
                "machine": user_job_request["job"]["machine-details"]
                ["connectivity-parameters"]["machine-name"],
                "protocol": user_job_request["job"]["machine-details"]
                ["connectivity-parameters"]["protocol"]
            })
        return 1
    except Exception:
        # BUG FIX: narrowed bare `except:` so system-exiting exceptions
        # are not swallowed.
        lib.post_to_user(job_name, job_version, "error",
                         msg.ERR_MSG_DDB_WRITE_FAILURE % (reference_table))
        logger.info(msg.ERR_MSG_DDB_WRITE_FAILURE % (reference_table))
        return ""