class Receiver():
    """Receives frames published to the local TASS ZMQ socket."""

    def __init__(self):
        # Helpers provides config loading and logging; OpenCV wraps the
        # project's image utilities.
        self.Helpers = Helpers()
        self._confs = self.Helpers.loadConfigs()
        self.LogFile = self.Helpers.setLogFile(self._confs["aiCore"]["Logs"] + "/Local")
        self.OpenCV = OpenCV()
        self.configureSocket()

    def configureSocket(self):
        ###############################################################
        #
        # Configures the socket we will stream the frames to
        #
        ###############################################################
        context = zmq.Context()
        self.tassSocket = context.socket(zmq.SUB)
        self.tassSocket.bind("tcp://*:" + str(self._confs["Socket"]["port"]))
        # Subscribe to all topics. Fix: np.unicode was deprecated in
        # NumPy 1.20 and removed in 2.0; a plain str is the correct
        # argument for setsockopt_string.
        self.tassSocket.setsockopt_string(zmq.SUBSCRIBE, '')
        # NOTE(review): this socket binds rather than connects; the log
        # message wording ("Connected To") predates that — confirm intent.
        self.Helpers.logMessage(
            self.LogFile, "TASS", "INFO",
            "Connected To Socket: tcp://" + self._confs["Socket"]["host"] +
            ":" + str(self._confs["Socket"]["port"]))
class Humans():
    """JumpWay REST helper for looking up humans recognised by TASS."""

    def __init__(self):
        # Wire up helpers, logging and the REST client, then load configs.
        self.Helpers = Helpers()
        self.Logging = Logging()
        self.JumpWayREST = JumpWayREST()
        self._confs = self.Helpers.loadConfigs()
        self.LogFile = self.Logging.setLogFile(self._confs["AI"]["Logs"]+"Client/")
        self.Logging.logMessage(
            self.LogFile, "CLIENT", "INFO",
            "GeniSys AI JumpWay REST Client Ready")

    def getHumanByFace(self, response):
        """Ask the TASS camera endpoint who is currently in view.

        NOTE(review): the incoming ``response`` argument is immediately
        overwritten by the API call result (original behaviour kept).
        """
        payload = {}
        requestHeaders = {'content-type': 'application/json'}
        endpoint = self._confs["iotJumpWay"]["API"]["REST"] + "/TASS/0_1_0/checkCamera"

        self.Logging.logMessage(
            self.LogFile, "HUMANS", "INFO", "Checking Camera...")

        response = self.JumpWayREST.apiCall(endpoint, payload, requestHeaders)

        self.Logging.logMessage(
            self.LogFile, "CLIENT", "OK", "Response: "+str(response))

        if response["Response"] != "OK":
            return response["ResponseMessage"]

        detected = len(response["ResponseData"])
        if detected == 1:
            return "I detected " + str(detected) + " human, " + response["ResponseData"][0]["userid"]
        return "I detected " + str(detected) + " humans, "
class Client():
    """Prepares a connection to the GeniSys NLU inference API for one user."""

    def __init__(self, user):
        # Core services first, then configuration.
        self.Helpers = Helpers()
        self.Logging = Logging()
        self._confs = self.Helpers.loadConfigs()
        self.LogFile = self.Logging.setLogFile(self._confs["AI"]["Logs"] + "Client/")
        # The inference endpoint is user specific.
        self.apiUrl = self._confs["AI"]["FQDN"] + "/communicate/infer/" + user
        self.headers = {"content-type": 'application/json'}
        self.Logging.logMessage(
            self.LogFile, "CLIENT", "INFO", "GeniSys AI Client Ready")
class Server():
    """Movidius-backed TASS server: loads and allocates the facenet graph."""

    def __init__(self):
        # Placeholders for the device, graph buffers and preprocessing
        # settings; config is loaded via Helpers.
        self._configs = {}
        self.movidius = None
        self.cameraStream = None
        self.imagePath = None
        self.mean = 128
        self.std = 1 / 128
        self.categories = []
        self.fgraphfile = None
        self.fgraph = None
        self.reqsize = None
        self.Helpers = Helpers()
        self._configs = self.Helpers.loadConfigs()
        print("-- Server Initiated")

    def CheckDevices(self):
        """Find and open the first Movidius NCS device, or quit."""
        #mvnc.SetGlobalOption(mvnc.GlobalOption.LOGLEVEL, 2)
        devices = mvnc.EnumerateDevices()
        if len(devices) == 0:
            print('!! WARNING! No Movidius Devices Found !!')
            quit()
        self.movidius = mvnc.Device(devices[0])
        self.movidius.OpenDevice()
        print("-- Movidius Connected")

    def allocateGraph(self, graphfile, graphID):
        """Allocate ``graphfile`` on the device for the given graph ID.

        Fix: the original ignored ``graphID`` and always wrote
        ``self.fgraph``; gate on the ID for consistency with the
        dual-graph Server class elsewhere in this project. The only
        existing call passes "TASS", so behaviour is unchanged for it.
        """
        if graphID == "TASS":
            self.fgraph = self.movidius.AllocateGraph(graphfile)

    def loadRequirements(self, graphID):
        """Read the configured TASS graph from disk and allocate it."""
        with open(self._configs["ClassifierSettings"]["NetworkPath"] +
                  self._configs["ClassifierSettings"]["Graph"], mode='rb') as f:
            self.fgraphfile = f.read()
        self.allocateGraph(self.fgraphfile, "TASS")
        print("-- Allocated TASS Graph OK")
class JumpWayREST():
    """Minimal authenticated REST wrapper for the iotJumpWay API."""

    def __init__(self):
        self.Helpers = Helpers()
        self.Logging = Logging()
        self._confs = self.Helpers.loadConfigs()
        self.LogFile = self.Logging.setLogFile(self._confs["AI"]["Logs"]+"Client/")

    def createHashMac(self, secret, data):
        """Return the hex HMAC-SHA256 of ``data`` keyed with ``secret``."""
        key = bytearray(secret.encode("utf-8"))
        return hmac.new(key, data.encode("utf-8"),
                        digestmod=hashlib.sha256).hexdigest()

    def apiCall(self, apiUrl, data, headers):
        """POST ``data`` as JSON to ``apiUrl`` and return the decoded reply.

        NOTE(review): the API secret is used as both the HMAC key and the
        signed message — confirm this matches the server's expectation.
        """
        self.Logging.logMessage(
            self.LogFile, "JUMPWAY", "INFO", "Sending JumpWay REST Request")

        secret = self._confs["iotJumpWay"]["API"]["Secret"]
        signature = self.createHashMac(secret, secret)
        reply = requests.post(
            apiUrl,
            data=json.dumps(data),
            headers=headers,
            auth=HTTPBasicAuth(self._confs["iotJumpWay"]["App"], signature))

        output = json.loads(reply.content)
        self.Logging.logMessage(
            self.LogFile, "JUMPWAY", "INFO",
            "JumpWay REST Response Received: " + str(output))
        return output
class Client():
    """Test client that streams sample images to the TASS inference API."""

    def __init__(self):
        self.Helpers = Helpers()
        self._configs = self.Helpers.loadConfigs()
        # Build the camera-stream base address from the first camera entry.
        self.addr = "http://" + self._configs["Cameras"][0]["Stream"] + ':' + str(self._configs["Cameras"][0]["StreamPort"])
        self.TASSapiUrl = self.addr + '/api/TASS/infer'
        self.content_type = 'image/jpeg'
        self.headers = {'content-type': self.content_type}
        print("-- Client Initiated")
        self.testTASS()

    def testTASS(self):
        """Send every test image in the configured testing dir to TASS."""
        print("-- Using TASS Facenet Classification")
        print("")
        testingDir = self._configs["ClassifierSettings"]["NetworkPath"] + self._configs["ClassifierSettings"]["TestingPath"]
        for test in os.listdir(testingDir):
            print("-- Testing Dir: " + testingDir)
            if test.endswith(('.jpg', '.jpeg', '.png', '.gif')):
                print("-- Sending " + testingDir + test)
                self.sendImage(testingDir + test, "TASS")
                print("")

    def sendImage(self, image, model):
        """Encode ``image`` and POST it to the TASS inference endpoint.

        Fixes: encode as JPEG to match the declared ``image/jpeg``
        content type (the original encoded '.png'), and use
        ``tobytes()`` — ``ndarray.tostring()`` was deprecated and
        removed in NumPy 2.0.
        """
        img = cv2.imread(image)
        _, img_encoded = cv2.imencode('.jpg', img)
        response = requests.post(self.TASSapiUrl,
                                 data=img_encoded.tobytes(),
                                 headers=self.headers)
        print(json.loads(response.text))
class Receiver():
    def __init__(self):
        ###############################################################
        #
        # Sets up all default requirements and placeholders
        # needed for the Foscam frame receiver to run.
        #
        ###############################################################
        self.Helpers = Helpers()
        self._confs = self.Helpers.loadConfigs()
        self.LogFile = self.Helpers.setLogFile(self._confs["aiCore"]["Logs"] + "/Foscam")
        self.OpenCV = OpenCV()
        # Capture handle is attached later by the caller.
        self.OpenCVCapture = None
        self.configureSocket()

    def configureSocket(self):
        ###############################################################
        #
        # Configures and connects to the socket.
        #
        ###############################################################
        context = zmq.Context()
        self.tassSocket = context.socket(zmq.SUB)
        self.tassSocket.bind("tcp://*:" + str(self._confs["Socket"]["port"]))
        # Subscribe to all topics. Fix: np.unicode was deprecated in
        # NumPy 1.20 and removed in 2.0; a plain str is the correct
        # argument for setsockopt_string.
        self.tassSocket.setsockopt_string(zmq.SUBSCRIBE, '')
        self.Helpers.logMessage(
            self.LogFile, "Streamer", "INFO",
            "Connected To Socket: tcp://" + self._confs["Socket"]["host"] +
            ":" + str(self._confs["Socket"]["port"]))
class NLU():
    """NLU engine: classifies sentences into intents, resolves entities,
    manages per-user context and dispatches intent actions. Wired to the
    iotJumpWay MQTT broker for remote commands."""

    def __init__(self):
        # Helpers/Logging provide config loading and file logging; a
        # separate chat log records conversations.
        self.Helpers = Helpers()
        self.Logging = Logging()
        self._confs = self.Helpers.loadConfigs()
        self.LogFile = self.Logging.setLogFile(self._confs["AI"]["Logs"] + "NLU/")
        self.ChatLogFile = self.Logging.setLogFile(self._confs["AI"]["Logs"] + "Chat/")
        self.Logging.logMessage(self.LogFile, "NLU", "INFO", "NLU Classifier LogFile Set")
        self.startMQTT()

    def commandsCallback(self, topic, payload):
        """Handle an iotJumpWay command message (currently only logged/decoded)."""
        self.Logging.logMessage(
            self.LogFile, "iotJumpWay", "INFO",
            "Recieved iotJumpWay Command Data : " + str(payload))
        commandData = json.loads(payload.decode("utf-8"))

    def startMQTT(self):
        """Connect to the iotJumpWay broker, subscribe to the Commands
        channel and register the command callback; exits on failure."""
        try:
            self.jumpwayClient = jumpWayDevice.DeviceConnection({
                "locationID": self._confs["iotJumpWay"]["Location"],
                "zoneID": self._confs["iotJumpWay"]["Zone"],
                "deviceId": self._confs["iotJumpWay"]["Device"],
                "deviceName": self._confs["iotJumpWay"]["DeviceName"],
                "username": self._confs["iotJumpWay"]["MQTT"]["Username"],
                "password": self._confs["iotJumpWay"]["MQTT"]["Password"]})
            self.jumpwayClient.connectToDevice()
            self.jumpwayClient.subscribeToDeviceChannel("Commands")
            self.jumpwayClient.deviceCommandsCallback = self.commandsCallback
            self.Logging.logMessage(self.LogFile, "iotJumpWay", "INFO", "iotJumpWay Client Ready")
        except Exception as e:
            self.Logging.logMessage(self.LogFile, "iotJumpWay", "INFO", "iotJumpWay Client Initiation Failed")
            print(str(e))
            sys.exit()

    def setup(self):
        """Load training/trained data and restore the NER + DNN models."""
        self.Logging.logMessage(self.LogFile, "NLU", "INFO", "NLU Classifier Initiating")
        self.Data = Data(self.Logging, self.LogFile)
        self.Model = Model()
        self.Context = Context()
        self.user = {}
        self.ner = None
        self.trainingData = self.Data.loadTrainingData()
        self.trainedData = self.Data.loadTrainedData()
        self.trainedWords = self.trainedData["words"]
        self.trainedClasses = self.trainedData["classes"]
        self.x = self.trainedData["x"]
        self.y = self.trainedData["y"]
        # intent map is stored as a single-element list in trained data
        self.intentMap = self.trainedData["iMap"][0]
        self.restoreEntitiesModel()
        self.restoreModel()
        self.Logging.logMessage(self.LogFile, "NLU",
                                "INFO", "NLU Ready")

    def restoreEntitiesModel(self):
        """Restore the MITIE NER model when the trained .dat file exists."""
        if os.path.exists(self._confs["ClassifierSettings"]["EntitiesDat"]):
            self.ner = named_entity_extractor(
                self._confs["ClassifierSettings"]["EntitiesDat"])
            self.Logging.logMessage(self.LogFile, "NER", "OK", "Restored NLU NER Model")

    def restoreModel(self):
        """Rebuild the DNN from trained x/y data."""
        self.tmodel = self.Model.buildDNN(self.x, self.y)
        self.Logging.logMessage(self.LogFile, "NLU", "INFO", "Restored NLU Model")

    def setupEntities(self):
        """Initialise the entity extractor when configured for MITIE."""
        if self._confs["ClassifierSettings"]["Entities"] == "Mitie":
            self.entityExtractor = Entities()
            self.Logging.logMessage(self.LogFile, "NER", "INFO", "NLU Entity Extractor Initiated")

    def initiateSession(self, userID):
        """Create an empty per-user session with a history dict."""
        self.userID = userID
        if not self.userID in self.user:
            self.user[self.userID] = {}
            self.user[self.userID]["history"] = {}
        self.Logging.logMessage(self.LogFile, "Session", "INFO",
                                "NLU Session Ready For User #" + self.userID)

    def setThresholds(self, threshold):
        """Set the intent confidence threshold and the MITIE entity threshold."""
        self.threshold = float(threshold)
        self.entityThrshld = self._confs["ClassifierSettings"]["Mitie"][
            "Threshold"]

    def predict(self, parsedSentence):
        """Return (class, confidence) pairs above threshold, best first."""
        predictions = [[index, confidence] for index, confidence in enumerate(
            self.tmodel.predict([
                self.Data.makeInferenceBag(parsedSentence, self.trainedWords)
            ])[0]) if confidence > self.threshold]
        predictions.sort(key=lambda x: x[1], reverse=True)
        classification = []
        for prediction in predictions:
            classification.append(
                (self.trainedClasses[prediction[0]], prediction[1]))
        return classification

    def talk(self, sentence, debug=False):
        """Process a sentence end to end: parse entities, classify intent,
        apply session context, pick a response (or fallback) and run any
        configured intent action. Returns an OK/FAILED response dict."""
        self.Logging.logMessage(self.LogFile, "GeniSys", "STATUS", "Processing")
        parsed, fallback, entityHolder, parsedSentence = self.entityExtractor.parseEntities(
            sentence, self.ner, self.trainingData)
        classification = self.predict(parsedSentence)
        if len(classification) > 0:
            clearEntities = False
            theIntent = self.trainingData["intents"][self.intentMap[
                classification[0][0]]]
            # Entities found but intent expects none -> discard them below.
            if len(entityHolder) and not len(theIntent["entities"]):
                clearEntities = True
            if (self.Context.checkSessionContext(self.user[self.userID], theIntent)):
                # In-context branch.
                if self.Context.checkClearContext(theIntent, 0):
                    self.user[self.userID]["context"] = ""
                contextIn, contextOut, contextCurrent = self.Context.setContexts(
                    theIntent, self.user[self.userID])
                if fallback and "fallbacks" in theIntent and len(
                        theIntent["fallbacks"]):
                    response = self.entityExtractor.replaceResponseEntities(
                        random.choice(theIntent["fallbacks"]), entityHolder)
                    action, actionResponses = self.Helpers.setAction(theIntent)
                elif "entityType" in theIntent and theIntent[
                        "entityType"] == "Numbers":
                    response = random.choice(theIntent["responses"])
                    action, actionResponses = self.Helpers.setAction(theIntent)
                elif not len(entityHolder) and len(theIntent["entities"]):
                    response = self.entityExtractor.replaceResponseEntities(
                        random.choice(theIntent["fallbacks"]), entityHolder)
                    action, actionResponses = self.Helpers.setAction(theIntent)
                elif clearEntities:
                    entityHolder = []
                    response = random.choice(theIntent["responses"])
                    action, actionResponses = self.Helpers.setAction(theIntent)
                else:
                    response = self.entityExtractor.replaceResponseEntities(
                        random.choice(theIntent["responses"]), entityHolder)
                    action, actionResponses = self.Helpers.setAction(theIntent)
                if action != None:
                    # Actions are dotted paths: "folder.Class.method".
                    classParts = action.split(".")
                    classFolder = classParts[0]
                    className = classParts[1]
                    module = __import__(classParts[0] + "." + classParts[1],
                                        globals(), locals(), [className])
                    actionClass = getattr(module, className)()
                    response = getattr(actionClass, classParts[2])(
                        random.choice(actionResponses))
                return {
                    "Response": "OK",
                    "ResponseData": [{
                        "Received": sentence,
                        "Intent": classification[0][0],
                        "Confidence": str(classification[0][1]),
                        "Response": response,
                        "ContextIn": contextIn,
                        "ContextOut": contextOut,
                        "Context": contextCurrent,
                        "Action": action,
                        "Entities": entityHolder
                    }]
                }
            else:
                # Out-of-context branch: reset context then respond.
                self.user[self.userID]["context"] = ""
                contextIn, contextOut, contextCurrent = self.Context.setContexts(
                    theIntent, self.user[self.userID])
                # NOTE(review): ``fallback in theIntent`` tests a boolean
                # as a dict key (the in-context branch uses
                # "fallbacks" in theIntent) — confirm which is intended.
                if fallback and fallback in theIntent and len(
                        theIntent["fallbacks"]):
                    response = self.entityExtractor.replaceResponseEntities(
                        random.choice(theIntent["fallbacks"]), entityHolder)
                    action, actionResponses = None, []
                else:
                    response = self.entityExtractor.replaceResponseEntities(
                        random.choice(theIntent["responses"]), entityHolder)
                    action, actionResponses = self.Helpers.setAction(theIntent)
                if action != None:
                    classParts = action.split(".")
                    classFolder = classParts[0]
                    className = classParts[1]
                    module = __import__(classParts[0] + "." + classParts[1],
                                        globals(), locals(), [className])
                    actionClass = getattr(module, className)()
                    response = getattr(actionClass, classParts[2])(
                        random.choice(actionResponses))
                else:
                    response = self.entityExtractor.replaceResponseEntities(
                        random.choice(theIntent["responses"]), entityHolder)
                return {
                    "Response": "OK",
                    "ResponseData": [{
                        "Received": sentence,
                        "Intent": classification[0][0],
                        "Confidence": str(classification[0][1]),
                        "Response": response,
                        "ContextIn": contextIn,
                        "ContextOut": contextOut,
                        "ContextCurrent": contextCurrent,
                        "Action": action,
                        "Entities": entityHolder
                    }]
                }
        else:
            # No intent above threshold: return a default response.
            contextCurrent = self.Context.getCurrentContext(
                self.user[self.userID])
            return {
                "Response": "FAILED",
                "ResponseData": [{
                    "Received": sentence,
                    "Intent": "UNKNOWN",
                    "Confidence": "NA",
                    "Responses": [],
                    "Response": random.choice(
                        self._confs["ClassifierSettings"]["defaultResponses"]),
                    "ContextIn": "NA",
                    "ContextOut": "NA",
                    "ContextCurrent": contextCurrent,
                    "Action": "NA",
                    "Entities": entityHolder
                }]
            }
class MySql():
    """Thin wrapper around a pymysql connection configured from
    required/confs.json."""

    def __init__(self):
        # Helpers supplies config loading; connection/cursor start empty
        # and the connection is opened immediately.
        self.Helpers = Helpers()
        self._confs = self.Helpers.loadConfigs()
        self.mysqlDbConn = None
        self.mysqlDbCur = None
        self.mysqlConnect()

    def mysqlConnect(self):
        """Open the MySql connection using required/confs.json settings."""
        try:
            self.mysqlDbConn = pymysql.connect(
                host=self._confs["aiCore"]["IP"],
                user=self._confs["MySql"]["dbusername"],
                passwd=self._confs["MySql"]["dbpassword"],
                db=self._confs["MySql"]["dbname"])
        except Exception as errorz:
            print('FAILED')
            print(errorz)

    def setMysqlCursor(self):
        """Attach a default (tuple-row) cursor to the open connection."""
        try:
            self.mysqlDbCur = self.mysqlDbConn.cursor()
        except Exception as errorz:
            print('FAILED')
            print(errorz)

    def setMysqlCursorRows(self):
        """Attach a DictCursor so rows come back as dictionaries."""
        try:
            self.mysqlDbCur = self.mysqlDbConn.cursor(pymysql.cursors.DictCursor)
        except Exception as errorz:
            print('FAILED')
            print(errorz)
class Entities():
    """MITIE-based named-entity extraction, substitution and training."""

    def __init__(self):
        self.Helpers = Helpers()
        self._confs = self.Helpers.loadConfigs()
        self.stemmer = LancasterStemmer()

    def loadEntities(self):
        """Return the trained NER model if the .dat file exists, else None."""
        if os.path.exists(self._confs["ClassifierSettings"]["EntitiesDat"]):
            return named_entity_extractor(self._confs["ClassifierSettings"]["EntitiesDat"])

    def parseEntities(self, sentence, ner, trainingData):
        """Extract entities from ``sentence`` with the NER model.

        Returns (parsed, fallback, entityHolder, parsedSentence) where
        parsedSentence has confident entities replaced by <tag> markers.
        """
        entityHolder = []
        fallback = False
        parsedSentence = sentence
        parsed = ""
        if os.path.exists(self._confs["ClassifierSettings"]["EntitiesDat"]):
            tokens = sentence.lower().split()
            entities = ner.extract_entities(tokens)
            for e in entities:
                # Fix: local renamed from ``range`` (shadowed the builtin).
                tokenRange = e[0]
                tag = e[1]
                score = e[2]
                scoreText = "{:0.3f}".format(score)
                if score > self._confs["ClassifierSettings"]["Mitie"]["Threshold"]:
                    parsed, fallback = self.replaceEntity(
                        " ".join(tokens[i] for i in tokenRange), tag, trainingData)
                    entityHolder.append({
                        "Entity": tag,
                        "ParsedEntity": parsed,
                        "Confidence": str(scoreText)})
                    parsedSentence = sentence.replace(
                        " ".join(sentence.split()[i] for i in tokenRange),
                        "<" + tag + ">")
                else:
                    # Below-threshold entities are still recorded but the
                    # sentence is left untagged.
                    parsed, fallback = self.replaceEntity(
                        " ".join(tokens[i] for i in tokenRange), tag, trainingData)
                    entityHolder.append({
                        "Entity": tag,
                        "ParsedEntity": parsed,
                        "Confidence": str(scoreText)})
                    parsed = parsedSentence
        return parsed, fallback, entityHolder, parsedSentence

    def replaceResponseEntities(self, response, entityHolder):
        """Substitute <Entity> placeholders in ``response`` with the
        title-cased parsed entity values."""
        for entity in entityHolder:
            response = response.replace("<" + entity["Entity"] + ">",
                                        entity["ParsedEntity"].title())
        return response

    def replaceEntity(self, value, entity, trainingData):
        """Map ``value`` to its canonical synonym value.

        Returns (canonicalValue, noMatch) — noMatch is False when a
        synonym matched.
        """
        lowEntity = value.lower()
        match = True
        if "entitieSynonyms" in trainingData:
            # NOTE(review): assumes each synonym group dict carries
            # ``entity`` as a key — confirm against the data schema.
            for entities in trainingData["entitieSynonyms"]:
                for synonyms in entities[entity]:
                    for synonym in synonyms["synonyms"]:
                        if lowEntity == synonym.lower():
                            lowEntity = synonyms["value"]
                            match = False
                            break
        return lowEntity, match

    def trainEntities(self, mitiemLocation, trainingData):
        """Train the MITIE NER model from intent entity annotations and
        save it to the configured EntitiesDat path."""
        trainer = ner_trainer(mitiemLocation)
        counter = 0
        hasEnts = 0
        for intents in trainingData['intents']:
            i = 0
            for entity in intents['entities']:
                hasEnts = 1
                tokens = trainingData['intents'][counter]["text"][i].lower().split()
                data = ner_training_instance(tokens)
                # Fix: ``xrange`` is Python 2 only; ``range`` is the
                # Python 3 equivalent accepted by MITIE's add_entity.
                data.add_entity(
                    range(entity["rangeFrom"], entity["rangeTo"]),
                    entity["entity"])
                trainer.add(data)
                i = i + 1
            counter = counter + 1
        if hasEnts:
            trainer.num_threads = 4
            ner = trainer.train()
            ner.save_to_disk(self._confs["ClassifierSettings"]["EntitiesDat"])
class Classifier():
    """Movidius-backed Inception classifier with iotJumpWay telemetry."""

    def __init__(self):
        # Placeholders first; device check, config and MQTT wiring follow.
        self._configs = {}
        self.movidius = None
        self.jumpwayClient = None
        self.cameraStream = None
        self.imagePath = None
        self.mean = 128
        self.std = 1 / 128
        self.categories = []
        self.graphfile = None
        self.graph = None
        self.reqsize = None
        self.extensions = [".jpg", ".png"]
        self.CheckDevices()
        self.Helpers = Helpers()
        self._configs = self.Helpers.loadConfigs()
        self.startMQTT()
        print("")
        print("-- Classifier Initiated")
        print("")

    def CheckDevices(self):
        """Find and open the first Movidius NCS device, or quit."""
        #mvnc.SetGlobalOption(mvnc.GlobalOption.LOGLEVEL, 2)
        devices = mvnc.EnumerateDevices()
        if not devices:
            print('!! WARNING! No Movidius Devices Found !!')
            quit()
        self.movidius = mvnc.Device(devices[0])
        self.movidius.OpenDevice()
        print("-- Movidius Connected")

    def allocateGraph(self, graphfile):
        """Allocate the graph buffer onto the connected device."""
        self.graph = self.movidius.AllocateGraph(graphfile)

    def loadInceptionRequirements(self):
        """Load the Inception graph and the category list from disk."""
        self.reqsize = self._configs["ClassifierSettings"]["image_size"]
        graphPath = (self._configs["ClassifierSettings"]["NetworkPath"] +
                     self._configs["ClassifierSettings"]["InceptionGraph"])
        with open(graphPath, mode='rb') as graphFile:
            self.graphfile = graphFile.read()
        self.allocateGraph(self.graphfile)
        print("-- Allocated Graph OK")
        classesPath = self._configs["ClassifierSettings"]["NetworkPath"] + 'model/classes.txt'
        with open(classesPath, 'r') as classesFile:
            for line in classesFile:
                cat = line.split('\n')[0]
                if cat != 'classes':
                    self.categories.append(cat)
        print("-- Categories Loaded OK:", len(self.categories))

    def startMQTT(self):
        """Connect the device to the iotJumpWay MQTT broker; exit on failure."""
        try:
            self.jumpwayClient = JWMQTTdevice.DeviceConnection({
                "locationID": self._configs["IoTJumpWay"]["Location"],
                "zoneID": self._configs["IoTJumpWay"]["Zone"],
                "deviceId": self._configs["IoTJumpWay"]["Device"],
                "deviceName": self._configs["IoTJumpWay"]["DeviceName"],
                "username": self._configs["IoTJumpWayMQTT"]["MQTTUsername"],
                "password": self._configs["IoTJumpWayMQTT"]["MQTTPassword"]})
        except Exception as e:
            print(str(e))
            sys.exit()
        self.jumpwayClient.connectToDevice()
        print("-- IoT JumpWay Initiated")
class Classifier():
    """Movidius classifier that loads a single pre-built graph and
    reports to the iotJumpWay MQTT broker."""

    def __init__(self):
        self._configs = {}
        self.movidius = None
        self.jumpwayClient = None
        self.graphfile = None
        self.graph = None
        self.CheckDevices()
        self.Helpers = Helpers()
        self._configs = self.Helpers.loadConfigs()
        self.loadRequirements()
        self.startMQTT()
        print("")
        print("-- Classifier Initiated")
        print("")

    def CheckDevices(self):
        """Find and open the first Movidius NCS device, or quit."""
        #mvnc.SetGlobalOption(mvnc.GlobalOption.LOGLEVEL, 2)
        devices = mvnc.EnumerateDevices()
        if not devices:
            print('!! WARNING! No Movidius Devices Found !!')
            quit()
        self.movidius = mvnc.Device(devices[0])
        self.movidius.OpenDevice()
        print("-- Movidius Connected")

    def allocateGraph(self, graphfile):
        """Allocate the graph buffer onto the connected device."""
        self.graph = self.movidius.AllocateGraph(graphfile)

    def loadRequirements(self):
        """Read the configured graph file from disk and allocate it."""
        graphPath = (self._configs["ClassifierSettings"]["NetworkPath"] +
                     self._configs["ClassifierSettings"]["Graph"])
        print(graphPath)
        with open(graphPath, mode='rb') as graphFile:
            self.graphfile = graphFile.read()
        self.allocateGraph(self.graphfile)
        print("-- Allocated Graph OK")

    def startMQTT(self):
        """Connect the device to the iotJumpWay MQTT broker; exit on failure."""
        try:
            self.jumpwayClient = JWMQTTdevice.DeviceConnection({
                "locationID": self._configs["IoTJumpWay"]["Location"],
                "zoneID": self._configs["IoTJumpWay"]["Zone"],
                "deviceId": self._configs["IoTJumpWay"]["Device"],
                "deviceName": self._configs["IoTJumpWay"]["DeviceName"],
                "username": self._configs["IoTJumpWayMQTT"]["MQTTUsername"],
                "password": self._configs["IoTJumpWayMQTT"]["MQTTPassword"]})
        except Exception as e:
            print(str(e))
            sys.exit()
        self.jumpwayClient.connectToDevice()
        print("-- IoT JumpWay Initiated")
class Trainer():
    """Interactive NLU trainer: prepares training data/entities on init,
    then trains the DNN on confirmation, publishing progress over the
    supplied iotJumpWay client."""

    def __init__(self, jumpWay):
        # jumpWay: an already-connected iotJumpWay client used to publish
        # training progress to the "Training" channel.
        self.Helpers = Helpers()
        self.Logging = Logging()
        self.jumpwayCl = jumpWay
        self._confs = self.Helpers.loadConfigs()
        self.LogFile = self.Logging.setLogFile(self._confs["AI"]["Logs"] + "Train/")
        self.Logging.logMessage(self.LogFile, "LogFile", "INFO", "NLU Trainer LogFile Set")
        self.Model = Model()
        self.Data = Data(self.Logging, self.LogFile)
        self.intentMap = {}
        self.words = []
        self.classes = []
        self.dataCorpus = []
        self.setupData()
        self.setupEntities()

    def setupData(self):
        """Load the raw training data and build words/classes/corpus plus
        the finalised x/y training tensors."""
        self.trainingData = self.Data.loadTrainingData()
        self.Logging.logMessage(self.LogFile, "Trainer", "INFO", "Loaded NLU Training Data")
        self.words, self.classes, self.dataCorpus, self.intentMap = self.Data.prepareData(
            self.trainingData)
        self.x, self.y = self.Data.finaliseData(self.classes, self.dataCorpus, self.words)
        self.Logging.logMessage(self.LogFile, "TRAIN", "INFO", "NLU Trainer Data Ready")

    def setupEntities(self):
        """When configured for MITIE, train the entity extractor as well."""
        if self._confs["ClassifierSettings"]["Entities"] == "Mitie":
            self.entityExtractor = Entities()
            self.Logging.logMessage(self.LogFile, "TRAIN", "OK", "NLU Trainer Entity Extractor Ready")
            self.entityExtractor.trainEntities(
                self._confs["ClassifierSettings"]["Mitie"]["ModelLocation"],
                self.trainingData)

    def trainModel(self):
        """Prompt for confirmation, then train the DNN, timing the run and
        publishing start/end messages to the iotJumpWay Training channel."""
        # Loop until the user answers exactly 'Yes' (train) or 'No' (exit);
        # any other input re-prompts.
        while True:
            self.Logging.logMessage(self.LogFile, "TRAIN", "ACTION",
                                    "Ready To Begin Training ? (Yes/No)")
            userInput = input(">")
            if userInput == 'Yes': break
            if userInput == 'No': exit()
        humanStart, trainingStart = self.Helpers.timerStart()
        self.Logging.logMessage(self.LogFile, "TRAIN", "INFO",
                                "NLU Model Training At " + humanStart)
        self.jumpwayCl.publishToDeviceChannel(
            "Training",
            {
                "NeuralNet": "NLU",
                "Start": trainingStart,
                "End": "In Progress",
                "Total": "In Progress",
                "Message": "NLU Model Training At " + humanStart
            })
        self.Model.trainDNN(self.x, self.y, self.words, self.classes,
                            self.intentMap)
        trainingEnd, trainingTime, humanEnd = self.Helpers.timerEnd(
            trainingStart)
        self.Logging.logMessage(
            self.LogFile, "TRAIN", "OK",
            "NLU Model Trained At " + humanEnd + " In " + str(trainingEnd) + " Seconds")
        self.jumpwayCl.publishToDeviceChannel(
            "Training",
            {
                "NeuralNet": "NLU",
                "Start": trainingStart,
                "End": trainingEnd,
                "Total": trainingTime,
                "Message": "NLU Model Trained At " + humanEnd + " In " + str(trainingEnd) + " Seconds"
            })
class Server():
    """Dual-graph Movidius server: an Inception graph for IDC plus a
    facenet graph for TASS, selected by graphID."""

    def __init__(self):
        # Placeholders for the device, both graph buffers (graph/graphfile
        # for IDC, fgraph/fgraphfile for TASS) and preprocessing settings.
        self._configs = {}
        self.movidius = None
        self.cameraStream = None
        self.imagePath = None
        self.mean = 128
        self.std = 1/128
        self.categories = []
        self.graphfile = None
        self.graph = None
        self.fgraphfile = None
        self.fgraph = None
        self.reqsize = None
        self.Helpers = Helpers()
        self._configs = self.Helpers.loadConfigs()
        print("-- Server Initiated")

    def CheckDevices(self):
        """Find and open the first Movidius NCS device, or quit."""
        #mvnc.SetGlobalOption(mvnc.GlobalOption.LOGLEVEL, 2)
        devices = mvnc.EnumerateDevices()
        if len(devices) == 0:
            print('!! WARNING! No Movidius Devices Found !!')
            quit()
        self.movidius = mvnc.Device(devices[0])
        self.movidius.OpenDevice()
        print("-- Movidius Connected")

    def allocateGraph(self, graphfile, graphID):
        """Allocate graphfile on the device: "IDC" -> self.graph,
        "TASS" -> self.fgraph; any other ID is a no-op."""
        if graphID == "IDC":
            self.graph = self.movidius.AllocateGraph(graphfile)
        elif graphID == "TASS":
            self.fgraph = self.movidius.AllocateGraph(graphfile)

    def loadRequirements(self, graphID):
        """Load the requested graph from disk and allocate it; the IDC
        branch also loads the category list from model/classes.txt."""
        if graphID == "IDC":
            self.reqsize = self._configs["ClassifierSettings"]["image_size"]
            with open(self._configs["ClassifierSettings"]["NetworkPath"] + self._configs["ClassifierSettings"]["InceptionGraph"], mode='rb') as f:
                self.graphfile = f.read()
            self.allocateGraph(self.graphfile,"IDC")
            print("-- Allocated IDC Graph OK")
            with open(self._configs["ClassifierSettings"]["NetworkPath"] + 'model/classes.txt', 'r') as f:
                for line in f:
                    # Strip the trailing newline; skip the header row.
                    cat = line.split('\n')[0]
                    if cat != 'classes':
                        self.categories.append(cat)
                # NOTE(review): close() is redundant inside the with block.
                f.close()
            print("-- IDC Categories Loaded OK:", len(self.categories))
        elif graphID == "TASS":
            with open(self._configs["ClassifierSettings"]["NetworkPath"] + self._configs["ClassifierSettings"]["Graph"], mode='rb') as f:
                self.fgraphfile = f.read()
            self.allocateGraph(self.fgraphfile,"TASS")
            print("-- Allocated TASS Graph OK")
class Facenet():
    def __init__(self):
        ###############################################################
        #
        # Sets up all default requirements and placeholders
        #
        ###############################################################
        self.Helpers = Helpers()
        self._confs = self.Helpers.loadConfigs()
        self.LogFile = self.Helpers.setLogFile(self._confs["aiCore"]["Logs"] + "/Foscam")
        self.OpenCV = OpenCV()

    def infer(self, image_to_classify, facenet_graph):
        ###############################################################
        #
        # Runs the image through Facenet and returns the embedding
        #
        ###############################################################
        resized_image = self.preprocess(image_to_classify)
        facenet_graph.LoadTensor(resized_image.astype(np.float16), None)
        output, userobj = facenet_graph.GetResult()
        return output

    def match(self, face1_output, face2_output):
        ###############################################################
        #
        # Determines whether two embeddings are a match; returns a
        # (matched, totalDifference) pair.
        #
        ###############################################################
        if (len(face1_output) != len(face2_output)):
            self.Helpers.logMessage(self.LogFile, "TASS", "ERROR", "Distance Missmatch")
            # Fix: return a (bool, diff) pair like every other exit path;
            # the original returned a bare False here, which broke callers
            # that unpack the result as a tuple.
            return False, 0
        total_diff = 0
        # Sum of squared differences between the two embeddings.
        for output_index in range(0, len(face1_output)):
            this_diff = np.square(face1_output[output_index] - face2_output[output_index])
            total_diff += this_diff
        self.Helpers.logMessage(self.LogFile, "TASS", "INFO", "Calculated Distance " + str(total_diff))
        # 1.3 is the match threshold used by this project.
        if (total_diff < 1.3):
            return True, total_diff
        else:
            return False, total_diff

    def preprocess(self, src):
        ###############################################################
        #
        # Preprocesses an image: resize to 160x160, BGR->RGB, whiten
        #
        ###############################################################
        NETWORK_WIDTH = 160
        NETWORK_HEIGHT = 160
        preprocessed_image = cv2.resize(src, (NETWORK_WIDTH, NETWORK_HEIGHT))
        preprocessed_image = cv2.cvtColor(preprocessed_image, cv2.COLOR_BGR2RGB)
        preprocessed_image = self.OpenCV.whiten(preprocessed_image)
        return preprocessed_image

    def loadGraph(self, graphID, movidius):
        ###############################################################
        #
        # Loads Facenet graph
        #
        ###############################################################
        if graphID == "Facenet":
            with open(self._confs["Classifier"]["NetworkPath"] + self._confs["Classifier"]["Graph"], mode='rb') as f:
                fgraphfile = f.read()
            fgraph = movidius.AllocateGraph(fgraphfile)
            self.Helpers.logMessage(self.LogFile, "TASS", "INFO", "Loaded TASS Graph")
            return fgraph, fgraphfile

    def CheckDevices(self):
        ###############################################################
        #
        # Checks for Movidius devices and connects to the first device,
        # if no devices are plugged in the program will quit.
        #
        ###############################################################
        #mvnc.SetGlobalOption(mvnc.GlobalOption.LOGLEVEL, 2)
        devices = mvnc.EnumerateDevices()
        if len(devices) == 0:
            self.Helpers.logMessage(self.LogFile, "TASS", "INFO", "No Movidius Devices, TASS Exiting")
            quit()
        movidius = mvnc.Device(devices[0])
        movidius.OpenDevice()
        self.Helpers.logMessage(self.LogFile, "TASS", "INFO", "Connected To Movidius")
        return movidius, devices, devices[0]
class NLU():
    def __init__(self):
        ###############################################################
        #
        # Sets up all default requirements and placeholders
        # needed for the NLU engine to run.
        #
        # - Helpers: Useful global functions
        # - JumpWay/jumpWayClient: iotJumpWay class and connection
        # - Logging: Logging class
        #
        ###############################################################
        self.isTraining = False
        self.ner = None
        self.Helpers = Helpers()
        self._confs = self.Helpers.loadConfigs()
        self.user = {}
        self.LogFile = self.Helpers.setLogFile(self._confs["aiCore"]["Logs"] + "NLU/")
        self.ChatLogFile = self.Helpers.setLogFile(
            self._confs["aiCore"]["Logs"] + "Chat/")
        self.jumpWay = JumpWay()
        self.jumpWayClient = self.jumpWay.startMQTT()
        self.jumpWayClient.subscribeToDeviceChannel(
            self._confs["iotJumpWay"]["Channels"]["Commands"])
        self.jumpWayClient.deviceCommandsCallback = self.commandsCallback

    def initiateSession(self):
        ###############################################################
        #
        # Initiates empty guest user session, GeniSys will ask the user
        # verify their GeniSys user by speaking or typing if it does
        # not know who it is speaking to.
        #
        ###############################################################
        self.userID = 0
        if not self.userID in self.user:
            self.user[self.userID] = {}
            self.user[self.userID]["history"] = {}

    def initNLU(self):
        ###############################################################
        #
        # Initiates the NLU setting up the data, NLU / entities models
        # and required modules such as context and extensions.
        #
        ###############################################################
        self.Data = Data()
        self.trainingData = self.Data.loadTrainingData()
        self.trainedData = self.Data.loadTrainedData()
        self.Model = Model()
        self.Context = Context()
        self.Extensions = Extensions()
        self.restoreData()
        self.restoreNER()
        self.restoreNLU()
        self.initiateSession()
        self.setThresholds()

    def commandsCallback(self, topic, payload):
        ###############################################################
        #
        # The callback function that is triggerend in the event of a
        # command communication from the iotJumpWay.
        #
        ###############################################################
        self.Helpers.logMessage(
            self.LogFile, "iotJumpWay", "INFO",
            "Recieved iotJumpWay Command Data : " + str(payload))
        commandData = json.loads(payload.decode("utf-8"))

    def restoreData(self):
        ###############################################################
        #
        # Sets the local trained data using data retrieved above
        #
        ###############################################################
        self.trainedWords = self.trainedData["words"]
        self.trainedClasses = self.trainedData["classes"]
        self.x = self.trainedData["x"]
        self.y = self.trainedData["y"]
        # intentMap is stored as a single-element list in the trained data
        self.intentMap = self.trainedData["intentMap"][0]

    def loadEntityController(self):
        ###############################################################
        #
        # Initiates the entity extractor class from tools
        #
        ###############################################################
        self.entityController = Entities()

    def restoreNER(self):
        ###############################################################
        #
        # Loads entity controller and restores the NER model
        #
        ###############################################################
        self.loadEntityController()
        self.ner = self.entityController.restoreNER()

    def restoreNLU(self):
        ###############################################################
        #
        # Restores the NLU model
        #
        ###############################################################
        self.tmodel = self.Model.buildDNN(self.x, self.y)

    def setThresholds(self):
        ###############################################################
        #
        # Sets the threshold for the NLU engine, this can be changed
        # using arguments to commandline programs or paramters for
        # API calls.
        #
        ###############################################################
        self.threshold = self._confs["NLU"]["Threshold"]
        self.entityThrshld = self._confs["NLU"]["Mitie"]["Threshold"]

    def communicate(self, sentence):
        ###############################################################
        #
        # First checks to ensure that the program is not training,
        # then parses any entities that may be in the intent, then
        # checks context and extensions before providing a response.
        #
        ###############################################################
        if self.isTraining == False:
            parsed, fallback, entityHolder, parsedSentence = self.entityController.parseEntities(
                sentence, self.ner, self.trainingData)
            classification = self.Model.predict(self.tmodel, parsedSentence,
                                                self.trainedWords, self.trainedClasses)
            if len(classification) > 0:
                clearEntities = False
                theIntent = self.trainingData["intents"][self.intentMap[
                    classification[0][0]]]
                # Entities found but the intent expects none -> discard later.
                if len(entityHolder) and not len(theIntent["entities"]):
                    clearEntities = True
                if (self.Context.checkSessionContext(self.user[self.userID], theIntent)):
                    # In-context branch.
                    if self.Context.checkClearContext(theIntent, 0):
                        self.user[self.userID]["context"] = ""
                    contextIn, contextOut, contextCurrent = self.Context.setContexts(
                        theIntent, self.user[self.userID])
                    if not len(entityHolder) and len(theIntent["entities"]):
                        # Intent expected entities but none were found:
                        # answer with a fallback response.
                        response, entities = self.entityController.replaceResponseEntities(
                            random.choice(theIntent["fallbacks"]), entityHolder)
                        extension, extensionResponses, exEntities = self.Extensions.setExtension(
                            theIntent)
                    elif clearEntities:
                        entityHolder = []
                        response = random.choice(theIntent["responses"])
                        extension, extensionResponses, exEntities = self.Extensions.setExtension(
                            theIntent)
                    else:
                        response, entities = self.entityController.replaceResponseEntities(
                            random.choice(theIntent["responses"]), entityHolder)
                        extension, extensionResponses, exEntities = self.Extensions.setExtension(
                            theIntent)
                    if extension != None:
                        # Extensions are dotted paths: "folder.Class.method".
                        classParts = extension.split(".")
                        classFolder = classParts[0]
                        className = classParts[1]
                        theEntities = None
                        if exEntities != False:
                            theEntities = entities
                        module = __import__(
                            classParts[0] + "." + classParts[1],
                            globals(), locals(), [className])
                        extensionClass = getattr(module, className)()
                        response = getattr(extensionClass,
                                           classParts[2])(extensionResponses, theEntities)
                    return {
                        "Response": "OK",
                        "ResponseData": [{
                            "Received": sentence,
                            "Intent": classification[0][0],
                            "Confidence": str(classification[0][1]),
                            "Response": response,
                            "Context": [{
                                "In": contextIn,
                                "Out": contextOut,
                                "Current": contextCurrent
                            }],
                            "Extension": extension,
                            "Entities": entityHolder
                        }]
                    }
                else:
                    # Out-of-context branch: reset context then respond.
                    self.user[self.userID]["context"] = ""
                    contextIn, contextOut, contextCurrent = self.Context.setContexts(
                        theIntent, self.user[self.userID])
                    # NOTE(review): ``fallback in theIntent`` tests a
                    # boolean as a dict key — confirm "fallbacks" was
                    # not intended here.
                    if fallback and fallback in theIntent and len(
                            theIntent["fallbacks"]):
                        response = self.entityController.replaceResponseEntities(
                            random.choice(theIntent["fallbacks"]), entityHolder)
                        extension, extensionResponses = None, []
                    else:
                        response = self.entityController.replaceResponseEntities(
                            random.choice(theIntent["responses"]), entityHolder)
                        extension, extensionResponses, exEntities = self.Extensions.setExtension(
                            theIntent)
                    if extension != None:
                        classParts = extension.split(".")
                        classFolder = classParts[0]
                        className = classParts[1]
                        theEntities = None
                        if exEntities != False:
                            theEntities = entities
                        module = __import__(
                            classParts[0] + "." + classParts[1],
                            globals(), locals(), [className])
                        extensionClass = getattr(module, className)()
                        response = getattr(extensionClass,
                                           classParts[2])(extensionResponses, theEntities)
                    else:
                        response = self.entityController.replaceResponseEntities(
                            random.choice(theIntent["responses"]), entityHolder)
                    return {
                        "Response": "OK",
                        "ResponseData": [{
                            "Received": sentence,
                            "Intent": classification[0][0],
                            "Confidence": str(classification[0][1]),
                            "Response": response,
                            "Context": [{
                                "In": contextIn,
                                "Out": contextOut,
                                "Current": contextCurrent
                            }],
                            "Extension": extension,
                            "Entities": entityHolder
                        }]
                    }
            else:
                # No intent above threshold: return a default response.
                contextCurrent = self.Context.getCurrentContext(
                    self.user[self.userID])
                return {
                    "Response": "FAILED",
                    "ResponseData": [{
                        "Received": sentence,
                        "Intent": "UNKNOWN",
                        "Confidence": "NA",
                        "Responses": [],
                        "Response": random.choice(self._confs["NLU"]["defaultResponses"]),
                        "Context": [{
                            "In": "NA",
                            "Out": "NA",
                            "Current": contextCurrent
                        }],
                        "Extension": "NA",
                        "Entities": entityHolder
                    }]
                }
        else:
            # Engine is mid-training; refuse to answer.
            return {
                "Response": "FAILED",
                "ResponseData": [{
                    "Status": "Training",
                    "Message": "NLU Engine is currently training"
                }]
            }
class Model():
    """Builds, trains, restores and queries the NLU DNN.

    - Helpers: useful global functions
    - Data: data preparation functions
    Configuration lives in required/confs.json.
    """

    def __init__(self):
        self.Helpers = Helpers()
        self._confs = self.Helpers.loadConfigs()
        self.Data = Data()

    def createDNNLayers(self, x, y):
        """Build the DNN layer stack; configuration in required/confs.json."""
        cfg = self._confs["NLU"]
        network = tflearn.input_data(shape=[None, len(x[0])])
        for _ in range(cfg['FcLayers']):
            network = tflearn.fully_connected(network, cfg['FcUnits'])
        network = tflearn.fully_connected(network, len(y[0]),
                                          activation=str(cfg['Activation']))
        if cfg['Regression']:
            network = tflearn.regression(network)
        return network

    def trainDNN(self, x, y, words, classes, intentMap):
        """Train the DNN and persist weights plus model data."""
        cfg = self._confs["NLU"]
        tf.reset_default_graph()
        dnn = tflearn.DNN(self.createDNNLayers(x, y),
                          tensorboard_dir=cfg['TFLearn']['Logs'],
                          tensorboard_verbose=cfg['TFLearn']['LogsLevel'])
        dnn.fit(x, y,
                n_epoch=cfg['Epochs'],
                batch_size=cfg['BatchSize'],
                show_metric=cfg['ShowMetric'])
        modelData = {
            'words': words,
            'classes': classes,
            'x': x,
            'y': y,
            'intentMap': [intentMap]
        }
        self.saveModelData(cfg['TFLearn']['Data'], modelData, dnn)

    def saveModelData(self, path, data, tmodel):
        """Save the TFLearn weights and write the NLU model data as JSON."""
        tmodel.save(self._confs["NLU"]['TFLearn']['Path'])
        with open(path, "w") as outfile:
            json.dump(data, outfile)

    def buildDNN(self, x, y):
        """Restore the trained DNN from disk and return it."""
        tf.reset_default_graph()
        dnn = tflearn.DNN(self.createDNNLayers(x, y))
        dnn.load(self._confs["NLU"]['TFLearn']['Path'])
        return dnn

    def predict(self, tmodel, parsedSentence, trainedWords, trainedClasses):
        """Classify a parsed sentence.

        Returns a list of (class, confidence) tuples sorted by
        confidence, highest first.
        """
        bagOfWords = self.Data.makeBagOfWords(parsedSentence, trainedWords)
        scores = tmodel.predict([bagOfWords])[0]
        ranked = sorted(enumerate(scores), key=lambda pair: pair[1], reverse=True)
        return [(trainedClasses[index], confidence) for index, confidence in ranked]
class Model():
    """Builds, trains and restores the classifier DNN.

    Configuration is read from the "ClassifierSettings" section of
    required/confs.json.
    """

    def __init__(self):
        self.setup()

    def setup(self):
        """Load helpers and configuration."""
        self.Helpers = Helpers()
        self._confs = self.Helpers.loadConfigs()

    def saveModelData(self, path, data):
        """Write the classifier model data to path as JSON."""
        with open(path, "w") as outfile:
            json.dump(data, outfile)

    def createDNNLayers(self, x, y):
        """Build the classifier's DNN layer stack."""
        settings = self._confs["ClassifierSettings"]
        network = tflearn.input_data(shape=[None, len(x[0])])
        for _ in range(settings['FcLayers']):
            network = tflearn.fully_connected(network, settings['FcUnits'])
        network = tflearn.fully_connected(network, len(y[0]),
                                          activation=str(settings['Activation']))
        network = tflearn.regression(network)
        return network

    def trainDNN(self, x, y, words, classes, intentMap):
        """Train the classifier DNN and persist its weights and model data."""
        settings = self._confs["ClassifierSettings"]
        tf.reset_default_graph()
        dnn = tflearn.DNN(self.createDNNLayers(x, y),
                          tensorboard_dir=settings['TFLearn']['Logs'],
                          tensorboard_verbose=settings['TFLearn']['LogsLevel'])
        dnn.fit(x, y,
                n_epoch=settings['Epochs'],
                batch_size=settings['BatchSize'],
                show_metric=settings['ShowMetric'])
        dnn.save(settings['TFLearn']['Path'])
        self.saveModelData(settings['TFLearn']['Data'], {
            'words': words,
            'classes': classes,
            'x': x,
            'y': y,
            'iMap': [intentMap]
        })

    def buildDNN(self, x, y):
        """Restore the trained classifier DNN from disk and return it."""
        tf.reset_default_graph()
        dnn = tflearn.DNN(self.createDNNLayers(x, y))
        dnn.load(self._confs["ClassifierSettings"]['TFLearn']['Path'])
        return dnn
class JumpWay():
    """iotJumpWay connectivity: MQTT device connection and signed REST calls.

    - Helpers: useful global functions
    - Logging via Helpers.logMessage
    """

    def __init__(self):
        self.Helpers = Helpers()
        self._confs = self.Helpers.loadConfigs()
        self.LogFile = self.Helpers.setLogFile(self._confs["aiCore"]["Logs"] + "JumpWay/")

    def startMQTT(self):
        """Start an iotJumpWay MQTT device connection; exits the process on failure."""
        try:
            settings = {
                "locationID": self._confs["iotJumpWay"]["Location"],
                "zoneID": self._confs["iotJumpWay"]["Zone"],
                "deviceId": self._confs["iotJumpWay"]["Device"],
                "deviceName": self._confs["iotJumpWay"]["DeviceName"],
                "username": self._confs["iotJumpWay"]["MQTT"]["Username"],
                "password": self._confs["iotJumpWay"]["MQTT"]["Password"]
            }
            self.jumpwayClient = jumpWayDevice.DeviceConnection(settings)
            self.jumpwayClient.connectToDevice()
            self.Helpers.logMessage(self.LogFile, "iotJumpWay", "INFO",
                                    "iotJumpWay Client Ready")
            return self.jumpwayClient
        except Exception as e:
            self.Helpers.logMessage(self.LogFile, "iotJumpWay", "INFO",
                                    "iotJumpWay Client Initiation Failed")
            print(str(e))
            sys.exit()

    def createHashMac(self, secret, data):
        """Return the SHA256 HMAC hexdigest of data keyed with secret."""
        key = bytearray(secret.encode("utf-8"))
        mac = hmac.new(key, data.encode("utf-8"), digestmod=hashlib.sha256)
        return mac.hexdigest()

    def apiCall(self, apiUrl, data, headers):
        """POST a signed JSON request to the JumpWay REST API and return the decoded reply."""
        self.Helpers.logMessage(self.LogFile, "JUMPWAY", "INFO",
                                "Sending JumpWay REST Request")
        secret = self._confs["iotJumpWay"]["API"]["Secret"]
        credentials = HTTPBasicAuth(self._confs["iotJumpWay"]["App"],
                                    self.createHashMac(secret, secret))
        response = requests.post(apiUrl,
                                 data=json.dumps(data),
                                 headers=headers,
                                 auth=credentials)
        output = json.loads(response.content)
        self.Helpers.logMessage(
            self.LogFile, "JUMPWAY", "INFO",
            "JumpWay REST Response Received: " + str(output))
        return output
class Trainer():
    """Trains the NLU model.

    Sets up all default requirements and placeholders needed for the
    NLU engine to run.

    - Helpers: Useful global functions
    - jumpwayCl: iotJumpWay connection (injected), used to publish
      training telemetry
    - Model/Data: NLU model and data utilities
    """

    def __init__(self, jumpWay):
        self.Helpers = Helpers()
        self._confs = self.Helpers.loadConfigs()
        self.LogFile = self.Helpers.setLogFile(self._confs["aiCore"]["Logs"] + "Train/")
        self.jumpwayCl = jumpWay
        # Training state holders, filled by setupData()
        self.intentMap = {}
        self.words = []
        self.classes = []
        self.dataCorpus = []
        self.Model = Model()
        self.Data = Data()

    def setupData(self):
        """Load and prepare the NLU training data."""
        self.trainingData = self.Data.loadTrainingData()
        self.words, self.classes, self.dataCorpus, self.intentMap = self.Data.prepareData(
            self.trainingData)
        self.x, self.y = self.Data.finaliseData(self.classes, self.dataCorpus,
                                                self.words)
        self.Helpers.logMessage(self.LogFile, "TRAIN", "INFO",
                                "NLU Training Data Ready")

    def setupEntities(self):
        """Train the MITIE entity extractor when configured."""
        if self._confs["NLU"]["Entities"] == "Mitie":
            self.entityController = Entities()
            self.entityController.trainEntities(
                self._confs["NLU"]["Mitie"]["ModelLocation"], self.trainingData)
            self.Helpers.logMessage(self.LogFile, "TRAIN", "OK",
                                    "NLU Trainer Entities Ready")

    def trainModel(self):
        """Interactively confirm, then train the NLU model, publishing
        start and end telemetry to the iotJumpWay."""
        # Block until the operator explicitly answers Yes (train) or
        # No (abort); any other input re-prompts.
        while True:
            self.Helpers.logMessage(self.LogFile, "TRAIN", "ACTION",
                                    "Ready To Begin Training ? (Yes/No)")
            userInput = input(">")
            if userInput == 'Yes': break
            if userInput == 'No': exit()
        self.setupData()
        self.setupEntities()
        humanStart, trainingStart = self.Helpers.timerStart()
        self.jumpwayCl.publishToDeviceChannel(
            "Training", {
                "NeuralNet": "NLU",
                "Start": trainingStart,
                "End": "In Progress",
                "Total": "In Progress",
                "Message": "NLU Model Training At " + humanStart
            })
        self.Model.trainDNN(self.x, self.y, self.words, self.classes,
                            self.intentMap)
        trainingEnd, trainingTime, humanEnd = self.Helpers.timerEnd(
            trainingStart)
        self.Helpers.logMessage(
            self.LogFile, "TRAIN", "OK", "NLU Model Trained At " + humanEnd +
            " In " + str(trainingEnd) + " Seconds")
        self.jumpwayCl.publishToDeviceChannel(
            "Training", {
                "NeuralNet": "NLU",
                "Start": trainingStart,
                "End": trainingEnd,
                "Total": trainingTime,
                "Message": "NLU Model Trained At " + humanEnd + " In " +
                str(trainingEnd) + " Seconds"
            })
class Data():
    """Prepares NLU and NER training data.

    - Helpers: Useful global functions
    - LancasterStemmer: word stemmer (NLTK)
    """

    def __init__(self):
        # Punctuation tokens stripped out during word extraction
        self.ignore = [',', '.', '!', '?']
        self.Helpers = Helpers()
        self._confs = self.Helpers.loadConfigs()
        self.LogFile = self.Helpers.setLogFile(self._confs["aiCore"]["Logs"] + "JumpWay/")
        self.LancasterStemmer = LancasterStemmer()

    def loadTrainingData(self):
        """Loads the NLU and NER training data from data/training.json."""
        with open("data/training.json") as jsonData:
            trainingData = json.load(jsonData)
        self.Helpers.logMessage(self.LogFile, "Data", "INFO",
                                "Training Data Ready")
        return trainingData

    def loadTrainedData(self):
        """Loads the saved training configuration from model/model.json."""
        with open("model/model.json") as jsonData:
            modelData = json.load(jsonData)
        self.Helpers.logMessage(self.LogFile, "Data", "INFO",
                                "Model Data Ready")
        return modelData

    def sortList(self, listToSort):
        """Sorts a list, removing duplicates.

        https://www.programiz.com/python-programming/methods/built-in/sorted
        https://www.programiz.com/python-programming/list
        https://www.programiz.com/python-programming/set
        """
        return sorted(list(set(listToSort)))

    def extract(self, data=None, splitIt=False):
        """Extracts stemmed words from data, stripping out tokens in the
        ignore list.

        When splitIt is True data is a sentence to split on whitespace;
        otherwise data is already an iterable of tokens.

        https://www.nltk.org/_modules/nltk/stem/lancaster.html
        http://insightsbot.com/blog/R8fu5/bag-of-words-algorithm-in-python-introduction
        """
        return [
            self.LancasterStemmer.stem(word)
            for word in (data.split() if splitIt == True else data)
            if word not in self.ignore
        ]

    def makeBagOfWords(self, sInput, words):
        """Makes a bag of words used by the inference and training
        features.

        If called during training sInput is a list of tokens and a
        binary 0/1 list is returned; during inference sInput is a raw
        sentence and a numpy count vector is returned.

        http://insightsbot.com/blog/R8fu5/bag-of-words-algorithm-in-python-introduction
        """
        if type(sInput) == list:
            bagOfWords = []
            for word in words:
                if word in sInput:
                    bagOfWords.append(1)
                else:
                    bagOfWords.append(0)
            return bagOfWords
        else:
            bagOfWords = np.zeros(len(words))
            for cword in self.extract(sInput, True):
                for i, word in enumerate(words):
                    if word == cword:
                        bagOfWords[i] += 1
            return np.array(bagOfWords)

    def prepareClasses(self, intent, classes):
        """Adds an intent key to classes if it does not already exist."""
        if intent not in classes:
            classes.append(intent)
        return classes

    def prepareData(self, trainingData=[], wordsHldr=[], dataCorpusHldr=[], classesHldr=[]):
        """Prepares the NLU and NER training data.

        Returns (sorted stemmed words, sorted classes, data corpus,
        intent->index map).

        NOTE(review): the default mutable arguments are shared across
        calls — repeated invocations will accumulate data; confirm this
        is only ever called once per process.
        """
        counter = 0
        intentMap = {}
        for intent in trainingData['intents']:
            theIntent = intent['intent']
            for text in intent['text']:
                if 'entities' in intent and len(intent['entities']):
                    i = 0
                    for entity in intent['entities']:
                        # NOTE(review): this replaces the whole training
                        # text i with an "<entity>" tag rather than the
                        # entity's value — looks suspect; confirm against
                        # the training data format.
                        tokens = text.replace(
                            trainingData['intents'][counter]["text"][i],
                            "<" + entity["entity"] + ">").lower().split()
                        wordsHldr.extend(tokens)
                        dataCorpusHldr.append((tokens, theIntent))
                        i = i + 1
                else:
                    tokens = text.lower().split()
                    wordsHldr.extend(tokens)
                    dataCorpusHldr.append((tokens, theIntent))
            intentMap[theIntent] = counter
            classesHldr = self.prepareClasses(theIntent, classesHldr)
            counter = counter + 1
        return self.sortList(self.extract(
            wordsHldr,
            False)), self.sortList(classesHldr), dataCorpusHldr, intentMap

    def finaliseData(self, classes, dataCorpus, words):
        """Finalises the NLU training data.

        Builds (bag-of-words, one-hot class) pairs, shuffles them and
        returns the x and y training lists.
        """
        trainData = []
        out = np.zeros(len(classes))
        for document in dataCorpus:
            # One-hot output vector for this document's class
            output = list(out)
            output[classes.index(document[1])] = 1
            trainData.append([
                self.makeBagOfWords(self.extract(document[0], False), words),
                output
            ])
        random.shuffle(trainData)
        trainData = np.array(trainData)
        self.Helpers.logMessage(self.LogFile, "Data", "INFO",
                                "Finalised Training Data Ready")
        return list(trainData[:, 0]), list(trainData[:, 1])
class gHumans():
    """Answers "who is in front of the camera" questions against the
    users database.

    - Helpers: Useful global functions
    - JumpWay: iotJumpWay connectivity
    - MySql: database access (dict cursor rows)
    """

    def __init__(self):
        self.Helpers = Helpers()
        self._confs = self.Helpers.loadConfigs()
        self.JumpWay = JumpWay()
        self.MySql = MySql()
        self.MySql.setMysqlCursorRows()
        self.Logging = Logging()
        self.LogFile = self.Logging.setLogFile(self._confs["aiCore"]["Logs"] + "Client/")

    def getHumanByFace(self, response, entities=None):
        """Checks who was seen in the system camera within the last few
        seconds and returns a human-readable message.

        Returns None (the initial `results`) if the query fails.
        """
        results = None
        resultsLength = None
        try:
            self.MySql.mysqlDbCur.execute(
                "SELECT users.id, users.name, users.zone FROM a7fh46_users_logs logs INNER JOIN a7fh46_users users ON logs.uid = users.id WHERE logs.timeSeen > (NOW() - INTERVAL 10 SECOND) "
            )
            results = self.MySql.mysqlDbCur.fetchall()
            resultsLength = len(results)
            if resultsLength > 0:
                # BUGFIX: was `responseLength`, an undefined name that
                # raised NameError whenever a human was detected.
                if resultsLength == 1:
                    message = "I detected " + str(
                        resultsLength) + " human, #" + str(
                            results[0]["id"]) + " " + results[0]["name"]
                else:
                    message = "I detected " + str(resultsLength) + " humans"
            else:
                message = "I didn't detect any humans in the system camera feed, please stand in front of the camera"
            return message
        except Exception as errorz:
            print('FAILED1')
            print(errorz)
            return results

    def getCurrentHuman(self, responses, entities=None):
        """Checks who was seen in the system camera within the last
        minute and returns a personalised response.

        Returns None (the initial `results`) if the query fails.
        """
        results = None
        resultsLength = None
        try:
            self.MySql.mysqlDbCur.execute(
                "SELECT users.id, users.name, users.zone FROM a7fh46_user_current currentH INNER JOIN a7fh46_users users ON currentH.uid = users.id WHERE currentH.timeSeen > (NOW() - INTERVAL 1 MINUTE) ORDER BY id DESC LIMIT 1"
            )
            results = self.MySql.mysqlDbCur.fetchone()
            if results != None:
                return random.choice(responses).replace(
                    "%%HUMAN%%", results["name"])
            else:
                return "Sorry I could not identify you, this system will now self destruct! You have 5 seconds..."
        except Exception as errorz:
            print('FAILED1')
            print(errorz)
            return results

    def updateHuman(self, responses, entities):
        """Records the named human as the current human and returns a
        personalised response.

        Returns None (the initial `results`) if the query fails.
        """
        results = None
        try:
            # Parameterized query (was %-interpolated: SQL injection risk
            # since the name comes from parsed user input)
            self.MySql.mysqlDbCur.execute(
                "SELECT id, name FROM a7fh46_users users WHERE name = %s",
                (entities[0],))
            results = self.MySql.mysqlDbCur.fetchone()
            if results != None:
                timeSeen = datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')
                self.MySql.mysqlDbCur.execute(
                    """ INSERT INTO a7fh46_user_current (uid, lid, fid, zid, did, timeSeen) VALUES (%s, %s, %s, %s, %s, %s); """,
                    (results["id"], self._confs["iotJumpWay"]["Location"], 0,
                     self._confs["iotJumpWay"]["Zone"],
                     self._confs["iotJumpWay"]["Device"], timeSeen[:-3]))
                self.MySql.mysqlDbConn.commit()
                return random.choice(responses).replace(
                    "%%HUMAN%%", results["name"])
            else:
                return "Sorry I could not identify you, this system will now self destruct! You have 5 seconds..."
        except Exception as errorz:
            print('FAILED2')
            print(errorz)
            return results
class MySql():
    """MySQL access for user tracking (connection, sighting log, lookup)."""

    def __init__(self):
        self.Helpers = Helpers()
        self._confs = self.Helpers.loadConfigs()
        self.mysqlDbConn = None
        self.mysqlDbCur = None
        self.mysqlConnect()

    def mysqlConnect(self):
        """Open the database connection and cursor using required/confs.json."""
        try:
            self.mysqlDbConn = pymysql.connect(
                host = self._confs["AI"]["IP"],
                user = self._confs["MySql"]["dbusername"],
                passwd = self._confs["MySql"]["dbpassword"],
                db = self._confs["MySql"]["dbname"])
            self.mysqlDbCur = self.mysqlDbConn.cursor()
        except Exception as errorz:
            print('FAILED')
            print(errorz)

    def trackHuman(self, uid, lid, fid, zid, did):
        """Log a sighting of user uid and update their last-seen data.

        Rolls back on failure.
        """
        try:
            timeSeen = datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')
            self.mysqlDbCur.execute("""
                INSERT INTO a7fh46_users_logs
                    (uid, lid, fid, zid, did, timeSeen)
                VALUES
                    (%s, %s, %s, %s, %s, %s)
                ON DUPLICATE KEY UPDATE
                    timeSeen = IF(VALUES(timeSeen) > (NOW() - INTERVAL 2 MINUTE), VALUES(timeSeen), timeSeen);
                """, (uid, lid, fid, zid, did, timeSeen[:-3]))
            self.mysqlDbConn.commit()
            # Parameterized (was %-interpolated: SQL injection risk).
            # NOTE(review): this UPDATE has no WHERE clause, so it
            # updates every row in a7fh46_users — looks like a bug
            # (missing `WHERE id = uid`?); behavior preserved, confirm.
            self.mysqlDbCur.execute(
                "UPDATE a7fh46_users SET floor=%s, zone=%s, lastSeen=%s ",
                (fid, zid, timeSeen))
            self.mysqlDbConn.commit()
        except Exception as errorz:
            print('FAILED')
            print(errorz)
            self.mysqlDbConn.rollback()

    def getHuman(self, name):
        """Return the id row for the named user, or None if not found.

        Uses a parameterized query (was %-interpolated: SQL injection
        risk since name comes from user input).
        """
        try:
            self.mysqlDbCur.execute(
                "SELECT id FROM a7fh46_users WHERE name = %s", (name,))
        except Exception as errorz:
            print('FAILED')
            print(errorz)
        return self.mysqlDbCur.fetchone()
class Streamer():
    """Captures camera frames, runs face detection/recognition setup and
    streams frames over a ZMQ PUB socket."""

    def __init__(self):
        # Core helpers, configuration and logging
        self.Helpers = Helpers()
        self._confs = self.Helpers.loadConfigs()
        self.LogFile = self.Helpers.setLogFile(self._confs["aiCore"]["Logs"] + "/Local")
        self.MySql = MySql()
        self.OpenCV = OpenCV()
        # Camera capture handle, set by connectToCamera()
        self.OCVframe = None
        # Overlay text settings for annotated frames
        self.font = cv2.FONT_HERSHEY_SIMPLEX
        self.fontColor = (255, 255, 255)
        self.fontScale = 1
        self.lineType = 1
        self.identified = 0
        # Facenet model and (Movidius) inference device
        self.Facenet = Facenet()
        self.movidius, self.devices, self.device = self.Facenet.CheckDevices()
        self.fgraph, self.fgraphfile = self.Facenet.loadGraph(
            "Facenet", self.movidius)
        # Known/validation and testing image directories
        self.validDir = self._confs["Classifier"]["NetworkPath"] + self._confs[
            "Classifier"]["ValidPath"]
        self.testingDir = self._confs["Classifier"][
            "NetworkPath"] + self._confs["Classifier"]["TestingPath"]
        # dlib face detector and landmark predictor
        self.detector = dlib.get_frontal_face_detector()
        self.predictor = dlib.shape_predictor(
            self._confs["Classifier"]["Dlib"])
        self.connectToCamera()
        self.tassSocket = None
        self.configureSocket()
        self.JumpWay = JumpWay()
        self.JumpWayCL = self.JumpWay.startMQTT()
        self.Helpers.logMessage(self.LogFile, "TASS", "INFO", "TASS Ready")

    def connectToCamera(self):
        """Connects to the camera device.

        NOTE(review): despite the original comment mentioning a Foscam
        device, this opens local capture device 0 — confirm intent.
        """
        self.OCVframe = cv2.VideoCapture(0)
        self.Helpers.logMessage(self.LogFile, "TASS", "INFO",
                                "Connected To Camera")

    def configureSocket(self):
        """Configures the ZMQ PUB socket we will stream the frames to,
        using host/port from required/confs.json."""
        self.tassSocket = zmq.Context().socket(zmq.PUB)
        self.tassSocket.connect("tcp://" + self._confs["Socket"]["host"] +
                                ":" + str(self._confs["Socket"]["port"]))
        self.Helpers.logMessage(
            self.LogFile, "TASS", "INFO",
            "Connected To Socket: tcp://" + self._confs["Socket"]["host"] +
            ":" + str(self._confs["Socket"]["port"]))
class GeniSys():
    """Signed REST client for the GeniSys API (human lookup and tracking).

    - Helpers: useful global functions
    - Logging via Helpers.logMessage
    """

    def __init__(self):
        self.Helpers = Helpers()
        self._confs = self.Helpers.loadConfigs()
        self.LogFile = self.Helpers.setLogFile(self._confs["aiCore"]["Logs"] + "GeniSys/")

    def createHashMac(self, secret, data):
        """Return the SHA256 HMAC hexdigest of data keyed with secret."""
        key = bytearray(secret.encode("utf-8"))
        mac = hmac.new(key, data.encode("utf-8"), digestmod=hashlib.sha256)
        return mac.hexdigest()

    def restApiCall(self, apiUrl, data, headers):
        """POST a signed JSON request to a GeniSys REST endpoint and
        return the decoded reply."""
        self.Helpers.logMessage(self.LogFile, "GENISYS", "INFO",
                                "Sending GeniSys REST Request")
        apiKey = self._confs["iotJumpWay"]["API"]["key"]
        credentials = HTTPBasicAuth(str(self._confs["iotJumpWay"]["App"]),
                                    self.createHashMac(apiKey, apiKey))
        response = requests.post(apiUrl,
                                 data=json.dumps(data),
                                 headers=headers,
                                 auth=credentials)
        output = json.loads(response.text)
        self.Helpers.logMessage(
            self.LogFile, "GENISYS", "INFO",
            "GeniSys REST Response Received: " + str(output))
        return output

    def getHuman(self, name):
        """Look up a human's ID by name via the GeniSys REST API."""
        payload = {"Call": "getHumanID", "Data": {"name": name}}
        return self.restApiCall(self._confs["GeniSys"]["RestEndpoint"],
                                payload, {'content-type': 'application/json'})

    def trackHuman(self, uid, lid, fid, zid, did):
        """Record a sighting of a human via the GeniSys REST API."""
        payload = {
            "Call": "trackHuman",
            "Data": {
                "uid": uid,
                "lid": lid,
                "fid": fid,
                "zid": zid,
                "did": did
            }
        }
        return self.restApiCall(self._confs["GeniSys"]["RestEndpoint"],
                                payload, {'content-type': 'application/json'})