Example #1
    def __init__(self):
        """ Initializes the class. """

        self.Helpers = Helpers("Model")
        self.Data = Data()

        self.Helpers.logger.info("Model class initialized.")
Example #2
class Trainer():
    """ Trainer Class

	Trains the Natural Language Understanding Engine.
	"""
    def __init__(self):
        """ Initializes the class. """

        self.Helpers = Helpers("Train")

        self.intentMap = {}
        self.words = []
        self.classes = []
        self.dataCorpus = []

        self.Model = Model()
        self.Data = Data()

        self.Helpers.logger.info("Trainer class initialized.")

    def setupData(self):
        """ Prepares the data. """

        self.trainingData = self.Data.loadTrainingData()

        self.words, self.classes, self.dataCorpus, self.intentMap = self.Data.prepareData(
            self.trainingData)
        self.x, self.y = self.Data.finaliseData(self.classes, self.dataCorpus,
                                                self.words)

        self.Helpers.logger.info("NLU Training Data Ready")

    def setupEntities(self):
        """ Prepares the entities. """

        if self.Helpers.confs["NLU"]["Entities"] == "Mitie":
            self.entityController = Entities()
            self.entityController.trainEntities(
                self.Helpers.confs["NLU"]["Mitie"]["ModelLocation"],
                self.trainingData)

            self.Helpers.logger.info("NLU Trainer Entities Ready")

    def trainModel(self):
        """ Trains the model. """

        while True:
            self.Helpers.logger.info("Ready To Begin Training ? (Yes/No)")
            userInput = input(">")

            if userInput == 'Yes': break
            if userInput == 'No': exit()

        self.setupData()
        self.setupEntities()

        self.Model.trainDNN(self.x, self.y, self.words, self.classes,
                            self.intentMap)

        self.Helpers.logger.info("NLU Model Trained")
Example #3
    def do_data(self):
        """ Creates/sorts dataset. """

        self.Data = Data()
        self.Data.do_im_process()

        self.Helpers.logger.info("Data preperation complete.")
Example #4
class ManualAugmentation():
    def __init__(self):

        ###############################################################
        #
        # Sets up all default requirements and placeholders
        # needed for the Acute Myeloid Leukemia Classifier.
        #
        ###############################################################

        self.Data = Data()

    def processDataset(self):

        ###############################################################
        #
        # Make sure you have equal amounts of positive and negative
        # samples in the Model/Data directories.
        #
        # Only run this function once! It will continually make copies
        # of all images in the Settings->TrainDir directory specified
        # in Required/confs.json
        #
        ###############################################################

        self.Data.processDataset()
Example #5
    def __init__(self):

        ###############################################################
        #
        # Sets up all default requirements and placeholders
        # needed for the Acute Myeloid Leukemia Classifier.
        #
        ###############################################################

        self.Data = Data()
Example #6
    def data(self):
        """ Initializes the data. """

        self.Data = Data()
        self.trainingData = self.Data.loadTrainingData()
        self.trainedData = self.Data.loadTrainedData()

        self.trainedWords = self.trainedData["words"]
        self.trainedClasses = self.trainedData["classes"]
        self.x = self.trainedData["x"]
        self.y = self.trainedData["y"]
        self.intentMap = self.trainedData["intentMap"][0]
Example #7
class Data():
    """ Trainer Data Class

    Sorts the ALL Detection System 2019 NCS1 Classifier training data.
    """
    def __init__(self):
        """ Initializes the Data Class """

        self.Helpers = Helpers("Data")
        self.confs = self.Helpers.confs

        self.DataProcess = DataProcess()
        self.labelsToName = {}

        self.Helpers.logger.info("Data class initialization complete.")

    def sortData(self):
        """ Sorts the training data """

        humanStart, clockStart = self.Helpers.timerStart()

        self.Helpers.logger.info("Loading & preparing training data.")

        dataPaths, classes = self.DataProcess.processFilesAndClasses()

        classId = [int(i) for i in classes]
        classNamesToIds = dict(zip(classes, classId))

        # Divide the training datasets into train and test
        numValidation = int(self.confs["Classifier"]["ValidationSize"] *
                            len(dataPaths))
        self.Helpers.logger.info("Number of classes: " + str(classes))
        self.Helpers.logger.info("Validation data size: " + str(numValidation))
        random.seed(self.confs["Classifier"]["RandomSeed"])
        random.shuffle(dataPaths)
        trainingFiles = dataPaths[numValidation:]
        validationFiles = dataPaths[:numValidation]

        # Convert the training and validation sets
        self.DataProcess.convertToTFRecord('train', trainingFiles,
                                           classNamesToIds)
        self.DataProcess.convertToTFRecord('validation', validationFiles,
                                           classNamesToIds)

        # Write the labels to file
        labelsToClassNames = dict(zip(classId, classes))
        self.DataProcess.writeLabels(labelsToClassNames)

        self.Helpers.logger.info(
            "Loading & preparing training data completed.")

    def cropTestData(self):
        """ Crops the testing data """

        self.DataProcess.cropTestDataset()

        self.Helpers.logger.info("Testing data resized.")
Example #8
class COVID19DN():
    """ COVID19DN Class

    Core COVID-19 Tensorflow DenseNet Classifier wrapper class using Tensorflow 2.
    """

    def __init__(self):
        """ Initializes the class. """

        self.Helpers = Helpers("Core")

        self.Model = Model()

        self.Helpers.logger.info(
            "COVID19DN Tensorflow initialization complete.")

    def do_data(self):
        """ Sorts the training data. """

        self.Data = Data()
        self.Data.process_data(
            self.Data.paths_n_labels())

    def do_train(self):
        """ Creates & trains the model. """

        self.Model.do_model(self.Data)
        self.Model.do_train()
        self.Model.do_evaluate()

    def do_load_model(self):
        """ Loads the model """

        self.Model.load_model_and_weights()

    def do_classify(self):
        """ Loads model and classifies test data """

        self.do_load_model()
        self.Model.test_classifier()

    def do_server(self):
        """ Loads the API server """

        self.do_load_model()
        self.Server = Server(self.Model)
        self.Server.start()

    def do_http_classify(self):
        """ Loads model and classifies test data """

        self.Model.test_http_classifier()
Example #9
    def do_data(self):
        """ Creates/sorts dataset. """

        self.Data = Data(self.optimizer, self.do_augmentation)
        self.Data.data_and_labels_sort()

        if not self.do_augmentation:
            self.Data.data_and_labels_prepare()
        else:
            self.Data.data_and_labels_augmentation_prepare()

        self.Data.shuffle()
        self.Data.get_split()
Example #10
    def __init__(self):

        self.Helpers = Helpers()
        self._confs = self.Helpers.loadConfigs()
        self.LogFile = self.Helpers.setLogFile(self._confs["aiCore"]["Logs"] +
                                               "Train/")

        self.intentMap = {}
        self.words = []
        self.classes = []
        self.dataCorpus = []

        self.Model = Model()
        self.Data = Data()
Example #11
    def __init__(self):
        """ Initializes the Training class. """

        self.Helpers = Helpers()
        self.LogFile = self.Helpers.setLogFile(
            self.Helpers.confs["System"]["Logs"] + "Train/")

        self.intentMap = {}
        self.words = []
        self.classes = []
        self.dataCorpus = []

        self.Model = Model()
        self.Data = Data()
Example #12
	def __init__(self):
		""" Initializes the class. """

		self.Helpers = Helpers("Train")

		self.intentMap = {}
		self.words = []
		self.classes = []
		self.dataCorpus = []

		self.Model = Model()
		self.Data = Data()

		self.Helpers.logger.info("Trainer class initialized.")
Example #13
    def __init__(self):

        ###############################################################
        #
        # Sets up all default requirements
        #
        # - Helpers: Useful global functions
        # - Data: Data functions
        #
        ###############################################################

        self.Helpers = Helpers()
        self.confs = self.Helpers.loadConfigs()

        self.Data = Data()
Example #14
def genAmazonData():
    dataset = Dataset()
    arq = open('Datasets/AmazonDatasetTest.txt', 'r', encoding='utf-8')
    i = 0
    while True:
        try:
            line = arq.readline()
            if line == "":
                break
            print(i)
            i = i + 1

            # Reading labels and data
            label = int(line[9])
            label = 0 if label == 1 else 1
            text = line[11:len(line) - 1]

            # Tokenizing and lemmatizing
            text = to_process(text)

            data = Data(doc=text, label=label)
            dataset.add(data)
        except EOFError:
            break

    with open('amazon_dataset', 'wb') as fp:
        pickle.dump(dataset, fp)

    return dataset
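
The slicing in genAmazonData assumes fastText-style input lines ("__label__1 ..." / "__label__2 ..."): character 9 is the label digit after the nine-character "__label__" prefix, and the review text starts at index 11. A small check of that assumption:

line = "__label__2 Great product, works as advertised\n"  # assumed line format

label = int(line[9])            # digit following the "__label__" prefix
label = 0 if label == 1 else 1  # map label 1 -> 0 (negative), label 2 -> 1 (positive)
text = line[11:len(line) - 1]   # drop the prefix and the trailing newline

print(label, text)  # 1 Great product, works as advertised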
Example #15
def genRottenData():
    dataset = Dataset()

    src = ['.neg', '.pos']
    i = 0
    j = 0

    for a in src:
        arq = open('Datasets/rotten/rt-polarity' + a, 'r')

        while True:
            print(j)
            j = j + 1
            try:
                # Reading labels and data
                text = arq.readline()
                label = i

                if text == "":
                    break

                # Tokenizing and lemmatizing
                text = to_process(text)

                data = Data(doc=text, label=label)
                dataset.add(data)
            except EOFError:
                break
        i = i + 1

    with open('Datasets/rotten_dataset', 'wb') as fp:
        pickle.dump(dataset, fp)
    return dataset
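
Note that readline() signals end-of-file with an empty string and never raises EOFError, so the except clause above is effectively dead code; an equivalent, more idiomatic sketch of the same loop, assuming to_process, Data and Dataset as defined for these examples:

dataset = Dataset()
for i, suffix in enumerate(['.neg', '.pos']):  # label 0 = negative, 1 = positive
    with open('Datasets/rotten/rt-polarity' + suffix, 'r') as arq:
        for text in arq:
            # Tokenize/lemmatize each review line and add it to the dataset.
            dataset.add(Data(doc=to_process(text), label=i))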
Example #16
class IdcQnn():
    """ IdcQnn

    IdcQnn is a wrapper class that creates the Invasive Ductal Carcinoma
    Tensorflow QNN (Quantum Neural Network).
    """
    def __init__(self):
        """ Initializes the class. """

        self.Helpers = Helpers("Core")
        self.QModel = QModel()

        self.Helpers.logger.info("IdcQnn QNN initialization complete.")

    def do_data(self):
        """ Sorts the training data """

        self.Data = Data()
        self.Data.get_paths_n_labels()
        self.Data.process_data()

    def do_train(self):
        """ Creates & trains the QNN. 
        
        CREDIT: https://www.tensorflow.org/quantum/tutorials/mnist
        CREDIT: https://arxiv.org/pdf/1802.06002.pdf 
        """

        self.QMNIST = QMNIST()

        # "Quantumize" the training data
        X_train_bin, X_test_bin = self.QMNIST.encode_data_as_binary(
            self.Data.X_train, self.Data.X_test)
        X_train_circ, X_test_circ = self.QMNIST.do_circuit_conversion(
            X_train_bin, X_test_bin)
        x_train_tfcirc, x_test_tfcirc = self.QMNIST.convert_to_tensors(
            X_train_circ, X_test_circ)

        # Create the Quantum Neural Network
        model_circuit, model_readout = self.QModel.create_quantum_model()
        self.QModel.create_keras_model(model_circuit, model_readout)

        # Train the Quantum Neural Network
        self.QModel.train_model(x_train_tfcirc, x_test_tfcirc,
                                self.Data.y_train, self.Data.y_test)
        self.QModel.do_evaluate(x_train_tfcirc, x_test_tfcirc,
                                self.Data.y_test)
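
The "quantumize" step follows the cited TensorFlow Quantum MNIST tutorial, whose binary encoding is a plain pixel threshold; a sketch of what encode_data_as_binary plausibly does, assuming normalized pixels and a 0.5 threshold:

import numpy as np

def encode_data_as_binary(X_train, X_test, threshold=0.5):
    # Threshold normalized pixel intensities to {0, 1}, as in the TFQ
    # MNIST tutorial; the exact threshold used by QMNIST is an assumption.
    return (np.array(X_train > threshold, dtype=np.float32),
            np.array(X_test > threshold, dtype=np.float32))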
Example #17
class Data():
        
    ###############################################################
    #
    # Core Data class wrapper.
    #
    ###############################################################

    def __init__(self):
        
        ###############################################################
        #
        # Sets up all default requirements and placeholders 
        # needed for this class. 
        #
        ###############################################################
        
        self.Helpers = Helpers()
        self.confs = self.Helpers.loadConfs()
        self.logFile = self.Helpers.setLogFile(self.confs["Settings"]["Logs"]["DataLogDir"])
        
        self.DataProcess = DataProcess()
        self.labelsToName = {}
        
        self.Helpers.logMessage(self.logFile, "init", "INFO", "Init complete")

    def sortData(self):
        
        ###############################################################
        #
        # Sorts the data 
        #
        ###############################################################

        humanStart, clockStart = self.Helpers.timerStart()
        self.Helpers.logMessage(self.logFile, "sortData", "INFO", "Loading & preparing training data")
        
        dataPaths, classes = self.DataProcess.processFilesAndClasses()

        classId = [int(i) for i in classes]
        classNamesToIds = dict(zip(classes, classId))

        # Divide the training datasets into train and test
        numValidation = int(self.confs["Classifier"]["ValidationSize"] * len(dataPaths))
        self.Helpers.logMessage(self.logFile, "sortData", "Validation Size", str(numValidation))
        self.Helpers.logMessage(self.logFile, "sortData", "Class Size", str(len(classes)))
        random.seed(self.confs["Classifier"]["RandomSeed"])
        random.shuffle(dataPaths)
        trainingFiles = dataPaths[numValidation:]
        validationFiles = dataPaths[:numValidation]

        # Convert the training and validation sets
        self.DataProcess.convertToTFRecord('train', trainingFiles, classNamesToIds)
        self.DataProcess.convertToTFRecord('validation', validationFiles, classNamesToIds)

        # Write the labels to file
        labelsToClassNames = dict(zip(classId, classes))
        self.DataProcess.writeLabels(labelsToClassNames)
        self.Helpers.logMessage(self.logFile, "sortData", "COMPLETE", "Completed sorting data!")
Example #18
    def __init__(self):
        """ Initializes the Movidius NCS1 Classifier Data Class """

        self.Helpers = Helpers("Data")
        self.confs = self.Helpers.confs

        self.DataProcess = DataProcess()
        self.labelsToName = {}

        self.Helpers.logger.info("Data class initialization complete.")
Example #19
class Augmentation():
    """ ALL Detection System 2019 Manual Data Augmentation Class

    Manual data augmentation wrapper class for the ALL Detection System 2019 Data Augmentation project.
    """
    def __init__(self):
        """ Initializes the Augmentation class. """

        self.Data = Data()

    def processDataset(self):
        """ Processes the AML/ALL Detection System Dataset. 
        Make sure you have equal amounts of positive and negative
        samples in the Model/Data directories.

        Only run this function once! It will continually make copies
        of all images in the Settings->TrainDir directory specified
        in Required/confs.json
        """

        self.Data.processDataset()
Example #20
    def initNLU(self):
        """ Initializes a Chatbot sesiion. 
        
        Initiates the NLU setting up the data, NLU / entities models 
        and required modules such as context and extensions.
        """

        self.Data = Data()
        self.trainingData = self.Data.loadTrainingData()
        self.trainedData = self.Data.loadTrainedData()

        self.Model = Model()
        self.Context = Context()
        self.Extensions = Extensions()

        self.restoreData()
        self.restoreNER()
        self.restoreNLU()

        self.initiateSession()
        self.setThresholds()
Example #21
    def divide(self, num_docs):
        pos = 0
        neg = 0

        result = Dataset()

        for i in range(len(self.labels)):
            text = self.docs[i]
            label = self.labels[i]
            if label == 1 and pos < num_docs / 2:
                data = Data(doc=text, label=label)

                result.add(data)
                pos = pos + 1

            elif label == 0 and neg < num_docs / 2:
                data = Data(doc=text, label=label)

                result.add(data)
                neg = neg + 1

        return result
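
divide caps each class at num_docs / 2, so the result is a class-balanced subset of the original dataset; a usage sketch, assuming a populated Dataset with the docs and labels attributes used above:

# Hypothetical usage: draw a balanced 1000-document sample,
# at most 500 positive and 500 negative.
balanced = dataset.divide(1000)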
Example #22
class QMNISTCOVID19():
    """ QMNISTCOVID19

    QMNISTCOVID19 is a wrapper class that creates the COVID-19 Tensorflow QNN (Quantum Neural Network).
    """
    def __init__(self):
        """ Initializes the class. """

        self.Helpers = Helpers("Core")

        self.Helpers.logger.info("QMNISTCOVID19 QNN initialization complete.")

    def do_data(self):
        """ Sorts the training data """

        self.Data = Data()
        self.Data.get_paths_n_labels()
        self.Data.process_data()

    def do_train(self):
        """ Creates & trains the QNN. """

        self.QMNIST = QMNIST()

        X_train_bin, X_test_bin = self.QMNIST.encode_as_binary(
            self.Data.X_train, self.Data.X_test)
        X_train_circ, X_test_circ = self.QMNIST.do_circuit_conversion(
            X_train_bin, X_test_bin)
        x_train_tfcirc, x_test_tfcirc = self.QMNIST.convert_to_tensors(
            X_train_circ, X_test_circ)

    def do_load_model(self):
        """ Loads the trained model """
        print("TODO")

    def do_classify(self):
        """ Classifies the test data """
        print("TODO")
Example #23
    def __init__(self):

        ###############################################################
        #
        # Sets up all default requirements and placeholders
        # needed for this class.
        #
        ###############################################################

        self.Helpers = Helpers()
        self.confs = self.Helpers.loadConfs()
        self.logFile = self.Helpers.setLogFile(self.confs["Settings"]["Logs"]["DataLogDir"])

        self.DataProcess = DataProcess()
        self.labelsToName = {}

        self.Helpers.logMessage(self.logFile, "init", "INFO", "Init complete")
Example #24
    def __init__(self):
        """ Initializes the Model class. """

        self.Helpers = Helpers()
        self.Data = Data()
Example #25
class Model():
    """ ALL Detection System 2019 Model Class

    Model class for the ALL Detection System 2019 Chatbot. 
    """
    def __init__(self):
        """ Initializes the Model class. """

        self.Helpers = Helpers()
        self.Data = Data()

    def createDNNLayers(self, x, y):
        """ Sets up the DNN layers """

        net = tflearn.input_data(shape=[None, len(x[0])])

        for i in range(self.Helpers.confs["NLU"]['FcLayers']):
            net = tflearn.fully_connected(net,
                                          self.Helpers.confs["NLU"]['FcUnits'])
        net = tflearn.fully_connected(
            net,
            len(y[0]),
            activation=str(self.Helpers.confs["NLU"]['Activation']))

        if self.Helpers.confs["NLU"]['Regression']:
            net = tflearn.regression(net)

        return net

    def trainDNN(self, x, y, words, classes, intentMap):
        """ Trains the DNN """

        tf.reset_default_graph()

        tmodel = tflearn.DNN(
            self.createDNNLayers(x, y),
            tensorboard_dir=self.Helpers.confs["NLU"]['TFLearn']['Logs'],
            tensorboard_verbose=self.Helpers.confs["NLU"]['TFLearn']
            ['LogsLevel'])

        tmodel.fit(x,
                   y,
                   n_epoch=self.Helpers.confs["NLU"]['Epochs'],
                   batch_size=self.Helpers.confs["NLU"]['BatchSize'],
                   show_metric=self.Helpers.confs["NLU"]['ShowMetric'])

        self.saveModelData(
            self.Helpers.confs["NLU"]['TFLearn']['Data'], {
                'words': words,
                'classes': classes,
                'x': x,
                'y': y,
                'intentMap': [intentMap]
            }, tmodel)

    def saveModelData(self, path, data, tmodel):
        """ Saves the model data """

        tmodel.save(self.Helpers.confs["NLU"]['TFLearn']['Path'])

        with open(path, "w") as outfile:
            json.dump(data, outfile)

    def buildDNN(self, x, y):
        """ Loads the DNN model """

        tf.reset_default_graph()
        tmodel = tflearn.DNN(self.createDNNLayers(x, y))
        tmodel.load(self.Helpers.confs["NLU"]['TFLearn']['Path'])
        return tmodel

    def predict(self, tmodel, parsedSentence, trainedWords, trainedClasses):
        """ Makes a prediction """

        predictions = [[index, confidence] for index, confidence in enumerate(
            tmodel.predict(
                [self.Data.makeBagOfWords(parsedSentence, trainedWords)])[0])]
        predictions.sort(key=lambda x: x[1], reverse=True)

        classification = []
        for prediction in predictions:
            classification.append(
                (trainedClasses[prediction[0]], prediction[1]))

        return classification
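
predict relies on Data.makeBagOfWords to turn the parsed sentence into a fixed-length vector over trainedWords; a minimal sketch of that idea (the real helper's tokenization and stemming may differ):

def makeBagOfWords(parsedSentence, trainedWords):
    # One slot per known word: 1 if it occurs in the sentence, else 0.
    tokens = parsedSentence.lower().split()
    return [1 if word in tokens else 0 for word in trainedWords]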
Example #26
class Engine():
    """ Engine Class

	Core functions for the NLU Engine.
	"""
    def __init__(self, isAudio):
        """ Initializes the class. """

        self.Helpers = Helpers("Engine")

        self.ner = None
        self.user = {}

        #self.bluetoothCon()
        self.data()
        self.entities()
        #self.iotJumpWayCon()
        self.model()
        self.session()
        self.thresholds()

        if isAudio:
            self.speech()

        self.Helpers.logger.info("Engine class initialized.")

    def bluetoothCon(self):
        """ Initializes the Bluetooth connection. """

        self.Bluetooth = BluetoothConnect()
        self.Bluetooth.connect()

    def data(self):
        """ Initializes the data. """

        self.Data = Data()
        self.trainingData = self.Data.loadTrainingData()
        self.trainedData = self.Data.loadTrainedData()

        self.trainedWords = self.trainedData["words"]
        self.trainedClasses = self.trainedData["classes"]
        self.x = self.trainedData["x"]
        self.y = self.trainedData["y"]
        self.intentMap = self.trainedData["intentMap"][0]

    def doExtension(self, extension, entities, exEntities, extensionResponses):
        """ Executes an extension. """

        classParts = extension.split(".")
        classFolder = classParts[0]
        className = classParts[1]
        theEntities = None

        if exEntities != False:
            theEntities = entities

        module = __import__(classParts[0] + "." + classParts[1], globals(),
                            locals(), [className])
        extensionClass = getattr(module, className)()
        response = getattr(extensionClass, classParts[2])(extensionResponses,
                                                          theEntities)

        return response

    def entities(self):
        """ Initializes the entities. """

        self.entityController = Entities()
        self.ner = self.entityController.restoreNER()

    def entitiesCheck(self, entityHolder, theIntent, clearEntities):
        """ Checks entities. """

        if not len(entityHolder) and len(theIntent["entities"]):
            response, entities = self.entityController.replaceResponseEntities(
                random.choice(theIntent["fallbacks"]), entityHolder)
            extension, extensionResponses, exEntities = self.Extensions.setExtension(
                theIntent)
        elif clearEntities:
            entities = []
            response = random.choice(theIntent["responses"])
            extension, extensionResponses, exEntities = self.Extensions.setExtension(
                theIntent)
        else:
            response, entities = self.entityController.replaceResponseEntities(
                random.choice(theIntent["responses"]), entityHolder)
            extension, extensionResponses, exEntities = self.Extensions.setExtension(
                theIntent)

        return response, entities, extension, extensionResponses, exEntities

    def fallbackCheck(self, fallback, theIntent, entityHolder):
        """ Checks if fallback. """

        if fallback and fallback in theIntent and len(theIntent["fallbacks"]):
            response, entities = self.entityController.replaceResponseEntities(
                random.choice(theIntent["fallbacks"]), entityHolder)
            extension, extensionResponses, exEntities = None, [], None
        else:
            response, entities = self.entityController.replaceResponseEntities(
                random.choice(theIntent["responses"]), entityHolder)
            extension, extensionResponses, exEntities = self.Extensions.setExtension(
                theIntent)

        return response, entities, extension, extensionResponses, exEntities

    def model(self):
        """ Initializes the model. """

        self.Model = Model()
        self.Context = Context()
        self.Extensions = Extensions()

        self.tmodel = self.Model.buildDNN(self.x, self.y)

    def session(self):
        """ Initializes a NLU sesiion.

		Initiates empty guest user session, GeniSys will ask the user
		verify their GeniSys user by speaking or typing if it does
		not know who it is speaking to.
		"""

        self.userID = 0
        if self.userID not in self.user:
            self.user[self.userID] = {}
            self.user[self.userID]["history"] = {}

    def respond(self, status, sentence, intent, confidence, response, cIn,
                cOut, cCurrent, extension, entities):
        """ Forms the response. """

        return {
            "Response":
            status,
            "ResponseData": [{
                "Received":
                sentence,
                "Intent":
                intent,
                "Confidence":
                confidence,
                "Response":
                response,
                "Context": [{
                    "In": cIn,
                    "Out": cOut,
                    "Current": cCurrent
                }],
                "Extension":
                extension,
                "Entities":
                entities
            }]
        }

    def speech(self):
        """ Initializes the TTS feature. """

        self.TTS = TTS()

    def thresholds(self):
        """ Sets thresholds

		Sets the threshold for the NLU engine; this can be changed
		using arguments to command-line programs or parameters for
		API calls.
		"""

        self.threshold = self.Helpers.confs["NLU"]["Threshold"]
        self.entityThrshld = self.Helpers.confs["NLU"]["Mitie"]["Threshold"]
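
doExtension resolves an extension string of the form "Folder.Class.method" with __import__ and getattr; the pattern in isolation, using a hypothetical extensions.Weather.getForecast target:

# Sketch of the dynamic-dispatch pattern used by doExtension above.
# "extensions.Weather.getForecast" is a hypothetical extension string.
classParts = "extensions.Weather.getForecast".split(".")

module = __import__(classParts[0] + "." + classParts[1],
                    globals(), locals(), [classParts[1]])
instance = getattr(module, classParts[1])()            # Weather()
response = getattr(instance, classParts[2])([], None)  # getForecast(responses, entities)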
Example #27
class Model():
    """ Model Class

	Model functions for the OneAPI Acute Lymphoblastic Leukemia Classifier CNN.
	"""
    def __init__(self):
        """ Initializes the class. """

        self.Helpers = Helpers("Model", False)

        os.environ["KMP_BLOCKTIME"] = "1"
        os.environ["KMP_SETTINGS"] = "1"
        os.environ["KMP_AFFINITY"] = "granularity=fine,verbose,compact,1,0"
        os.environ["OMP_NUM_THREADS"] = str(
            self.Helpers.confs["cnn"]["system"]["cores"])
        tf.config.threading.set_inter_op_parallelism_threads(1)
        tf.config.threading.set_intra_op_parallelism_threads(
            self.Helpers.confs["cnn"]["system"]["cores"])

        self.testing_dir = self.Helpers.confs["cnn"]["data"]["test"]
        self.valid = self.Helpers.confs["cnn"]["data"]["valid_types"]
        self.seed = self.Helpers.confs["cnn"]["data"]["seed"]

        self.weights_file = self.Helpers.confs["cnn"]["model"]["weights"]
        self.model_json = self.Helpers.confs["cnn"]["model"]["model"]

        random.seed(self.seed)
        seed(self.seed)
        tf.random.set_seed(self.seed)

        self.Helpers.logger.info("Class initialization complete.")

    def do_data(self):
        """ Creates/sorts dataset. """

        self.Data = Data()
        self.Data.do_im_process()

        self.Helpers.logger.info("Data preperation complete.")

    def do_network(self):
        """ Builds the network.

		Replicates the network outlined in the Acute Leukemia Classification
		Using Convolution Neural Network In Clinical Decision Support System paper
		using Tensorflow 2.0.
		https://airccj.org/CSCP/vol7/csit77505.pdf
		"""

        self.val_steps = self.Helpers.confs["cnn"]["train"]["val_steps"]
        self.batch_size = self.Helpers.confs["cnn"]["train"]["batch"]
        self.epochs = self.Helpers.confs["cnn"]["train"]["epochs"]

        self.tf_model = tf.keras.models.Sequential([
            tf.keras.layers.ZeroPadding2D(
                padding=(2, 2), input_shape=self.Data.X_train.shape[1:]),
            tf.keras.layers.Conv2D(
                30, (5, 5), strides=1, padding="valid", activation='relu'),
            tf.keras.layers.ZeroPadding2D(padding=(2, 2)),
            tf.keras.layers.Conv2D(
                30, (5, 5), strides=1, padding="valid", activation='relu'),
            tf.keras.layers.MaxPooling2D(
                pool_size=(2, 2), strides=2, padding='valid'),
            tf.keras.layers.Flatten(),
            tf.keras.layers.Dense(2),
            tf.keras.layers.Activation('softmax')
        ], "ALLoneAPI_CNN")
        self.tf_model.summary()
        self.Helpers.logger.info("Network initialization complete.")

    def do_train(self):
        """ Trains the network. """

        self.Helpers.logger.info("Using Adam Optimizer.")
        optimizer = tf.keras.optimizers.Adam(
            lr=self.Helpers.confs["cnn"]["train"]["learning_rate_adam"],
            decay=self.Helpers.confs["cnn"]["train"]["decay_adam"])

        self.tf_model.compile(optimizer=optimizer,
                              loss='binary_crossentropy',
                              metrics=[
                                  tf.keras.metrics.BinaryAccuracy(name='acc'),
                                  tf.keras.metrics.Precision(name='precision'),
                                  tf.keras.metrics.Recall(name='recall'),
                                  tf.keras.metrics.AUC(name='auc')
                              ])

        self.history = self.tf_model.fit(self.Data.X_train,
                                         self.Data.y_train,
                                         validation_data=(self.Data.X_test,
                                                          self.Data.y_test),
                                         validation_steps=self.val_steps,
                                         epochs=self.epochs)

        print(self.history)
        print("")

        self.freeze_model()
        self.save_model_as_json()
        self.save_weights()

    def do_evaluate(self):
        """ Evaluates the model """

        self.do_predictions()

        metrics = self.tf_model.evaluate(self.Data.X_test,
                                         self.Data.y_test,
                                         verbose=0)
        for name, value in zip(self.tf_model.metrics_names, metrics):
            self.Helpers.logger.info("Metrics: " + name + " " + str(value))
        print()

        self.visualize_metrics()
        self.confusion_matrix()
        self.figures_of_merit()

    def do_predictions(self):
        """ Makes predictions on the train & test sets. """

        self.train_preds = self.tf_model.predict(self.Data.X_train)
        self.test_preds = self.tf_model.predict(self.Data.X_test)

        self.Helpers.logger.info("Training predictions: " +
                                 str(self.train_preds))
        self.Helpers.logger.info("Testing predictions: " +
                                 str(self.test_preds))
        print("")

    def visualize_metrics(self):
        """ Visualize the metrics. """

        plt.plot(self.history.history['acc'])
        plt.plot(self.history.history['val_acc'])
        plt.title('Model Accuracy')
        plt.ylabel('Accuracy')
        plt.xlabel('Epoch')
        plt.ylim((0, 1))
        plt.legend(['Train', 'Validate'], loc='upper left')
        plt.savefig('Model/Plots/Accuracy.png')
        plt.show()
        plt.clf()

        plt.plot(self.history.history['loss'])
        plt.plot(self.history.history['val_loss'])
        plt.title('Model Loss')
        plt.ylabel('loss')
        plt.xlabel('Epoch')
        plt.legend(['Train', 'Validate'], loc='upper left')
        plt.savefig('Model/Plots/Loss.png')
        plt.show()
        plt.clf()

        plt.plot(self.history.history['auc'])
        plt.plot(self.history.history['val_auc'])
        plt.title('Model AUC')
        plt.ylabel('AUC')
        plt.xlabel('Epoch')
        plt.legend(['Train', 'Validate'], loc='upper left')
        plt.savefig('Model/Plots/AUC.png')
        plt.show()
        plt.clf()

        plt.plot(self.history.history['precision'])
        plt.plot(self.history.history['val_precision'])
        plt.title('Model Precision')
        plt.ylabel('Precision')
        plt.xlabel('Epoch')
        plt.legend(['Train', 'Validate'], loc='upper left')
        plt.savefig('Model/Plots/Precision.png')
        plt.show()
        plt.clf()

        plt.plot(self.history.history['recall'])
        plt.plot(self.history.history['val_recall'])
        plt.title('Model Recall')
        plt.ylabel('Recall')
        plt.xlabel('Epoch')
        plt.legend(['Train', 'Validate'], loc='upper left')
        plt.savefig('Model/Plots/Recall.png')
        plt.show()
        plt.clf()

    def confusion_matrix(self):
        """ Prints/displays the confusion matrix. """

        self.matrix = confusion_matrix(self.Data.y_test.argmax(axis=1),
                                       self.test_preds.argmax(axis=1))

        self.Helpers.logger.info("Confusion Matrix: " + str(self.matrix))
        print("")

        plt.imshow(self.matrix, cmap=plt.cm.Blues)
        plt.xlabel("Predicted labels")
        plt.ylabel("True labels")
        plt.xticks([], [])
        plt.yticks([], [])
        plt.title('Confusion Matrix')
        plt.colorbar()
        plt.savefig('Model/Plots/Confusion-Matrix.png')
        plt.show()
        plt.clf()

    def figures_of_merit(self):
        """ Calculates/prints the figures of merit.

		https://homes.di.unimi.it/scotti/all/
		"""

        test_len = len(self.Data.X_test)

        TP = self.matrix[1][1]
        TN = self.matrix[0][0]
        FP = self.matrix[0][1]
        FN = self.matrix[1][0]

        TPP = (TP * 100) / test_len
        FPP = (FP * 100) / test_len
        FNP = (FN * 100) / test_len
        TNP = (TN * 100) / test_len

        specificity = TN / (TN + FP)

        misc = FP + FN
        miscp = (misc * 100) / test_len

        self.Helpers.logger.info("True Positives: " + str(TP) + "(" +
                                 str(TPP) + "%)")
        self.Helpers.logger.info("False Positives: " + str(FP) + "(" +
                                 str(FPP) + "%)")
        self.Helpers.logger.info("True Negatives: " + str(TN) + "(" +
                                 str(TNP) + "%)")
        self.Helpers.logger.info("False Negatives: " + str(FN) + "(" +
                                 str(FNP) + "%)")

        self.Helpers.logger.info("Specificity: " + str(specificity))
        self.Helpers.logger.info("Misclassification: " + str(misc) + "(" +
                                 str(miscp) + "%)")

    def freeze_model(self):
        """ Saves the model weights. """

        tf.saved_model.save(
            self.tf_model,
            self.Helpers.confs["cnn"]["model"]["saved_model_dir"])

        fmodel = tf.function(lambda x: self.tf_model(x))
        fmodel = fmodel.get_concrete_function(x=tf.TensorSpec(
            self.tf_model.inputs[0].shape, self.tf_model.inputs[0].dtype))

        freeze = convert_variables_to_constants_v2(fmodel)
        freeze.graph.as_graph_def()

        layers = [op.name for op in freeze.graph.get_operations()]
        self.Helpers.logger.info("Frozen model layers")
        for layer in layers:
            self.Helpers.logger.info(layer)

        self.Helpers.logger.info("Frozen model inputs")
        self.Helpers.logger.info(freeze.inputs)
        self.Helpers.logger.info("Frozen model outputs")
        self.Helpers.logger.info(freeze.outputs)

        tf.io.write_graph(
            graph_or_graph_def=freeze.graph,
            logdir=self.Helpers.confs["cnn"]["model"]["freezing_log_dir"],
            name=self.Helpers.confs["cnn"]["model"]["frozen"],
            as_text=False)

    def save_weights(self):
        """ Saves the model weights. """

        self.tf_model.save_weights(self.weights_file)
        self.Helpers.logger.info("Weights saved " + self.weights_file)

    def save_model_as_json(self):
        """ Saves the model to JSON. """

        with open(self.model_json, "w") as file:
            file.write(self.tf_model.to_json())

        self.Helpers.logger.info("Model JSON saved " + self.model_json)

    def load_model_and_weights(self):
        """ Loads the model and weights. """

        with open(self.model_json) as file:
            m_json = file.read()

        self.tf_model = tf.keras.models.model_from_json(m_json)
        self.tf_model.load_weights(self.weights_file)

        self.Helpers.logger.info("Model loaded ")

        self.tf_model.summary()

    def test_classifier(self):
        """ Tests the trained model. """

        files = 0
        tp = 0
        fp = 0
        tn = 0
        fn = 0
        totaltime = 0

        for testFile in os.listdir(self.testing_dir):
            if os.path.splitext(testFile)[1] in self.valid:

                files += 1
                fileName = self.testing_dir + "/" + testFile

                start = time.time()
                img = cv2.imread(fileName).astype(np.float32)
                self.Helpers.logger.info("Loaded test image " + fileName)

                img = cv2.resize(img,
                                 (self.Helpers.confs["cnn"]["data"]["dim"],
                                  self.Helpers.confs["cnn"]["data"]["dim"]))
                img = self.reshape(img)

                prediction = self.get_predictions(img)
                end = time.time()
                benchmark = end - start
                totaltime += benchmark

                msg = ""
                if prediction == 1 and "_1." in testFile:
                    tp += 1
                    msg = "Acute Lymphoblastic Leukemia correctly detected (True Positive) in " + str(
                        benchmark) + " seconds."
                elif prediction == 1 and "_0." in testFile:
                    fp += 1
                    msg = "Acute Lymphoblastic Leukemia incorrectly detected (False Positive) in " + str(
                        benchmark) + " seconds."
                elif prediction == 0 and "_0." in testFile:
                    tn += 1
                    msg = "Acute Lymphoblastic Leukemia correctly not detected (True Negative) in " + str(
                        benchmark) + " seconds."
                elif prediction == 0 and "_1." in testFile:
                    fn += 1
                    msg = "Acute Lymphoblastic Leukemia incorrectly not detected (False Negative) in " + str(
                        benchmark) + " seconds."
                self.Helpers.logger.info(msg)

        self.Helpers.logger.info("Images Classifier: " + str(files))
        self.Helpers.logger.info("True Positives: " + str(tp))
        self.Helpers.logger.info("False Positives: " + str(fp))
        self.Helpers.logger.info("True Negatives: " + str(tn))
        self.Helpers.logger.info("False Negatives: " + str(fn))
        self.Helpers.logger.info("Total Time Taken: " + str(totaltime))

    def send_request(self, img_path):
        """ Sends image to the inference API endpoint. """

        self.Helpers.logger.info("Sending request for: " + img_path)

        _, img_encoded = cv2.imencode('.png', cv2.imread(img_path))
        response = requests.post(self.addr,
                                 data=img_encoded.tobytes(),
                                 headers=self.headers)
        response = json.loads(response.text)

        return response

    def test_http_classifier(self):
        """ Tests the trained model via HTTP. """

        msg = ""

        files = 0
        tp = 0
        fp = 0
        tn = 0
        fn = 0

        self.addr = "http://" + self.Helpers.confs["cnn"]["system"]["server"] + \
         ':'+str(self.Helpers.confs["cnn"]["system"]["port"]) + '/Inference'
        self.headers = {'content-type': 'image/jpeg'}

        for data in os.listdir(self.testing_dir):
            if os.path.splitext(data)[1] in self.valid:

                response = self.send_request(self.testing_dir + "/" + data)

                msg = ""
                if response["Diagnosis"] == "Positive" and "_1." in data:
                    tp += 1
                    msg = "Acute Lymphoblastic Leukemia correctly detected (True Positive)"
                elif response["Diagnosis"] == "Positive" and "_0." in data:
                    fp += 1
                    msg = "Acute Lymphoblastic Leukemia incorrectly detected (False Positive)"
                elif response["Diagnosis"] == "Negative" and "_0." in data:
                    tn += 1
                    msg = "Acute Lymphoblastic Leukemia correctly not detected (True Negative)"
                elif response["Diagnosis"] == "Negative" and "_1." in data:
                    fn += 1
                    msg = "Acute Lymphoblastic Leukemia incorrectly not detected (False Negative)"

                files += 1

                self.Helpers.logger.info(msg)
                print()
                time.sleep(7)

        self.Helpers.logger.info("Images Classifier: " + str(files))
        self.Helpers.logger.info("True Positives: " + str(tp))
        self.Helpers.logger.info("False Positives: " + str(fp))
        self.Helpers.logger.info("True Negatives: " + str(tn))
        self.Helpers.logger.info("False Negatives: " + str(fn))

    def http_classify(self, req):
        """ Classifies an image sent via HTTP. """

        if len(req.files) != 0:
            img = np.frombuffer(req.files['file'].read(), np.uint8)
        else:
            img = np.frombuffer(req.data, np.uint8)

        img = cv2.imdecode(img, cv2.IMREAD_UNCHANGED)
        img = cv2.resize(img, (self.Helpers.confs["cnn"]["data"]["dim"],
                               self.Helpers.confs["cnn"]["data"]["dim"]))
        img = self.reshape(img)

        return self.get_predictions(img)

    def vr_http_classify(self, img):
        """ Classifies an image sent via from VR via HTTP. """

        img = cv2.resize(img, (self.Helpers.confs["cnn"]["data"]["dim"],
                               self.Helpers.confs["cnn"]["data"]["dim"]))
        img = self.reshape(img)

        return self.get_predictions(img)

    def get_predictions(self, img):
        """ Gets a prediction for an image. """

        predictions = self.tf_model.predict(img)
        prediction = np.argmax(predictions, axis=-1)

        return prediction

    def reshape(self, img):
        """ Reshapes an image. """

        dx, dy, dz = img.shape
        input_data = img.reshape((-1, dx, dy, dz))

        return input_data
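
figures_of_merit reads the standard cells out of a 2x2 confusion matrix whose rows are true labels and columns are predictions; the same arithmetic on a small made-up matrix:

import numpy as np

matrix = np.array([[40, 5],    # placeholder confusion matrix:
                   [3, 52]])   # row 0 holds TN and FP, row 1 holds FN and TP

TN, FP = matrix[0][0], matrix[0][1]
FN, TP = matrix[1][0], matrix[1][1]

specificity = TN / (TN + FP)  # 40 / 45 = 0.888...
misclassified = FP + FN       # 8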
Example #28
class Trainer():

    ###############################################################
    #
    # Sets up all default requirements and placeholders
    # needed for the NLU engine to run.
    #
    # - Helpers: Useful global functions
    # - JumpWay/jumpWayClient: iotJumpWay class and connection
    # - Logging: Logging class
    #
    ###############################################################

    def __init__(self):

        self.Helpers = Helpers()
        self._confs = self.Helpers.loadConfigs()
        self.LogFile = self.Helpers.setLogFile(self._confs["aiCore"]["Logs"] +
                                               "Train/")

        self.intentMap = {}
        self.words = []
        self.classes = []
        self.dataCorpus = []

        self.Model = Model()
        self.Data = Data()

    def setupData(self):

        self.trainingData = self.Data.loadTrainingData()

        self.words, self.classes, self.dataCorpus, self.intentMap = self.Data.prepareData(
            self.trainingData)
        self.x, self.y = self.Data.finaliseData(self.classes, self.dataCorpus,
                                                self.words)

        self.Helpers.logMessage(self.LogFile, "TRAIN", "INFO",
                                "NLU Training Data Ready")

    def setupEntities(self):

        if self._confs["NLU"]["Entities"] == "Mitie":
            self.entityController = Entities()
            self.entityController.trainEntities(
                self._confs["NLU"]["Mitie"]["ModelLocation"],
                self.trainingData)
            self.Helpers.logMessage(self.LogFile, "TRAIN", "OK",
                                    "NLU Trainer Entities Ready")

    def trainModel(self):

        while True:
            self.Helpers.logMessage(self.LogFile, "TRAIN", "ACTION",
                                    "Ready To Begin Training ? (Yes/No)")
            userInput = input(">")

            if userInput == 'Yes': break
            if userInput == 'No': exit()

        self.setupData()
        self.setupEntities()

        humanStart, trainingStart = self.Helpers.timerStart()

        self.Model.trainDNN(self.x, self.y, self.words, self.classes,
                            self.intentMap)

        trainingEnd, trainingTime, humanEnd = self.Helpers.timerEnd(
            trainingStart)

        self.Helpers.logMessage(
            self.LogFile, "TRAIN", "OK", "NLU Model Trained At " + humanEnd +
            " In " + str(trainingEnd) + " Seconds")
Example #29
    def __init__(self):
        """ Initializes the Augmentation class. """

        self.Data = Data()
Example #30
class AllDS2020():
    """ AllDS2020 Wrapper Class

    Core wrapper class for the Tensorflow 2.0 AllDS2020 classifier.
    """
    def __init__(self):

        self.Helpers = Helpers("Core")
        self.optimizer = "Adam"
        self.mode = "Local"
        self.do_augmentation = True

    def do_data(self):
        """ Creates/sorts dataset. """

        self.Data = Data(self.optimizer, self.do_augmentation)
        self.Data.data_and_labels_sort()

        if not self.do_augmentation:
            self.Data.data_and_labels_prepare()
        else:
            self.Data.data_and_labels_augmentation_prepare()

        self.Data.shuffle()
        self.Data.get_split()

    def do_model(self):
        """ Creates & trains the model. 
        
        Replicates the network and data splits outlined in the Acute Leukemia Classification
        Using Convolution Neural Network In Clinical Decision Support System paper
        using Tensorflow 2.0.

        https://airccj.org/CSCP/vol7/csit77505.pdf
        """

        self.Model = Model(self.optimizer, self.do_augmentation)

        self.Model.build_network(self.Data.X_train, self.Data.X_test,
                                 self.Data.y_train, self.Data.y_test)
        self.Model.compile_and_train()

        self.Model.save_model_as_json()
        self.Model.save_weights()

    def do_evaluate(self):
        """ Predictions & Evaluation """

        self.Model.predictions()
        self.Model.evaluate_model()

    def do_metrics(self):
        """ Predictions & Evaluation """
        self.Model.visualize_metrics()

        self.Model.confusion_matrix()
        self.Model.figures_of_merit()

    def do_create_model(self):
        """ Loads the model """

        self.Model = Model(self.optimizer, self.do_augmentation)

    def do_load_model(self):
        """ Loads the model """

        self.Model.load_model_and_weights()

    def do_classify(self):
        """ Loads model and classifies test data """

        self.do_create_model()
        self.do_load_model()
        self.Model.test_classifier()

    def do_http_classify(self):
        """ Loads model and classifies test data """

        self.do_create_model()
        self.Model.test_http_classifier()
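
A hypothetical end-to-end run of the AllDS2020 wrapper above, chaining the training-mode methods in order:

# Usage sketch, assuming the Helpers, Data and Model modules
# from this project are importable.
alld = AllDS2020()
alld.do_data()      # sort, prepare/augment, shuffle and split the data
alld.do_model()     # build, train and save the network
alld.do_evaluate()  # predictions and evaluation
alld.do_metrics()   # plots, confusion matrix and figures of merit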