Code example #1
import json

import tensorflow as tf
import tflearn

# Helpers and Data are project-local modules (configuration loading and
# data preparation); their import paths depend on the repository layout.


class Model():
    def __init__(self):

        ###############################################################
        #
        # Sets up all default requirements
        #
        # - Helpers: Useful global functions
        # - Data: Data functions
        #
        ###############################################################

        self.Helpers = Helpers()
        self.confs = self.Helpers.loadConfigs()

        self.Data = Data()

    def createDNNLayers(self, x, y):

        ###############################################################
        #
        # Sets up the DNN layers, configuration in required/confs.json
        #
        ###############################################################

        net = tflearn.input_data(shape=[None, len(x[0])])

        for i in range(self.confs["NLU"]['FcLayers']):
            net = tflearn.fully_connected(net, self.confs["NLU"]['FcUnits'])
        net = tflearn.fully_connected(net,
                                      len(y[0]),
                                      activation=str(
                                          self.confs["NLU"]['Activation']))

        if self.confs["NLU"]['Regression']:
            net = tflearn.regression(net)

        return net

    def trainDNN(self, x, y, words, classes, intentMap):

        ###############################################################
        #
        # Trains the DNN, configuration in required/confs.json
        #
        ###############################################################

        tf.reset_default_graph()

        tmodel = tflearn.DNN(
            self.createDNNLayers(x, y),
            tensorboard_dir=self.confs["NLU"]['TFLearn']['Logs'],
            tensorboard_verbose=self.confs["NLU"]['TFLearn']['LogsLevel'])

        tmodel.fit(x,
                   y,
                   n_epoch=self.confs["NLU"]['Epochs'],
                   batch_size=self.confs["NLU"]['BatchSize'],
                   show_metric=self.confs["NLU"]['ShowMetric'])

        self.saveModelData(
            self.confs["NLU"]['TFLearn']['Data'], {
                'words': words,
                'classes': classes,
                'x': x,
                'y': y,
                'intentMap': [intentMap]
            }, tmodel)

    def saveModelData(self, path, data, tmodel):

        ###############################################################
        #
        # Saves the model data for TFLearn and the NLU engine,
        # configuration in required/confs.json
        #
        ###############################################################

        tmodel.save(self.confs["NLU"]['TFLearn']['Path'])

        with open(path, "w") as outfile:
            json.dump(data, outfile)

    def buildDNN(self, x, y):

        ###############################################################
        #
        # Loads the DNN model, configuration in required/confs.json
        #
        ###############################################################

        tf.reset_default_graph()
        tmodel = tflearn.DNN(self.createDNNLayers(x, y))
        tmodel.load(self.confs["NLU"]['TFLearn']['Path'])
        return tmodel

    def predict(self, tmodel, parsedSentence, trainedWords, trainedClasses):

        ###############################################################
        #
        # Makes a prediction against the trained model, checking the
        # confidence and then logging the results.
        #
        ###############################################################

        predictions = [[index, confidence] for index, confidence in enumerate(
            tmodel.predict(
                [self.Data.makeBagOfWords(parsedSentence, trainedWords)])[0])]
        predictions.sort(key=lambda x: x[1], reverse=True)

        classification = []
        for prediction in predictions:
            classification.append(
                (trainedClasses[prediction[0]], prediction[1]))

        return classification
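
Both TFLearn variants read their hyperparameters from required/confs.json. A minimal sketch of the "NLU" section that code example #1 expects is shown below as a Python dictionary; the key names are taken from the lookups in the class, while the values and file paths are placeholders rather than the project's actual settings.

# Hypothetical contents of the "NLU" section of required/confs.json.
# Key names mirror the config lookups in code example #1; values are
# illustrative placeholders only.
NLU_CONF_SKETCH = {
    "NLU": {
        "FcLayers": 2,              # number of hidden fully connected layers
        "FcUnits": 128,             # units per hidden layer
        "Activation": "softmax",    # activation of the output layer
        "Regression": True,         # whether to append a regression layer
        "Epochs": 1000,
        "BatchSize": 8,
        "ShowMetric": True,
        "TFLearn": {
            "Logs": "model/logs/",          # TensorBoard log directory
            "LogsLevel": 0,                 # tensorboard_verbose level
            "Data": "model/data.json",      # where saveModelData dumps words/classes/x/y
            "Path": "model/model.tflearn"   # where the trained DNN is saved/loaded
        }
    }
}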
Code example #2
import json

import tensorflow as tf
import tflearn

# Helpers and Data are project-local modules; their import paths depend
# on the repository layout.


class Model():
    """ ALL Detection System 2019 Model Class

    Model class for the ALL Detection System 2019 Chatbot. 
    """
    def __init__(self):
        """ Initializes the Model class. """

        self.Helpers = Helpers()
        self.Data = Data()

    def createDNNLayers(self, x, y):
        """ Sets up the DNN layers """

        net = tflearn.input_data(shape=[None, len(x[0])])

        for i in range(self.Helpers.confs["NLU"]['FcLayers']):
            net = tflearn.fully_connected(net,
                                          self.Helpers.confs["NLU"]['FcUnits'])
        net = tflearn.fully_connected(
            net,
            len(y[0]),
            activation=str(self.Helpers.confs["NLU"]['Activation']))

        if self.Helpers.confs["NLU"]['Regression']:
            net = tflearn.regression(net)

        return net

    def trainDNN(self, x, y, words, classes, intentMap):
        """ Trains the DNN """

        tf.reset_default_graph()

        tmodel = tflearn.DNN(
            self.createDNNLayers(x, y),
            tensorboard_dir=self.Helpers.confs["NLU"]['TFLearn']['Logs'],
            tensorboard_verbose=self.Helpers.confs["NLU"]['TFLearn']['LogsLevel'])

        tmodel.fit(x,
                   y,
                   n_epoch=self.Helpers.confs["NLU"]['Epochs'],
                   batch_size=self.Helpers.confs["NLU"]['BatchSize'],
                   show_metric=self.Helpers.confs["NLU"]['ShowMetric'])

        self.saveModelData(
            self.Helpers.confs["NLU"]['TFLearn']['Data'], {
                'words': words,
                'classes': classes,
                'x': x,
                'y': y,
                'intentMap': [intentMap]
            }, tmodel)

    def saveModelData(self, path, data, tmodel):
        """ Saves the model data """

        tmodel.save(self.Helpers.confs["NLU"]['TFLearn']['Path'])

        with open(path, "w") as outfile:
            json.dump(data, outfile)

    def buildDNN(self, x, y):
        """ Loads the DNN model """

        tf.reset_default_graph()
        tmodel = tflearn.DNN(self.createDNNLayers(x, y))
        tmodel.load(self.Helpers.confs["NLU"]['TFLearn']['Path'])
        return tmodel

    def predict(self, tmodel, parsedSentence, trainedWords, trainedClasses):
        """ Makes a prediction """

        predictions = [[index, confidence] for index, confidence in enumerate(
            tmodel.predict(
                [self.Data.makeBagOfWords(parsedSentence, trainedWords)])[0])]
        predictions.sort(key=lambda x: x[1], reverse=True)

        classification = []
        for prediction in predictions:
            classification.append(
                (trainedClasses[prediction[0]], prediction[1]))

        return classification
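
Code example #2 is functionally the same as #1; the only structural change is that the configuration is exposed as self.Helpers.confs rather than being copied into self.confs in the constructor. A minimal driver sketch for either variant follows; prepare_training_data is a hypothetical stand-in for the project's Data pipeline, and the raw string passed to predict is only illustrative, since makeBagOfWords may expect a pre-parsed sentence.

# Hypothetical usage sketch; prepare_training_data() is not part of the
# code above, it stands in for whatever produces the training arrays.
model = Model()

words, classes, x, y, intentMap = prepare_training_data()

model.trainDNN(x, y, words, classes, intentMap)  # trains and persists the model
tmodel = model.buildDNN(x, y)                    # rebuilds the graph and loads it
ranked = model.predict(tmodel, "hello there", words, classes)
print(ranked[0])                                 # highest-confidence (class, confidence) pair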
Code example #3
File: Model.py  Project: AdamMiltonBarker/GeniSysAI-1
import json

import tensorflow as tf

# Helpers and Data are project-local modules; their import paths depend
# on the repository layout.


class Model():
    """ Model Class

	Model helper functions.
	"""
    def __init__(self):
        """ Initializes the class. """

        self.Helpers = Helpers("Model")
        self.Data = Data()

        self.Helpers.logger.info("Model class initialized.")

    def createDNN(self, x, y):
        """ Sets up the DNN layers """

        tf_model = tf.keras.models.Sequential([
            tf.keras.layers.Dense(self.Helpers.confs["NLU"]['FcUnits'],
                                  activation='relu',
                                  input_shape=[len(x[0])]),
            tf.keras.layers.Dense(self.Helpers.confs["NLU"]['FcUnits'],
                                  activation='relu'),
            tf.keras.layers.Dense(self.Helpers.confs["NLU"]['FcUnits'],
                                  activation='relu'),
            tf.keras.layers.Dense(self.Helpers.confs["NLU"]['FcUnits'],
                                  activation='relu'),
            tf.keras.layers.Dense(
                len(y[0]), activation=self.Helpers.confs["NLU"]['Activation'])
        ], "GeniSysAI")
        tf_model.summary()
        self.Helpers.logger.info("Network initialization complete.")

        return tf_model

    def trainDNN(self, x, y, words, classes, intentMap):
        """ Trains the DNN """

        tf_model = self.createDNN(x, y)

        optimizer = tf.keras.optimizers.Adam(
            lr=self.Helpers.confs["NLU"]["LR"],
            decay=self.Helpers.confs["NLU"]["Decay"])

        tf_model.compile(optimizer=optimizer,
                         loss='binary_crossentropy',
                         metrics=[
                             tf.keras.metrics.BinaryAccuracy(name='acc'),
                             tf.keras.metrics.Precision(name='precision'),
                             tf.keras.metrics.Recall(name='recall'),
                             tf.keras.metrics.AUC(name='auc')
                         ])

        tf_model.fit(x,
                     y,
                     epochs=self.Helpers.confs["NLU"]['Epochs'],
                     batch_size=self.Helpers.confs["NLU"]['BatchSize'])

        self.saveModelData(
            self.Helpers.confs["NLU"]['Model']['Data'], {
                'words': words,
                'classes': classes,
                'x': x,
                'y': y,
                'intentMap': [intentMap]
            }, tf_model)

    def saveModelData(self, path, data, tmodel):
        """ Saves the model data """

        with open(self.Helpers.confs["NLU"]['Model']['Model'], "w") as file:
            file.write(tmodel.to_json())

        self.Helpers.logger.info("Model JSON saved " +
                                 self.Helpers.confs["NLU"]['Model']['Model'])

        with open(path, "w") as outfile:
            json.dump(data, outfile)

        tmodel.save_weights(self.Helpers.confs["NLU"]['Model']['Weights'])
        self.Helpers.logger.info("Weights saved " +
                                 self.Helpers.confs["NLU"]['Model']['Weights'])

    def buildDNN(self, x, y):
        """ Loads the DNN model """

        with open(self.Helpers.confs["NLU"]['Model']['Model']) as file:
            m_json = file.read()

        tmodel = tf.keras.models.model_from_json(m_json)
        tmodel.load_weights(self.Helpers.confs["NLU"]['Model']['Weights'])

        self.Helpers.logger.info("Model loaded ")
        return tmodel

    def predict(self, tmodel, parsedSentence, trainedWords, trainedClasses):
        """ Makes a prediction """

        predictions = [[index, confidence] for index, confidence in enumerate(
            tmodel.predict(
                [[self.Data.makeBagOfWords(parsedSentence, trainedWords)]])[0])]
        predictions.sort(key=lambda x: x[1], reverse=True)

        classification = []
        for prediction in predictions:
            classification.append(
                (trainedClasses[prediction[0]], prediction[1]))

        return classification
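
Code example #3 swaps TFLearn for tf.keras: the network is a fixed four-hidden-layer Sequential model, training uses Adam with a configured learning rate and decay, and persistence is split into an architecture JSON (to_json) plus a separate weights file. The "NLU" configuration section it reads would therefore look roughly like the sketch below; the key names come from the code, while the values and paths are assumptions.

# Hypothetical "NLU" section for the tf.keras variant; key names mirror
# the config lookups in code example #3, values are placeholders.
NLU_CONF_SKETCH = {
    "NLU": {
        "FcUnits": 128,             # units per hidden Dense layer
        "Activation": "softmax",    # output layer activation
        "LR": 1e-3,                 # Adam learning rate
        "Decay": 1e-6,              # Adam decay
        "Epochs": 1000,
        "BatchSize": 8,
        "Model": {
            "Data": "model/data.json",       # words/classes/x/y/intentMap dump
            "Model": "model/model.json",     # architecture saved via to_json()
            "Weights": "model/weights.h5"    # save_weights()/load_weights() target
        }
    }
}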