# Ejemplo n.º 1 (Example #1, score: 0)
    def __init__(self):
        """Load the trained chatbot model, vocabulary and intents from disk.

        Rebuilds the exact network topology used at training time so the
        saved weights can be restored, then loads them.

        Raises:
            ValueError: if a singleton instance already exists — the bot
                must be obtained through its factory, not constructed twice.
        """
        print("Init")
        # Enforce singleton usage: a second direct instantiation is a bug.
        if self.instance is not None:
            # Fixed garbled message (Turkish: "did you forget to call the bot function?")
            raise ValueError("Bot fonksiyonunu çağırmayı mı unuttun?")

        self.stemmer = LancasterStemmer()
        # Use a context manager so the data file is closed even on error
        # (the original leaked the handle via pickle.load(open(...))).
        # NOTE(review): pickle.load is unsafe on untrusted input; acceptable
        # here only because the file is produced by our own trainer.
        with open(path.getPath('trained_data'), "rb") as data_file:
            data = pickle.load(data_file)
        self.words = data['words']
        self.classes = data['classes']
        train_x = data['train_x']
        train_y = data['train_y']
        with open(path.getJsonPath()) as json_data:
            self.intents = json.load(json_data)

        # Same architecture as training: input sized to the bag-of-words
        # vector, two hidden layers of 8 units, softmax over intent classes.
        net = tflearn.input_data(shape=[None, len(train_x[0])])
        net = tflearn.fully_connected(net, 8)
        net = tflearn.fully_connected(net, 8)
        net = tflearn.fully_connected(net, len(train_y[0]), activation='softmax')
        net = tflearn.regression(net)
        self.model = tflearn.DNN(net, tensorboard_dir=path.getPath('train_logs'))
        self.model.load(path.getPath('model.tflearn'))
# Ejemplo n.º 2 (Example #2, score: 0)
# Split the prepared training matrix into model inputs (column 0, the
# bag-of-words vectors) and one-hot targets (column 1), as plain lists.
train_x = list(training[:, 0])
train_y = list(training[:, 1])

tf.reset_default_graph()

# Network: input layer sized to the bag-of-words vector, two hidden
# layers of 8 units each, and a softmax output over the intent classes.
net = tflearn.input_data(shape=[None, len(train_x[0])])
for _ in range(2):
    net = tflearn.fully_connected(net, 8)
net = tflearn.fully_connected(net, len(train_y[0]), activation='softmax')
net = tflearn.regression(net)

# Train, logging to TensorBoard, then persist the weights to disk.
model = tflearn.DNN(net, tensorboard_dir=path.getPath('train_logs'))
model.fit(train_x, train_y, n_epoch=20000, batch_size=500, show_metric=True)
model.save(path.getPath('model.tflearn'))


def clean_up_sentence(sentence):
    """Tokenize *sentence* and return its words lower-cased and stemmed."""
    tokens = nltk.word_tokenize(sentence)
    # Lower-casing before stemming removes upper/lower-case differences.
    return [stemmer.stem(token.lower()) for token in tokens]


def bow(sentence, words, show_details=False):
    sentence_words = clean_up_sentence(sentence)
    bag = [0] * len(words)
    for s in sentence_words: