except:
    # NOTE(review): bare `except:` catches everything (including
    # KeyboardInterrupt/SystemExit). The matching `try` — presumably an
    # attempt to load a previously saved model — is outside this chunk;
    # confirm and narrow to a specific exception type (e.g. OSError).

    # Fallback path: build, train and persist the chatbot model from
    # scratch when loading fails.
    # Make our neural network: bag-of-words input vector -> 8-unit ReLU
    # hidden layer -> softmax over the intent labels.
    myChatModel = Sequential()
    myChatModel.add(Dense(8, input_shape=[len(words)], activation='relu'))
    myChatModel.add(Dense(len(labels), activation='softmax'))

    # optimize the model: categorical cross-entropy matches the one-hot
    # softmax output; Adam with default learning rate.
    myChatModel.compile(loss='categorical_crossentropy',
                        optimizer='adam',
                        metrics=['accuracy'])

    # train the model (`training` and `output` are prepared elsewhere in
    # this file — not visible in this chunk).
    myChatModel.fit(training, output, epochs=1000, batch_size=8)

    # serialize model architecture to YAML and save it to disk.
    # NOTE(review): Model.to_yaml() was removed in TensorFlow/Keras 2.6 —
    # confirm the pinned TF version still provides it (to_json is the
    # supported alternative).
    model_yaml = myChatModel.to_yaml()
    with open("chatbotmodel.yaml", "w") as y_file:
        y_file.write(model_yaml)

    # serialize weights to HDF5
    myChatModel.save_weights("chatbotmodel.h5")
    # (message text says "from disk" although the model was just saved
    # *to* disk — left unchanged here since it is a runtime string)
    print("Saved model from disk")

def bag_of_words(s, words):
    """Convert sentence ``s`` into a bag-of-words vector over ``words``.

    The sentence is tokenized with NLTK and each token is lower-cased and
    stemmed (``stemmer`` is a module-level object defined elsewhere in the
    file) before being matched against the vocabulary.

    NOTE(review): this function is truncated in this chunk — the body of
    the ``for`` loop below is not visible here, so the part that actually
    sets entries of ``bag`` cannot be reviewed.
    """
    # One slot per vocabulary word, initially all zero.
    bag = [0 for _ in range(len(words))]

    # Tokenize, then normalize each token the same way the vocabulary
    # presumably was built — lower-case + stem. TODO confirm `words`
    # contains stemmed, lower-cased entries.
    s_words = nltk.word_tokenize(s)
    s_words = [stemmer.stem(word.lower()) for word in s_words]

    for se in s_words:
# Ejemplo n.º 2  (paste artifact: separator between two unrelated snippets)
# 0
# Train the addition model on the prepared data. `addition_model`,
# `input_data` and `output_data` are defined earlier in the file.
addition_model.fit(input_data,
                   output_data,
                   batch_size=1,
                   epochs=100,
                   verbose=1)

# Persist the full model (architecture + weights + optimizer state) to HDF5.
addition_model.save("addition_model.h5")

# Optional export for TensorFlow.js:
# tfjs.converters.save_keras_model(addition_model, "./addition_model")

print("== Modell als JSON-Struktur ==")
pprint(addition_model.to_json())
# Fix: use print (not pprint) for this static header, matching the JSON
# header above — pprint would wrap the literal in repr quotes.
print("== Modell als YAML-Struktur ==")
# NOTE(review): Model.to_yaml() was removed in TF/Keras 2.6 — confirm the
# pinned TensorFlow version still provides it.
pprint(addition_model.to_yaml())

# Persist the weights alone to HDF5.
addition_model.save_weights("addition_weights.h5")

# Persist the model architecture as a JSON file.
json_str = addition_model.to_json()

with open("addition_model.json", "w") as json_file:
    json_file.write(json_str)

# Reload the model from the .h5 file and sanity-check one prediction:
# 5 + 5 should come out close to 10.
model = load_model('addition_model.h5')
result = model.predict([[[5,
                          5]]])  # result should be approximately 10
print("Ergebnis: {}".format(result))