Example #1
import tensorflow as tf
from tensorflow.keras import layers, models
from tensorflow.keras.callbacks import EarlyStopping

# Compile the previous model (the snippet begins mid-example)
model.compile(optimizer='SGD',
              loss=tf.nn.softmax_cross_entropy_with_logits,
              run_eagerly=True)
'''
MODEL 5 Input --> Dense Layer --> Projection --> Output
'''
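# NOTE: ProjectVectorLayer is not defined in this snippet. A minimal sketch of
# what such a custom layer might look like; the trainable Dense projection and
# the L2-normalization are assumptions, not the original implementation.
class ProjectVectorLayer(tf.keras.layers.Layer):
    """Hypothetical: project inputs to `dimension` units and L2-normalize."""

    def __init__(self, dimension, **kwargs):
        super().__init__(**kwargs)
        self.dimension = dimension
        self.proj = tf.keras.layers.Dense(dimension, use_bias=False)

    def call(self, inputs):
        # Linear projection followed by unit-length normalization (assumed)
        return tf.math.l2_normalize(self.proj(inputs), axis=-1)
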
model = models.Sequential()
model.add(layers.Dense(32, input_shape=(300, )))
model.add(ProjectVectorLayer(dimension=32))

model.compile(optimizer='SGD',
              loss=tf.nn.softmax_cross_entropy_with_logits,
              run_eagerly=True)
'''
TRAINING THE MODEL
'''
# Checkpoint to save weights at lowest validation loss
checkpoint_filepath = '/tmp/checkpoint'
model_checkpoint_callback = tf.keras.callbacks.ModelCheckpoint(
    filepath=checkpoint_filepath,
    save_weights_only=True,
    monitor='val_loss',
    mode='min',
    save_best_only=True)

early_stopping = EarlyStopping(monitor='val_loss', mode='min', verbose=1, patience=300)


num_epochs = 250
history = model.fit(x_train,
                    y_train,
                    epochs=num_epochs,
                    batch_size=32,
                    validation_data=(x_val, y_val),
                    callbacks=[model_checkpoint_callback, early_stopping])
# Print the model structure summary (summary() prints directly and returns None)
classifier.summary()

# Compiling the CNN (`lr` is deprecated in favor of `learning_rate` in TF 2.x)
from tensorflow.keras.optimizers import Adam

classifier.compile(optimizer=Adam(learning_rate=1e-4),
                   loss='categorical_crossentropy',
                   metrics=['accuracy'])
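
# `create_callback` is a user-defined helper that is not shown in this snippet.
# A minimal sketch of what it might do, assuming the second argument is the
# ModelCheckpoint `verbose` level (an assumption, not the original code):
def create_callback(path, verbose):
    # Save the model weights to `path` after every epoch
    return tf.keras.callbacks.ModelCheckpoint(filepath=path,
                                              save_weights_only=True,
                                              verbose=verbose)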

cp_callback = create_callback(checkpoint_path, 1)

# fit_generator() is deprecated in TF 2.x; fit() accepts generators directly
hist = classifier.fit(
    train_generator,
    epochs=num_epoch,
    steps_per_epoch=num_train_samples // batch_size,
    validation_data=validation_generator,
    validation_steps=num_validate_samples // batch_size,
    # 'auto' is only accepted by very old Keras; modern Keras expects a dict
    # of class weights (see the sketch after this call)
    class_weight='auto',
    callbacks=[cp_callback])
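
# Sketch: building the class-weight dict that modern Keras expects in place of
# 'auto'. Assumes `train_generator.classes` exists, as it does for
# ImageDataGenerator flows; sklearn's 'balanced' heuristic is one common choice.
import numpy as np
from sklearn.utils.class_weight import compute_class_weight

classes = np.unique(train_generator.classes)
weights = compute_class_weight(class_weight='balanced',
                               classes=classes,
                               y=train_generator.classes)
class_weight_dict = dict(zip(classes, weights))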

# Plot training and validation accuracy
plot_accuracy(hist)

# Plot training and validation loss
plot_loss(hist)
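
# `plot_accuracy` and `plot_loss` are user-defined helpers not shown in this
# snippet. A minimal matplotlib sketch; the 'accuracy'/'val_accuracy' history
# keys assume TF 2.x metric names (older Keras used 'acc'/'val_acc'):
import matplotlib.pyplot as plt

def plot_accuracy(history):
    # Training vs. validation accuracy per epoch
    plt.plot(history.history['accuracy'], label='train accuracy')
    plt.plot(history.history['val_accuracy'], label='val accuracy')
    plt.xlabel('epoch')
    plt.ylabel('accuracy')
    plt.legend()
    plt.show()

def plot_loss(history):
    # Training vs. validation loss per epoch
    plt.plot(history.history['loss'], label='train loss')
    plt.plot(history.history['val_loss'], label='val loss')
    plt.xlabel('epoch')
    plt.ylabel('loss')
    plt.legend()
    plt.show()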

# Print test set accuracy and loss values (evaluate_generator() is deprecated;
# evaluate() accepts generators, and steps must be an integer)
scores = classifier.evaluate(test_generator,
                             steps=num_test_samples // batch_size)
print("loss: {}, accuracy: {}".format(scores[0], scores[1]))
num_epochs = 1000
history = model.fit(x_train,
                    y_train,
                    epochs=num_epochs,
                    batch_size=batch_size,
                    validation_data=(x_val, y_val),
                    callbacks=[model_checkpoint_callback])

# Loading the Best Weights
model.load_weights(checkpoint_filepath)

# Save this Model
model.save_weights('verb_prediction_from_dependent_model')

# Training History
history_dict = history.history
loss_values = history_dict['loss']
val_loss_values = history_dict['val_loss']

num_epochs = len(val_loss_values)

# Plot the loss
fun.plot_loss(num_epochs, loss_values, val_loss_values)

# Calculate Perplexity
model.load_weights('verb_prediction_from_dependent_model')  # Load the model

perplexity = fun.calculate_perplexity(x_val, y_val, model)
print("Perplexity: " + str(perplexity))
Example #4
# Print the model structure summary (summary() prints directly and returns None)
model.summary()

# Transfer Learning
print("\nPerforming Transfer Learning")

# Compiling the model
model.compile(optimizer='adam',
              loss='categorical_crossentropy',
              metrics=['accuracy'])

# Fit the transfer-learning model to the data from the generators
# (fit_generator() is deprecated in TF 2.x; fit() accepts generators directly,
# and `shuffle` is ignored when training from a generator)
history = model.fit(train_generator,
                    epochs=num_epoch,
                    steps_per_epoch=num_train_samples // batch_size,
                    validation_data=validation_generator,
                    validation_steps=num_validate_samples // batch_size,
                    # 'auto' is legacy Keras; modern Keras expects a dict of
                    # class weights (see the earlier sketch)
                    class_weight='auto')

# Plot training and validation accuracy
plot_accuracy(history)

# Plot training and validation loss
plot_loss(history)

# Print test set accuracy and loss values (evaluate_generator() is deprecated;
# evaluate() accepts generators, and steps must be an integer)
scores = model.evaluate(test_generator,
                        steps=num_test_samples // batch_size)
print("loss: {}, accuracy: {}".format(scores[0], scores[1]))