import tensorflow as tf
import keras
from keras.applications.resnet50 import ResNet50

# Instantiate the stock ResNet50 model.
# NOTE(review): rebinding the class name to the instance shadows the
# imported ResNet50 class; kept as-is because all later code in this
# snippet refers to the model through this name.
ResNet50 = ResNet50()

callback_list = [
    # Stop once validation accuracy has not improved for 5 epochs.
    keras.callbacks.EarlyStopping(monitor='val_acc', patience=5),
    # Keep only the weights with the lowest validation loss seen so far.
    keras.callbacks.ModelCheckpoint(filepath='ResNet50_lib.h5',
                                    monitor='val_loss',
                                    save_best_only=True)
]

# Integer class labels -> sparse categorical cross-entropy.
ResNet50.compile(loss='sparse_categorical_crossentropy',
                 optimizer='adam',
                 metrics=['acc'])

ResNet50.summary()

# Time the training run.
import time
start = time.time()
history = ResNet50.fit(train_image,
                       train_label,
                       epochs=100,
                       callbacks=callback_list,
                       validation_data=(test_image, test_label))
# Use a distinct name for the duration: assigning the result back to
# `time` (as the original did) shadows the module and would break any
# later time.time() call.
elapsed = time.time() - start
print("테스트 시 소요 시간(초) : {}".format(elapsed))
# Original line was cut off mid-call; completed with the model's total
# parameter count, which is what the message announces.
print("전체 파라미터 수 : {}".format(ResNet50.count_params()))
# Example #2
# 0
# print("Second stage done.")
# try:
#     loss_history = np.append(loss_history, hist.history['val_loss'])
#     acc_history = np.append(acc_history, hist.history['val_acc'])
# except KeyError:
#     pass

# Stage 3: unfreeze the whole network and fine-tune end-to-end at a
# very small learning rate so the pretrained weights are only nudged.
print("Stage 3:")

for layer in model.layers:
    layer.trainable = True
# Raise dropout for the full fine-tune stage.
# NOTE(review): mutating .rate on an already-built layer may not take
# effect until the model is rebuilt/re-traced -- confirm against the
# installed Keras version.
dropout_layer = model.layers[-2]
dropout_layer.rate = .5
model.compile(optimizer=Adam(0.0000001),
              loss='categorical_crossentropy',
              metrics=METRICS)
print("Model compiled. Initiating training...")

# Keras requires an integer steps_per_epoch; the original float
# division was truncated or rejected depending on version. Ceil-divide
# so the final partial batch is still drawn each epoch.
hist = model.fit(train_generator.flow(X_Train, Y_Train, batch_size=batch_size),
                 steps_per_epoch=(len(X_Train) + batch_size - 1) // batch_size,
                 epochs=10,
                 class_weight=class_weights,
                 shuffle=True,  # ignored for generator input; kept for parity
                 validation_data=val_generator.flow(X_Val,
                                                    Y_Val,
                                                    batch_size=batch_size),
                 callbacks=callbacks,
                 verbose=2)
# Example #3
# 0
# Training configuration: categorical cross-entropy over one-hot labels,
# accuracy as the reported metric, RMSprop at a caller-supplied rate.
loss_fun = 'categorical_crossentropy'
metrics = ['accuracy']
# NOTE(review): lowercase `optimizers.rmsprop` and the `lr` kwarg are the
# legacy Keras spelling; newer releases expect
# optimizers.RMSprop(learning_rate=...) -- confirm the installed version.
optimizer = optimizers.rmsprop(lr=learning_rate)

# ### Train Model
# Here I use fine-tuning by replacing the last fully connected layer and training all network parameters.

# In[ ]:

# Set number of epochs, batch size and number of batches to yield from generator for each epoch
epoch_nb = 10
batch_size = 64
steps_per_epoch = 5

# Compile model
ResNet50.compile(optimizer=optimizer, loss=loss_fun, metrics=metrics)

# Save optimal parameters (i.e., of train iteration having the minimal validation loss).
# (Monitors the default 'val_loss'; writes only when it improves.)
checkpointer = ModelCheckpoint(filepath='./Weights/weights.best.ResNet50.hdf5',
                               verbose=1,
                               save_best_only=True)

# Generate validation set (use same set for each batch, in order to correctly save best weights)
# Pulls the first batch from the validation generator once, so every
# epoch is validated against the identical samples.
valid_X, valid_y = valid_generator.__getitem__(0)

# Fit model
ResNet50.fit_generator(
    generator=train_generator,
    validation_data=(valid_X, valid_y),
    epochs=epoch_nb,
    steps_per_epoch=steps_per_epoch,