# Train / evaluate pipeline for the Xception voice-gender classifier.
# NOTE(review): relies on `model`, `op` (optimizer), `x_train`, `y_train`,
# `x_test`, `y_test`, and the `EarlyStopping` / `ReduceLROnPlateau` /
# `ModelCheckpoint` / `load_model` imports being defined earlier in the file.
batch_size = 4

es = EarlyStopping(monitor='val_loss', patience=20, restore_best_weights=True, verbose=1)
# BUG FIX: the original passed `vactor=0.5` — a typo for `factor`. The
# misspelled keyword meant the intended halve-LR-on-plateau setting was never
# applied (current Keras rejects the unknown kwarg outright).
lr = ReduceLROnPlateau(monitor='val_loss', factor=0.5, patience=10, verbose=1)
# Best-epoch checkpoint path; also reused below to reload the best weights.
path = 'C:/nmb/nmb_data/h5/5s/Xception/xception_sgd_1.h5'
mc = ModelCheckpoint(path, monitor='val_loss', verbose=1, save_best_only=True)

# Integer labels -> sparse categorical crossentropy.
model.compile(optimizer=op, loss="sparse_categorical_crossentropy", metrics=['acc'])
history = model.fit(x_train, y_train, epochs=1000, batch_size=batch_size,
                    validation_split=0.2, callbacks=[es, lr, mc])

# Evaluation / prediction: reload the checkpointed best model before scoring.
model = load_model('C:/nmb/nmb_data/h5/5s/Xception/xception_sgd_1.h5')
# model.load_weights('C:/nmb/nmb_data/h5/5s/Xception/xception_sgd_1.h5')
result = model.evaluate(x_test, y_test, batch_size=8)
print("loss : {:.5f}".format(result[0]))
print("acc : {:.5f}".format(result[1]))

############################################ PREDICT ####################################
# Directories of female/male samples to predict on (rest of the predict loop
# is presumably below this chunk — not visible here).
pred = ['C:/nmb/nmb_data/predict_04_26/F', 'C:/nmb/nmb_data/predict_04_26/M']
count_f = 0
import tensorflow as tf
from tensorflow.keras.applications import Xception
# FIX: dropped `from tensorflow.keras.utils import multi_gpu_model` — it was
# never used in this script, and the symbol was removed in TF >= 2.4, so the
# import alone crashes the run on a current TensorFlow install.
import numpy as np
import datetime

# Timing benchmark: train Xception from scratch on random dummy data on GPU 0.
num_samples = 100
height = 71   # 71x71 is the minimum spatial input size Xception accepts
width = 71
num_classes = 100

start1 = datetime.datetime.now()
with tf.device('/gpu:0'):
    model = Xception(weights=None, input_shape=(height, width, 3), classes=num_classes)
    model.compile(loss='categorical_crossentropy', optimizer='rmsprop')

    # Generate dummy data — random inputs and random (non-one-hot) targets are
    # fine here since only throughput, not accuracy, is being measured.
    x = np.random.random((num_samples, height, width, 3))
    y = np.random.random((num_samples, num_classes))
    model.fit(x, y, epochs=3, batch_size=16)

    # NOTE(review): 'my_model_h5' has no '.h5' extension, so under TF2 this
    # writes a SavedModel *directory*, not an HDF5 file — 'my_model.h5' was
    # probably intended; confirm before relying on the format.
    model.save('my_model_h5')
end1 = datetime.datetime.now()
# NOTE(review): elapsed time (end1 - start1) is computed but never printed here;
# presumably reported further down the file.