# Training driver (variant 1): builds the Keras model, compiles it for binary
# classification, then runs epochs 20-49, evaluating on the test stream after
# each epoch.
# NOTE(review): `model`, `optimizer`, `DataGenerator`, `tf`, `np` and the
# *_PATH constants are defined elsewhere in this file/project — not visible in
# this chunk. The loop body also appears to continue past the end of this
# chunk (the log header is written below, but no per-epoch log write is
# visible here).

# `model` starts out as a project wrapper object; `.model(training=True)`
# presumably returns the underlying tf.keras.Model — the name is rebound.
model = model.model(training=True)
model.summary()
model.compile(optimizer=optimizer, loss=tf.keras.losses.BinaryCrossentropy(), metrics=["accuracy"])
# model.load_weights( MODEL_CHECK_POINT_PATH + 'cp{}.ckpt'.format(50))
train_dataset = DataGenerator(TRAIN_SUMMARY_PATH)
test_dataset = DataGenerator(TEST_SUMMARY_PATH)
steps_per_epoch = train_dataset.number_of_batches
# Write the TSV log header; mode 'w' truncates any previous log file.
with open(LOG_PATH, 'w') as wf:
    wf.write('{}\t{}\t{}\t{}\t{}\n'.format('step', 'train_loss', 'train_acc', 'test_loss', 'test_acc'))
# Epochs 20-49 — presumably resuming from an epoch-20 checkpoint, but the
# load_weights call above is commented out, so as written this trains the
# freshly-compiled model starting at step counter 20. TODO confirm intent.
for i in range(20, 50):
    # Fresh generators each epoch; image augmentation only on the training stream.
    train_data = train_dataset.data_generator(training=True, img_augmentation=True)
    test_data = test_dataset.data_generator(training=False)
    # Reassigned every iteration but constant — effectively pins lr at 1e-4.
    # NOTE(review): modern tf.keras spells this `optimizer.learning_rate`.
    model.optimizer.lr = 0.0001
    # One epoch per fit call so per-epoch metrics can be captured below.
    # (fit_generator is deprecated in favor of model.fit in current tf.keras.)
    history = model.fit_generator(train_data, steps_per_epoch=steps_per_epoch, epochs=1)
    train_loss = (history.history['loss'])[0]
    train_acc = (history.history['accuracy'])[0]
    # Evaluate batch-by-batch over the whole test stream, then average.
    test_loss = []
    test_acc = []
    for x_test, y_test in test_data:
        loss, acc = model.evaluate(x_test, y_test, verbose=False)
        test_loss.append(loss)
        test_acc.append(acc)
        # break
    # Unweighted mean over batches — NOTE(review): biased if the final batch
    # is smaller than the rest. `test_acc` is presumably averaged in code
    # beyond this chunk.
    test_loss = np.mean(test_loss)
# Training driver (variant 2): same overall pipeline — compile, log header,
# per-epoch fit + batch-wise test evaluation — but trains epochs 0-49, takes
# its optimizer from the project wrapper, uses h5-style checkpoints, and has
# no augmentation flag or per-epoch lr reassignment.
# NOTE(review): `model`, `DataGenerator`, `tf` and the path/summary constants
# come from elsewhere in the project; this chunk ends mid-loop (nothing is
# written to the log after the header within the visible code).

# Obtain the optimizer from the project wrapper, then rebind `model` to the
# underlying tf.keras.Model (order matters: the wrapper is consumed here).
optimizer = model.optimizer()
model = model.model(training=True)
model.compile(optimizer=optimizer, loss=tf.keras.losses.BinaryCrossentropy(), metrics=["accuracy"])
# model.load_weights('check_point/cp{}.h5'.format(50))
train_data = DataGenerator(TRAIN_SUMMARY)
steps_per_epoch = train_data.number_of_training_batches
test_data = DataGenerator(TEST_SUMMARY)
# TSV log header; mode 'w' truncates any previous run's log.
with open(LOG_PATH, 'w') as wf:
    wf.write('{}\t{}\t{}\t{}\t{}\n'.format('step', 'train_loss', 'train_acc', 'test_loss', 'test_acc'))
for i in range(50):
    # Fresh generator objects each epoch for both streams.
    train = train_data.data_generator(training=True)
    test = test_data.data_generator(training=False)
    # One epoch per call so history carries exactly this epoch's metrics.
    # (fit_generator is deprecated in favor of model.fit in current tf.keras.)
    history = model.fit_generator(train, steps_per_epoch=steps_per_epoch, epochs=1)
    train_loss = (history.history['loss'])[0]
    train_acc = (history.history['accuracy'])[0]
    # Collect per-batch test metrics; any aggregation/logging happens past
    # the end of this chunk.
    test_loss = []
    test_acc = []
    for x_test, y_test in test:
        loss, acc = model.evaluate(x_test, y_test, verbose=False)
        test_loss.append(loss)
        test_acc.append(acc)