# --- MODEL -------------------------------------------------------------
# Optionally apply L2 weight decay to every kernel before compiling.
if config.l2 > 0:
    model = apply_kernel_regularizer(model, tf.keras.regularizers.l2(config.l2))

model.compile(optimizer=opt, loss=custom_loss, metrics=[d_total, cos_sim])
model.summary()

""" DATA """
train_set = make_dataset(config, training=True)
test_set = make_dataset(config, training=False)

""" TRAINING """
callbacks = [
    # Append so restarts keep earlier epochs in the same log file.
    CSVLogger(NAME.replace('.h5', '.log'), append=True),
    LearningRateScheduler(custom_scheduler(4096, TOTAL_EPOCH / 12)),
    # Stochastic Weight Averaging over the second half of training.
    SWA(start_epoch=TOTAL_EPOCH // 2, swa_freq=2),
    ModelCheckpoint(NAME, monitor='val_d_total', save_best_only=True, verbose=1),
    TerminateOnNaN()
]

# NOTE(review): `batch_size` removed from fit() — train_set/test_set come from
# make_dataset() and are consumed with steps_per_epoch/validation_steps,
# i.e. presumably already-batched tf.data.Datasets, and Keras raises a
# ValueError when batch_size is passed alongside a dataset input.
# TODO confirm make_dataset batches internally.
model.fit(train_set,
          epochs=TOTAL_EPOCH,
          steps_per_epoch=config.steps_per_epoch,
          validation_data=test_set,
          validation_steps=12,
          callbacks=callbacks)

# Weights at this point are the SWA-averaged ones (swapped in by the SWA
# callback at the end of training).
model.save(NAME.replace('.h5', '_SWA.h5'))
mode='max', save_best_only=True), TensorBoard(log_dir=f'./logs/{NAME.replace(".h5", "")}', histogram_freq=0, profile_batch=2), TerminateOnNaN() ] model.fit(train_dataset, epochs=TOTAL_EPOCH, batch_size=BATCH_SIZE, validation_data=(val_x, val_y), steps_per_epoch=x.shape[0]//BATCH_SIZE, class_weight=class_weight, callbacks=callbacks) result = model.evaluate(test_x, test_y, verbose=1) with open(NAME.replace('.h5', '.log'), 'a') as f: f.write(f'\n{result}\n') # For SWA repeat = x.shape[0] // BATCH_SIZE for x, y in train_dataset: model(x, training=True) repeat -= 1 if repeat <= 0: break model.evaluate(test_x, test_y, verbose=1) model.save(NAME.replace('.h5', "_SWA.h5"))