# --- Prepare a clean output directory for this training run -----------------
# NOTE(review): paths below are built by plain string concatenation, so this
# assumes PathOutput already ends with a path separator — confirm at call site.
if not os.path.exists(PathOutput):
    os.makedirs(PathOutput)
else:
    # Clear stale checkpoints/logs from a previous run. Only a flat directory
    # is handled correctly: files in subdirectories would not resolve via
    # PathOutput + filename.
    for dirName, subdirList, fileList in os.walk(PathOutput):
        for filename in fileList:
            os.remove(PathOutput + filename)

# Per-epoch metrics log.
logfile = PathOutput + 'allnode_PIN.log'
csv_logger = CSVLogger(logfile)

# Checkpoint name encodes epoch and validation loss (e.g. weights.012-0.34.hdf5);
# the zero-padded epoch prefix makes lexicographic order equal epoch order.
filename = "weights.{epoch:03d}-{val_loss:.2f}.hdf5"
checkpointer = ModelCheckpoint(monitor='val_loss',
                               filepath=PathOutput + filename,
                               verbose=1,
                               save_best_only=True,
                               save_weights_only=True)
#model.set_weights(init_weights)
# Reset the callback's internal best-so-far state for a fresh run.
# np.inf (lowercase): np.Inf was removed in NumPy 2.0.
checkpointer.epochs_since_last_save = 0
checkpointer.best = np.inf

lrate = LearningRateScheduler(my_learning_rate, verbose=1)

model.fit(Data_0, Labels_0,
          epochs=epochs,
          batch_size=batch_size,
          validation_data=(Data_1, Labels_1),
          callbacks=[checkpointer, csv_logger, lrate],
          verbose=2,
          class_weight=weight)

##----------------------- Look for the best model to evaluate
# With save_best_only=True the most recently written checkpoint is the best
# one, and the epoch-numbered names sort so that the lexicographically last
# weights file is the most recent. Load those weights, then persist the
# full model.
for dirName, subdirList, fileList in os.walk(PathOutput):
    fileList.sort()
    tmp = fileList[-1]
    print(tmp)
    filename = PathOutput + tmp
    model.load_weights(filename)
model.save(PathOutput + 'best_model.hd5')
except: print 'Loading weights' model = create_network(args.model, tile_size=args.tile_size, lr=args.lr) model.load_weights(checkpoint_path) model_out_path = join(model_dir, 'checkpoint-{epoch:02d}-{val_loss:.3f}.hdf5') checkpointer = ModelCheckpoint(model_out_path, monitor='val_loss', verbose=1, save_best_only=True, mode='auto', period=1) checkpointer.epochs_since_last_save = initial_epoch initial_epoch = int(initial_epoch) print("Epochs since last save: %d." % checkpointer.epochs_since_last_save) datagen = ImageDataGenerator(rotation_range=180, width_shift_range=0.2, height_shift_range=0.2, rescale=None, shear_range=0.0, zoom_range=args.zoom_range, horizontal_flip=True, fill_mode='reflect') lr_scheduler = MyLearningRateScheduler(epoch_unfreeze=80, start_lr=args.lr,