import pandas as pd
from keras.callbacks import TensorBoard

# SegNet and data_gen_small are project-local helpers (see the sketch below).


def main(args):
    # Read the train/validation file lists.
    train_list = pd.read_csv(args.train_list, header=None)
    val_list = pd.read_csv(args.val_list, header=None)

    # Set the necessary directories.
    trainimg_dir = args.trainimg_dir
    trainmsk_dir = args.trainmsk_dir
    valimg_dir = args.valimg_dir
    valmsk_dir = args.valmsk_dir

    train_gen = data_gen_small(
        trainimg_dir,
        trainmsk_dir,
        train_list,
        args.batch_size,
        [args.input_shape[0], args.input_shape[1]],
        args.n_labels,
    )
    val_gen = data_gen_small(
        valimg_dir,
        valmsk_dir,
        val_list,
        args.batch_size,
        [args.input_shape[0], args.input_shape[1]],
        args.n_labels,
    )

    model = SegNet(args.n_labels, args.kernel, args.pool_size, args.output_mode)
    model.build(input_shape=(1, 256, 256, 3))
    model.summary()  # summary() prints itself and returns None, so no print() needed
    model.compile(loss=args.loss, optimizer=args.optimizer, metrics=["accuracy"])

    model.fit_generator(
        train_gen,
        steps_per_epoch=args.epoch_steps,
        epochs=args.n_epochs,
        validation_data=val_gen,
        validation_steps=args.val_steps,
        callbacks=[TensorBoard(log_dir="./run")],
    )

    model.save_weights(args.save_dir + str(args.n_epochs) + ".hdf5")
    print("save weights done.")
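The data_gen_small generator is not shown above. Below is a minimal sketch of what such a generator might look like, assuming the CSV lists hold file names in their first column, OpenCV handles image I/O, and masks are one-hot encoded and flattened to match a per-pixel softmax head; all of these details are assumptions, not the original implementation.

import os
import numpy as np
import cv2
from keras.utils import to_categorical


def data_gen_small(img_dir, mask_dir, lists, batch_size, dims, n_labels):
    # Yield (images, one-hot masks) batches indefinitely, sampling rows at random.
    while True:
        ix = np.random.choice(np.arange(len(lists)), batch_size)
        imgs, labels = [], []
        for i in ix:
            name = lists.iloc[i, 0]  # assumes file names sit in the first CSV column
            img = cv2.imread(os.path.join(img_dir, name))
            imgs.append(cv2.resize(img, (dims[1], dims[0])) / 255.0)
            mask = cv2.imread(os.path.join(mask_dir, name), cv2.IMREAD_GRAYSCALE)
            mask = cv2.resize(mask, (dims[1], dims[0]), interpolation=cv2.INTER_NEAREST)
            # Flatten to (H*W, n_labels) to pair with a per-pixel softmax output.
            labels.append(to_categorical(mask, n_labels).reshape(dims[0] * dims[1], n_labels))
        yield np.asarray(imgs), np.asarray(labels)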
import os
import numpy as np
import matplotlib.pyplot as plt

# testImages, controls, testControls, x_train, sc (an sklearn scaler), SegNet,
# IMG_H, IMG_W, EPOCHS and BATCH_SIZE are defined earlier in the script.

# Normalize the test frames and drop the final sample.
x_test = np.asarray(testImages) / 255.0
x_test = np.delete(x_test, len(x_test) - 1, axis=0)

# Scale the steering angles into the range the network is trained on.
y_train = sc.fit_transform(np.asarray(controls))

# Prepare model for training.
model = SegNet((IMG_H, IMG_W))
model.compile(optimizer='adam', loss='mean_squared_error')
model.fit(x_train, y_train, epochs=EPOCHS, batch_size=BATCH_SIZE)

os.makedirs('models', exist_ok=True)
model.save_weights('models/res.h5', save_format='h5')

# Predict on the testing dataset and map predictions back to real angles.
predictions = model.predict(x_test)
predictions = sc.inverse_transform(predictions)
print(predictions)

# Show results and compare.
plt.plot(testControls, color='blue', label='Real steering')
plt.plot(predictions, color='red', label='Predicted steering')
plt.title('Steering Angle Prediction')
plt.xlabel('Frame')
plt.ylabel('Steering Angle')
plt.legend()
plt.show()
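For completeness, here is one plausible way the arrays above could be prepared, assuming frames on disk plus a CSV of steering angles; the file layout, column names, input size, and hyperparameter values are all hypothetical, and the held-out testImages/testControls would be built the same way from a separate split.

import cv2
import numpy as np
import pandas as pd
from sklearn.preprocessing import MinMaxScaler

IMG_H, IMG_W = 66, 200        # assumed network input size
EPOCHS, BATCH_SIZE = 10, 32   # assumed hyperparameters

# Hypothetical layout: frames/<name>.jpg plus a steering.csv with 'frame,angle' columns.
log = pd.read_csv('steering.csv')
frames = [cv2.resize(cv2.imread(f'frames/{name}'), (IMG_W, IMG_H)) for name in log['frame']]
controls = log['angle'].values.reshape(-1, 1)  # 2D so the scaler accepts it

x_train = np.asarray(frames) / 255.0
sc = MinMaxScaler()  # any scaler with fit_transform/inverse_transform would do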
import argparse
import datetime
import os

import numpy as np
import tensorflow as tf
import keras

# SegNet is a project-local model builder imported elsewhere.


def main():
    # Parse arguments.
    parser = argparse.ArgumentParser()
    kwargs = {
        'type': int,
        'default': 100,
        'help': 'Number of training epochs. default: 100'
    }
    parser.add_argument('-e', '--epochs', **kwargs)
    kwargs = {
        'type': int,
        'default': 10,
        'help': 'Epoch interval between model checkpoints. default: 10'
    }
    parser.add_argument('-c', '--checkpoint_interval', **kwargs)
    kwargs = {
        'type': int,
        'default': 1,
        'help': 'Number of samples per mini-batch. default: 1'
    }
    parser.add_argument('-b', '--batch_size', **kwargs)
    kwargs = {
        'default': False,
        'action': 'store_true',
        'help': 'Keep the whole dataset in memory and pass it to fit() directly. '
                'If omitted, batches are streamed through a generator instead.'
    }
    parser.add_argument('--onmemory', **kwargs)
    args = parser.parse_args()

    # Prepare training data.
    dataset = np.load('./temp/dataset.npz')
    train_x = dataset['train_x']
    train_y = dataset['train_y']
    test_x = dataset['test_x']
    test_y = dataset['test_y']

    # Prepare TensorFlow (TF1-style session with on-demand GPU memory growth).
    config = tf.ConfigProto(gpu_options=tf.GPUOptions(allow_growth=True))
    session = tf.Session(config=config)
    keras.backend.tensorflow_backend.set_session(session)

    # Prepare model.
    model = SegNet(shape=(360, 480, 3))
    model.compile(loss='binary_crossentropy',
                  optimizer='adadelta',
                  metrics=['accuracy'])

    # Training.
    callbacks = []
    timestamp = datetime.datetime.now().strftime('%Y%m%d%H%M%S')

    directory = f'./logs/{timestamp}/'
    os.makedirs(directory, exist_ok=True)
    callbacks.append(keras.callbacks.TensorBoard(log_dir=directory))

    filename = 'model-{epoch:04d}.h5'
    directory = f'./temp/{timestamp}/'
    os.makedirs(directory, exist_ok=True)
    callbacks.append(
        keras.callbacks.ModelCheckpoint(filepath=f'{directory}{filename}',
                                        monitor='val_loss',
                                        verbose=0,
                                        save_best_only=False,
                                        save_weights_only=False,
                                        mode='auto',
                                        period=args.checkpoint_interval))
    model.save_weights(f'{directory}{filename}'.format(epoch=0))

    # Note: Keras does not accept class_weight='balanced' (that is sklearn's
    # convention); pass a dict of class weights instead (see the sketch below).
    if args.onmemory:
        model.fit(x=train_x,
                  y=train_y,
                  validation_data=(test_x, test_y),
                  epochs=args.epochs,
                  batch_size=args.batch_size,
                  shuffle=True,
                  verbose=1,
                  callbacks=callbacks)
    else:
        class Generator(keras.utils.Sequence):
            def __init__(self, x, y, batch_size, shuffle):
                self.x = x
                self.y = y
                self.batch_size = batch_size
                self.indices = np.arange(len(self.x))
                self.shuffle = shuffle
                assert len(self.x) == len(self.y)
                assert len(self.x) % self.batch_size == 0

            def __getitem__(self, index):
                i = index * self.batch_size
                indices = self.indices[i:i + self.batch_size]
                return self.x[indices], self.y[indices]

            def __len__(self):
                return len(self.x) // self.batch_size

            def on_epoch_end(self):
                # Reshuffle sample order between epochs for the training split.
                if self.shuffle:
                    self.indices = np.random.permutation(self.indices)

        model.fit_generator(generator=Generator(train_x, train_y,
                                                args.batch_size, True),
                            validation_data=Generator(test_x, test_y,
                                                      args.batch_size, False),
                            epochs=args.epochs,
                            shuffle=True,
                            verbose=1,
                            callbacks=callbacks)


if __name__ == '__main__':
    main()
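Since Keras has no 'balanced' shortcut, one way to reproduce sklearn's heuristic as the dict that fit() expects is sketched below; whether class_weight applies cleanly to per-pixel segmentation targets depends on the Keras version, so treat this as an assumption to verify.

import numpy as np


def balanced_class_weight(y):
    # sklearn's 'balanced' heuristic: n_samples / (n_classes * bincount(class)).
    flat = np.asarray(y).astype(int).ravel()
    counts = np.bincount(flat)
    return {cls: flat.size / (len(counts) * n) for cls, n in enumerate(counts) if n > 0}

# e.g. model.fit(..., class_weight=balanced_class_weight(train_y))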