def main():
    print("loading data...")
    ds = dataset.Dataset(classes=classes)
    train_X, train_y = ds.load_data('train')
    train_X = ds.preprocess_inputs(train_X)
    train_Y = ds.reshape_labels(train_y)
    print("input data shape...", train_X.shape)
    print("input label shape...", train_Y.shape)

    test_X, test_y = ds.load_data('test')
    test_X = ds.preprocess_inputs(test_X)
    test_Y = ds.reshape_labels(test_y)

    print("creating model...")
    model = SegNet(input_shape=input_shape, classes=classes)
    model.compile(loss="categorical_crossentropy",
                  optimizer='adadelta', metrics=["accuracy"])
    model.fit(train_X, train_Y, batch_size=batch_size, epochs=epochs,
              verbose=1, class_weight=class_weighting,
              validation_data=(test_X, test_Y), shuffle=True)
    model.save('seg.h5')
def main():
    input_shape = (360, 480, 3)
    classes = 12
    epochs = 100
    batch_size = 1
    log_path = './logs/'
    class_weighting = [0.2595, 0.1826, 4.5640, 0.1417, 0.5051, 0.3826,
                       9.6446, 1.8418, 6.6823, 6.2478, 3.0, 7.3614]

    # set gpu usage
    config = tf.ConfigProto(gpu_options=tf.GPUOptions(
        allow_growth=True, per_process_gpu_memory_fraction=0.8))
    session = tf.Session(config=config)
    set_session(session)

    print("loading data...")
    ds = dataset.Dataset(classes=classes,
                         train_file="CamVid/train.txt",
                         test_file="CamVid/test.txt")
    # need to implement, y shape is (None, 360, 480, classes)
    train_x, train_y = ds.load_data(root_path="CamVid", mode='train')
    train_x = ds.preprocess_inputs(train_x)
    train_y = ds.reshape_labels(train_y)
    print("input data shape...", train_x.shape)
    print("input label shape...", train_y.shape)

    # need to implement, y shape is (None, 360, 480, classes)
    test_x, test_y = ds.load_data(root_path="CamVid", mode='test')
    test_x = ds.preprocess_inputs(test_x)
    test_y = ds.reshape_labels(test_y)

    tb_cb = TensorBoard(log_dir=log_path, histogram_freq=1,
                        write_graph=True, write_images=True)

    print("creating model...")
    model = SegNet(input_shape=input_shape, classes=classes)
    model.compile(loss="categorical_crossentropy",
                  optimizer='adadelta', metrics=["accuracy"])
    model.fit(train_x, train_y, batch_size=batch_size, epochs=epochs,
              verbose=1, class_weight=class_weighting,
              validation_data=(test_x, test_y), shuffle=True,
              callbacks=[tb_cb])
    model.save('seg.h5')
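# The "need to implement" comments above concern ds.reshape_labels, which has
# to turn per-pixel integer label maps of shape (N, 360, 480) into one-hot
# tensors of shape (N, 360, 480, classes). dataset.Dataset itself is not part
# of this snippet, so the following is a minimal sketch under that assumption,
# not the repo's actual implementation:
import numpy as np

def reshape_labels(labels, classes=12):
    # (N, H, W) integer class ids -> (N, H, W, classes) one-hot float32;
    # indexing an identity matrix with the label array broadcasts per pixel
    return np.eye(classes, dtype=np.float32)[labels]

# usage: reshape_labels(train_y).shape == (len(train_y), 360, 480, 12)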
def main():
    print("loading data...")
    ds = dataset.DataSet(classes=classes)
    # need to implement, y shape is (None, 360, 480, classes)
    train_X, train_y = ds.load_data('train')
    train_X = ds.preprocess_inputs(train_X)
    train_Y = ds.reshape_labels(train_y)
    print("input data shape...", train_X.shape)
    print("input label shape...", train_Y.shape)

    # need to implement, y shape is (None, 360, 480, classes)
    test_X, test_y = ds.load_data('test')
    test_X = ds.preprocess_inputs(test_X)
    test_Y = ds.reshape_labels(test_y)

    tb_cb = keras.callbacks.TensorBoard(log_dir=log_filepath, histogram_freq=1,
                                        write_graph=True, write_images=True)
    fpath = 'weights.{epoch:02d}.hdf5'
    mc_cb = keras.callbacks.ModelCheckpoint(fpath, monitor='val_loss',
                                            verbose=0, save_best_only=False,
                                            save_weights_only=False,
                                            mode='auto', period=3)

    print("creating model...")
    model = SegNet(input_shape=input_shape, classes=classes)
    model.compile(loss="categorical_crossentropy",
                  optimizer='adadelta', metrics=["accuracy"])
    model.fit(train_X, train_Y, batch_size=batch_size, epochs=epochs,
              verbose=1, class_weight=class_weighting,
              validation_data=(test_X[0:5], test_Y[0:5]), shuffle=True,
              callbacks=[mc_cb, tb_cb])
    model.save('seg.h5')
def main(args):
    # set the necessary lists
    train_list = pd.read_csv(args.train_list, header=None)
    val_list = pd.read_csv(args.val_list, header=None)

    # set the necessary directories
    trainimg_dir = args.trainimg_dir
    trainmsk_dir = args.trainmsk_dir
    valimg_dir = args.valimg_dir
    valmsk_dir = args.valmsk_dir

    train_gen = data_gen_small(
        trainimg_dir, trainmsk_dir, train_list, args.batch_size,
        [args.input_shape[0], args.input_shape[1]], args.n_labels,
    )
    val_gen = data_gen_small(
        valimg_dir, valmsk_dir, val_list, args.batch_size,
        [args.input_shape[0], args.input_shape[1]], args.n_labels,
    )

    model = SegNet(args.n_labels, args.kernel, args.pool_size, args.output_mode)
    model.build(input_shape=(1, 256, 256, 3))
    model.summary()  # summary() prints itself; wrapping it in print() just emits "None"

    model.compile(loss=args.loss, optimizer=args.optimizer, metrics=["accuracy"])
    model.fit_generator(train_gen, steps_per_epoch=args.epoch_steps,
                        epochs=args.n_epochs, validation_data=val_gen,
                        validation_steps=args.val_steps,
                        callbacks=[TensorBoard(log_dir="./run")])

    model.save_weights(args.save_dir + str(args.n_epochs) + ".hdf5")
    print("save weights done...")
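# data_gen_small is called above but not defined in this snippet. A minimal
# sketch of what such a generator could yield, assuming RGB .png images,
# per-pixel integer .png masks named after entries in the CSV list, and
# one-hot targets flattened to (H*W, n_labels) for output_mode="softmax";
# all of these details are assumptions, not the repo's actual code:
import numpy as np
import cv2

def data_gen_small(img_dir, mask_dir, lists, batch_size, dims, n_labels):
    while True:
        ix = np.random.choice(np.arange(len(lists)), batch_size)
        imgs, labels = [], []
        for i in ix:
            name = lists.iloc[i, 0]
            # resize to (height, width) = dims and scale pixels to [0, 1]
            img = cv2.imread(img_dir + name + ".png")
            img = cv2.resize(img, (dims[1], dims[0])) / 255.0
            imgs.append(img)
            # nearest-neighbor resize keeps mask values as valid class ids
            mask = cv2.imread(mask_dir + name + ".png", cv2.IMREAD_GRAYSCALE)
            mask = cv2.resize(mask, (dims[1], dims[0]),
                              interpolation=cv2.INTER_NEAREST)
            # one-hot encode, flattened to (H*W, n_labels)
            labels.append(np.eye(n_labels, dtype=np.float32)[mask.reshape(-1)])
        yield np.asarray(imgs, dtype=np.float32), np.asarray(labels)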
def main():
    print("loading data...")
    ds = dataset.Dataset(test_file='test_5.txt', classes=classes)
    # need to implement, y shape is (None, 360, 480, classes)
    train_X, train_y = ds.load_data('train')
    train_X = ds.preprocess_inputs(train_X)
    train_Y = ds.reshape_labels(train_y)
    print("input data shape...", train_X.shape)
    print("input label shape...", train_Y.shape)

    # need to implement, y shape is (None, 360, 480, classes)
    test_X, test_y = ds.load_data('test')
    test_X = ds.preprocess_inputs(test_X)
    test_Y = ds.reshape_labels(test_y)

    tb_cb = keras.callbacks.TensorBoard(log_dir=log_filepath, histogram_freq=1,
                                        write_graph=True, write_images=True)

    print("creating model...")
    model = SegNet(input_shape=input_shape, classes=classes)
    model.compile(loss="categorical_crossentropy",
                  optimizer='adadelta', metrics=["accuracy"])
    model.fit(train_X, train_Y, batch_size=batch_size, epochs=epochs,
              verbose=1, class_weight=class_weighting,
              validation_data=(test_X, test_Y), shuffle=True,
              callbacks=[tb_cb])
    model.save('s.h5')
# testControls = np.asarray(testControls).astype('float32')
# controls = np.asarray(controls).astype('float32')
x_train = np.asarray(images) / 255.0
x_train = np.delete(x_train, len(x_train) - 1, axis=0)
print(x_train.shape)
# sys.exit()
x_test = np.asarray(testImages) / 255.0
x_test = np.delete(x_test, len(x_test) - 1, axis=0)
y_train = sc.fit_transform(np.asarray(controls))

# Prepare model for training
model = SegNet((IMG_H, IMG_W))
model.compile(optimizer='adam', loss='mean_squared_error')
model.fit(x_train, y_train, epochs=EPOCHS, batch_size=BATCH_SIZE)
if not os.path.exists('models'):
    os.mkdir('models')
model.save_weights('models/res.h5', save_format='h5')

# Predict on testing dataset
predictions = model.predict(x_test)
predictions = sc.inverse_transform(predictions)
print(predictions)

# Show results and compare
plt.plot(testControls, color='blue', label='Real steering')
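# Two details this fragment leaves out: the scaler sc is never defined, and
# the comparison plot only draws the real steering curve. A minimal sketch of
# the assumed pieces (sklearn's StandardScaler for sc, and a second curve for
# the predictions; both are guesses about intent, not the original code):
from sklearn.preprocessing import StandardScaler

sc = StandardScaler()  # would be created before the fit_transform call above

plt.plot(predictions, color='red', label='Predicted steering')
plt.legend()
plt.show()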
def main():
    # Parse arguments.
    parser = argparse.ArgumentParser()
    kwargs = {
        'type': int,
        'default': 100,
        'help': 'The number of times of learning. default: 100'
    }
    parser.add_argument('-e', '--epochs', **kwargs)

    kwargs = {
        'type': int,
        'default': 10,
        'help': 'The frequency of saving model. default: 10'
    }
    parser.add_argument('-c', '--checkpoint_interval', **kwargs)

    kwargs = {
        'type': int,
        'default': 1,
        'help': 'The number of samples contained per mini batch. default: 1'
    }
    parser.add_argument('-b', '--batch_size', **kwargs)

    kwargs = {
        'default': False,
        'action': 'store_true',
        'help': 'Whether to store all data on the GPU. If this option is not '
                'specified, use both CPU memory and GPU memory.'
    }
    parser.add_argument('--onmemory', **kwargs)
    args = parser.parse_args()

    # Prepare training data.
    dataset = np.load('./temp/dataset.npz')
    train_x = dataset['train_x']
    train_y = dataset['train_y']
    test_x = dataset['test_x']
    test_y = dataset['test_y']

    # Prepare tensorflow.
    config = tf.ConfigProto(gpu_options=tf.GPUOptions(allow_growth=True))
    session = tf.Session(config=config)
    keras.backend.tensorflow_backend.set_session(session)

    # Prepare model.
    model = SegNet(shape=(360, 480, 3))
    model.compile(loss='binary_crossentropy',
                  optimizer='adadelta',
                  metrics=['accuracy'])

    # Training.
    callbacks = []
    timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")

    directory = f'./logs/{timestamp}/'
    os.makedirs(directory, exist_ok=True)
    callbacks.append(keras.callbacks.TensorBoard(log_dir=directory))

    filename = 'model-{epoch:04d}.h5'
    directory = f'./temp/{timestamp}/'
    os.makedirs(directory, exist_ok=True)
    callbacks.append(
        keras.callbacks.ModelCheckpoint(filepath=f'{directory}{filename}',
                                        monitor='val_loss',
                                        verbose=0,
                                        save_best_only=False,
                                        save_weights_only=False,
                                        mode='auto',
                                        period=args.checkpoint_interval))
    model.save_weights(f'{directory}{filename}'.format(epoch=0))

    # NOTE: class_weight='balanced' is sklearn's convention; Keras expects a
    # dict mapping class indices to weights (see the sketch after this
    # function for one way to build it).
    if args.onmemory:
        model.fit(x=train_x,
                  y=train_y,
                  validation_data=(test_x, test_y),
                  epochs=args.epochs,
                  batch_size=args.batch_size,
                  class_weight='balanced',
                  shuffle=True,
                  verbose=1,
                  callbacks=callbacks)
    else:
        class Generator(keras.utils.Sequence):
            def __init__(self, x, y, batch_size, shuffle):
                self.x = x
                self.y = y
                self.batch_size = batch_size
                self.indices = np.arange(len(self.x))
                self.shuffle = shuffle
                assert len(self.x) == len(self.y)
                assert len(self.x) % self.batch_size == 0

            def __getitem__(self, index):
                i = index * self.batch_size
                indices = self.indices[i:i + self.batch_size]
                return self.x[indices], self.y[indices]

            def __len__(self):
                return len(self.x) // self.batch_size

            def on_epoch_end(self):
                # reshuffle sample order between epochs for the training split
                if self.shuffle:
                    self.indices = np.random.permutation(self.indices)

        model.fit_generator(generator=Generator(train_x, train_y,
                                                args.batch_size, True),
                            validation_data=Generator(test_x, test_y,
                                                      args.batch_size, False),
                            epochs=args.epochs,
                            class_weight='balanced',
                            shuffle=True,
                            verbose=1,
                            callbacks=callbacks)
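# Keras' fit does not understand class_weight='balanced'; that string is
# sklearn's shorthand. A minimal sketch of building an equivalent dict with
# sklearn, assuming train_y holds integer class labels (for one-hot masks you
# would argmax over the last axis first; both points are assumptions about
# this script's data, not confirmed by the snippet):
import numpy as np
from sklearn.utils.class_weight import compute_class_weight

def balanced_class_weight(train_y):
    labels = np.asarray(train_y).reshape(-1).astype(int)
    classes = np.unique(labels)
    weights = compute_class_weight(class_weight='balanced',
                                   classes=classes, y=labels)
    # e.g. {0: 0.7, 1: 1.6, ...}, usable as fit(..., class_weight=...)
    return dict(zip(classes, weights))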