def train_model(x_train, x_valid, y_train, y_valid, model_name, args):
    """Build, compile, train, and save the model.

    Args:
        x_train, y_train: training inputs and targets.
        x_valid, y_valid: validation inputs and targets.
        model_name: path the trained model is saved to.
        args: namespace providing learning_rate and nb_epoch (read here)
            plus whatever build_model/Generator consume.
    """
    model = build_model(args)
    model.compile(loss='mean_squared_error',
                  optimizer=Adam(lr=args.learning_rate))
    # Training data is augmented (True); validation data is not (False).
    train_generator = Generator(x_train, y_train, True, args)
    validation_generator = Generator(x_valid, y_valid, False, args)
    # BUG FIX: the original body had a bare `pass` here, so the generators
    # went unused and an UNTRAINED model was saved. Train with the same
    # settings as the sibling variant of this function.
    model.fit_generator(train_generator,
                        validation_data=validation_generator,
                        epochs=args.nb_epoch,
                        use_multiprocessing=False,
                        max_queue_size=10,
                        workers=4)
    model.save(model_name)
def train_model(x_train, x_valid, y_train, y_valid, model_name, args):
    """Build, compile, train, and save the model.

    Args:
        x_train, y_train: training inputs and targets.
        x_valid, y_valid: validation inputs and targets.
        model_name: path the trained model is saved to.
        args: namespace providing learning_rate and nb_epoch (read here)
            plus whatever build_model/Generator consume.
    """
    model = build_model(args)
    # BUG FIX: the original had a bare `pass` here and never compiled the
    # model; Keras raises "You must compile your model before using it"
    # when fit_generator is called on an uncompiled model. Compile with
    # the same loss/optimizer as the sibling variant of this function.
    model.compile(loss='mean_squared_error',
                  optimizer=Adam(lr=args.learning_rate))
    # Training data is augmented (True); validation data is not (False).
    train_generator = Generator(x_train, y_train, True, args)
    validation_generator = Generator(x_valid, y_valid, False, args)
    model.fit_generator(train_generator,
                        validation_data=validation_generator,
                        epochs=args.nb_epoch,
                        use_multiprocessing=False,
                        max_queue_size=10,
                        workers=4)
    model.save(model_name)
def train_model(x_train, x_valid, y_train, y_valid, model_name, args):
    """Build, compile, train, and save the model.

    Args:
        x_train, y_train: training inputs and targets.
        x_valid, y_valid: validation inputs and targets.
        model_name: path the trained model is saved to.
        args: namespace providing learning_rate and nb_epoch (read here)
            plus whatever build_model/Generator consume.
    """
    model = build_model(args)
    model.compile(loss='mean_squared_error',
                  optimizer=Adam(lr=args.learning_rate))
    # Only the training split is augmented.
    train_gen = Generator(x_train, y_train, True, args)
    valid_gen = Generator(x_valid, y_valid, False, args)
    fit_options = {
        'validation_data': valid_gen,
        'epochs': args.nb_epoch,
        'use_multiprocessing': False,
        'max_queue_size': 10,
        'workers': 4,
    }
    model.fit_generator(train_gen, **fit_options)
    model.save(model_name)
def train_model(model, args, x_train, x_valid, y_train, y_valid):
    """Train the model, checkpointing per epoch and plotting the loss curve.

    Saves per-epoch checkpoints named after args.train_num, writes a
    loss-history PNG, and always saves the final model (which might not
    be the best epoch).

    Args:
        model: the Keras model to train (compiled here).
        args: namespace providing train_num, save_best_only,
            learning_rate, and nb_epoch.
        x_train, y_train: training inputs and targets.
        x_valid, y_valid: validation inputs and targets.
    """
    checkpoint = ModelCheckpoint(
        'self-driving-car-train' + str(args.train_num) + '-{epoch:03d}.h5',
        monitor='val_loss',
        verbose=0,
        # save the model only if the val_loss gets low
        save_best_only=args.save_best_only,
        mode='auto')
    model.compile(loss='mean_squared_error',
                  optimizer=Adam(lr=args.learning_rate))
    # shuffle the data because they are sequential; should help against
    # over-fitting towards certain parts of the track only
    x_train, y_train = shuffle(x_train, y_train, random_state=0)
    x_valid, y_valid = shuffle(x_valid, y_valid, random_state=0)
    # data for training are augmented, data for validation are not
    train_generator = Generator(x_train, y_train, True, args)
    validation_generator = Generator(x_valid, y_valid, False, args)
    history = model.fit_generator(train_generator,
                                  validation_data=validation_generator,
                                  epochs=args.nb_epoch,
                                  callbacks=[checkpoint],
                                  verbose=1)
    # summarize history for loss
    plt.plot(history.history['loss'])
    plt.plot(history.history['val_loss'])
    # BUG FIX: the original called plt.title twice -- the first title
    # ('model loss') was silently overwritten by this one.
    plt.title('training' + str(args.train_num))
    plt.ylabel('loss')
    plt.xlabel('epoch')
    plt.legend(['train', 'val'], loc='upper left')
    plt.savefig('history-training' + str(args.train_num) + '.png')
    plt.close()  # don't leak this figure's state into later plots
    # save the last model anyway (might not be the best)
    # NOTE(review): assumes a 'models/' directory already exists -- verify.
    model.save("models/model-train" + str(args.train_num) + "-final.h5")
def train_model(model, args, X_train, X_valid, y_train, y_valid):
    """Compile and train the model, checkpointing into trained_models/.

    Training batches are streamed from disk via batch_generator; a
    checkpoint is written each epoch, keeping only improvements when
    args.save_best_only is set.

    NOTE(review): max_q_size / nb_val_samples and the positional
    samples_per_epoch are Keras 1.x fit_generator arguments -- confirm
    the installed Keras version before touching this call.

    Args:
        model: the Keras model to train (compiled here).
        args: namespace providing save_best_only, learning_rate,
            data_dir, batch_size, samples_per_epoch, and nb_epoch.
        X_train, y_train: training inputs and targets.
        X_valid, y_valid: validation inputs and targets.
    """
    os.makedirs('trained_models', exist_ok=True)
    checkpoint = ModelCheckpoint(
        'trained_models/self-driving-car-{epoch:03d}.h5',
        monitor='val_loss',
        verbose=0,
        save_best_only=args.save_best_only,
        mode='auto')
    model.compile(loss='mean_squared_error',
                  optimizer=Adam(lr=args.learning_rate))
    # CLEANUP: removed two unused Generator(...) locals and a large
    # commented-out fit_generator block that duplicated the call below.
    model.fit_generator(batch_generator(args.data_dir, X_train, y_train,
                                        args.batch_size, True),
                        args.samples_per_epoch,
                        args.nb_epoch,
                        max_q_size=1,
                        validation_data=batch_generator(args.data_dir,
                                                        X_valid, y_valid,
                                                        args.batch_size,
                                                        False),
                        nb_val_samples=len(X_valid),
                        callbacks=[checkpoint],
                        verbose=1)
def train_model(model, args, X_train, X_valid, y_train, y_valid):
    """Train the model with per-epoch checkpointing.

    NOTE(review): unlike the sibling variants in this file, this one
    never calls model.compile() -- it appears to expect an already
    compiled model from the caller. Confirm before relying on it.

    Args:
        model: a (presumably compiled) Keras model.
        args: namespace providing model (checkpoint name prefix),
            save_best_only, and nb_epoch.
        X_train, y_train: training inputs and targets.
        X_valid, y_valid: validation inputs and targets.
    """
    ckpt = ModelCheckpoint(args.model + '-{epoch:03d}.h5',
                           monitor='val_loss',
                           verbose=0,
                           save_best_only=args.save_best_only,
                           mode='auto')
    # Only the training split is augmented.
    train_gen = Generator(X_train, y_train, True, args)
    valid_gen = Generator(X_valid, y_valid, False, args)
    model.fit_generator(train_gen,
                        validation_data=valid_gen,
                        epochs=args.nb_epoch,
                        use_multiprocessing=False,
                        max_queue_size=10,
                        workers=4,
                        callbacks=[ckpt],
                        verbose=1)
def train_model(model, args, X_train, X_valid, y_train, y_valid):
    """Compile and train the model with per-epoch checkpointing.

    NOTE(review): this variant enables use_multiprocessing with 4
    workers -- that is only safe if Generator is a keras.utils.Sequence;
    confirm, otherwise batches may be duplicated across workers.

    Args:
        model: the Keras model to train (compiled here).
        args: namespace providing save_best_only, learning_rate,
            and nb_epoch.
        X_train, y_train: training inputs and targets.
        X_valid, y_valid: validation inputs and targets.
    """
    ckpt = ModelCheckpoint('self-driving-car-{epoch:03d}.h5',
                           monitor='val_loss',
                           verbose=0,
                           save_best_only=args.save_best_only,
                           mode='auto')
    model.compile(loss='mean_squared_error',
                  optimizer=Adam(lr=args.learning_rate))
    # Only the training split is augmented.
    train_gen = Generator(X_train, y_train, True, args)
    valid_gen = Generator(X_valid, y_valid, False, args)
    fit_options = {
        'validation_data': valid_gen,
        'epochs': args.nb_epoch,
        'use_multiprocessing': True,
        'max_queue_size': 10,
        'workers': 4,
        'callbacks': [ckpt],
        'verbose': 1,
    }
    model.fit_generator(train_gen, **fit_options)
def get_embed_helper(model, args, X, y, mode, layer_name, dtheta=0):
    """Extract an intermediate-layer embedding plus predictions for X.

    Builds a two-output model tapping both the named layer and the final
    output, runs it over a (non-augmenting) Generator, and returns the
    embedding, squeezed predictions, and per-example L1 error.

    Args:
        model: trained Keras model to tap.
        args: namespace forwarded to Generator.
        X, y: inputs and ground-truth targets.
        mode, dtheta: forwarded to Generator (semantics defined there).
        layer_name: name of the layer whose output is the embedding.

    Returns:
        (embed, y_pred, l1_dist) -- embedding array, predictions, and
        elementwise absolute errors against the (truncated) targets.
    """
    intermediate_layer_model = Model(
        inputs=model.input,
        outputs=(model.get_layer(layer_name).output, model.output))
    validation_generator = Generator(X, y, False, args, mode, dtheta)
    intermediate_output = intermediate_layer_model.predict_generator(
        validation_generator, callbacks=None, verbose=1)
    embed = intermediate_output[0]
    y_pred = intermediate_output[1].squeeze()
    # discard the last few examples: predictions may cover fewer samples
    # than y, so truncate the labels to match
    l1_dist = np.abs(y_pred - y[:len(y_pred)])
    # CLEANUP: removed a dead `import sklearn` + mean_squared_error call --
    # the `mse` result was never used, and `import sklearn` alone does not
    # reliably expose the `sklearn.metrics` submodule.
    return embed, y_pred, l1_dist