def find_param_plot(batch_size_list, lr_list, L_method):
    # Grid search over batch size and learning rate; record the best loss and
    # training accuracy for each setting, then plot both as 3D surfaces.
    loss_list, train_acc_list = [], []
    for batch_size in batch_size_list:
        for lr in lr_list:
            softmax_classifier = softmax([3072, 10])
            loss, best_acc = softmax_classifier.train(X_train, y_train,
                                                      batch_size=batch_size, epoch=10,
                                                      lr=lr, reg=1e-5,
                                                      normalize_type=L_method)
            loss_list.append(min(loss))
            train_acc_list.append(best_acc)
    plot.plot_3d(batch_size_list, lr_list, loss_list,
                 ["batch_size", "learning rate", "loss"], "bs-lr-loss")
    plot.plot_3d(batch_size_list, lr_list, train_acc_list,
                 ["batch_size", "learning rate", "train_acc"], "bs-lr-acc")
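# Usage sketch for the grid search above. The grid values and the "L1"/"L2"
# normalization labels are illustrative assumptions, not taken from the original code;
# X_train/y_train and the softmax/plot modules are expected to already be in scope.
if __name__ == "__main__":
    batch_sizes = [32, 64, 128, 256]
    learning_rates = [1e-4, 1e-3, 1e-2, 1e-1]
    for method in ["L1", "L2"]:
        find_param_plot(batch_sizes, learning_rates, method)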
import datetime
import time

import numpy as np
from PIL import Image


def template_match(key):
    print("__start_template_match__")
    t1 = time.time()
    if key == 0:
        pos = sad_match()    # sum of absolute differences
    elif key == 1:
        pos = zncc_match()   # zero-mean normalized cross-correlation
    else:
        print("__false_keyword__")
        exit()
    elapsed = time.time() - t1
    print("__Finish_time:{0}s__".format(elapsed))
    # pos = ransac(pos)
    plot_3d(pos)
    Image.fromarray(np.uint8(pos)).save("stereo_gray{0}.png".format(datetime.date.today()))
    np.save("stereo_match", pos)
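# sad_match() and zncc_match() above are project code that is not shown here. As a
# point of reference, this is a minimal sketch of the two patch scores they are named
# after; the function names and per-patch usage are illustrative assumptions, not the
# project's actual implementation.
import numpy as np


def sad_score(patch_a, patch_b):
    # Sum of absolute differences: lower means a better match.
    return np.abs(patch_a.astype(np.float64) - patch_b.astype(np.float64)).sum()


def zncc_score(patch_a, patch_b):
    # Zero-mean normalized cross-correlation: values near 1 mean a better match.
    a = patch_a.astype(np.float64) - patch_a.mean()
    b = patch_b.astype(np.float64) - patch_b.mean()
    denom = np.sqrt((a * a).sum() * (b * b).sum())
    return (a * b).sum() / denom if denom > 0 else 0.0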
def plot():
    pos = np.load("stereo_match.npy")
    return plot_3d(pos)
def train_model(args):
    logging.basicConfig(format='[%(asctime)s] %(message)s',
                        datefmt='%m/%d/%Y %I:%M:%S %p', level=logging.INFO)
    logging.info('Train model')
    logging.info('Loading data...')

    # Split data into training set and test set
    xs, ys, n = load_data(args.X, args.Y, shuffle=True)
    n_train = int(args.frac * n)

    # Data preprocessing
    preprocessor = Preprocessor()
    rng = abs(args.max - args.min)
    xs_n = preprocessor.normalize(xs, rng)
    xs_n_filtered = xs_n
    if args.craft:
        xs_n_filtered = filter_data(xs_n_filtered)

    # Feature extraction
    logging.info('Computing means and sigmas (%s)...' % args.pre)
    means, sigmas = get_means_sigmas(args, xs_n_filtered)
    if args.craft:
        means, sigmas = crafted_gaussian_feature(means, sigmas)

    def phi(x):
        pre = Preprocessor()
        return pre.gaussian(pre.normalize(x, rng), means, sigmas)

    logging.info('Preprocessing... (d = %d; craft-feature %d)' % (means.shape[0], args.craft))
    phi_xs = phi(xs)
    phi_xs_train, ys_train = phi_xs[:n_train], ys[:n_train]
    phi_xs_test, ys_test = phi_xs[n_train:], ys[n_train:]
    phi_dim = len(phi_xs_train[0])

    model = get_model(args, (phi_dim, ))
    logging.info('Using model %s (plot = %s)' % (args.model, args.plot))

    def f(x):
        # Closes over `sess`, so it must only be called inside the session below.
        return np.round(np.clip(model.test(sess, x), args.min, args.max))

    with tf.Session() as sess:
        logging.info('Training... (optimizer = %s)' % args.optimizer)
        if args.K <= 1:
            train_loss = train(args, sess, model, phi_xs_train, ys_train)
            logging.info('Training loss = %f' % train_loss)
            if n_train < n:
                test_loss = model.eval(sess, phi_xs_test, ys_test)
                logging.info('Testing loss = %f' % test_loss)
            if args.output is not None:
                logging.info('Save model at %s' % args.output)
                model.save_to_file(sess, args.output)
                np.save(args.output + '-mean', means)
                np.save(args.output + '-sigma', sigmas)
            if args.plot is not None:
                logging.info('Plotting... (output = %s)' % args.fig)
                if args.plot == '3d':
                    plot_3d(f, phi, args.min, args.max, args.min, args.max, 0, 1081, args.fig)
                elif args.plot == '2d':
                    plot_2d_map(f, phi, args.min, args.max, args.min, args.max)
        else:
            # K-fold cross-validation path
            validation_loss = train_cross_validation(args, sess, model, phi_xs_train, ys_train)
            log_filename = args.log
            with open(log_filename, 'w') as log_file:
                log_file.write('%s\t%s\n' % (log_filename, validation_loss))
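# Preprocessor.gaussian above is project code that is not shown here; the sketch below
# only illustrates the Gaussian (RBF) feature map that phi() stands for. The function
# name, the sigma shape (one scalar per basis center), and the 2*sigma^2 scaling are
# assumptions, not the project's definition.
import numpy as np


def gaussian_features(x, means, sigmas):
    """Map inputs of shape (n, d) to (n, k) RBF activations for k basis centers."""
    x = np.atleast_2d(np.asarray(x, dtype=np.float64))     # (n, d)
    diff = x[:, None, :] - means[None, :, :]                # (n, k, d)
    sq_dist = (diff ** 2).sum(axis=-1)                      # (n, k)
    return np.exp(-sq_dist / (2.0 * sigmas ** 2 + 1e-12))   # broadcast over (k,)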
# save features
utils.save_features(model_dir, "X_train", X_train, y_train)
utils.save_features(model_dir, "X_test", X_test, y_test)
utils.save_features(model_dir, "Z_train", Z_train, y_train)
utils.save_features(model_dir, "Z_test", Z_test, y_test)

# evaluation: train
_, acc_svm = evaluate.svm(Z_train, y_train, Z_train, y_train)
acc_knn = evaluate.knn(Z_train, y_train, Z_train, y_train, k=5)
acc_svd = evaluate.nearsub(Z_train, y_train, Z_train, y_train, n_comp=2)
acc = {"svm": acc_svm, "knn": acc_knn, "nearsub-svd": acc_svd}
utils.save_params(model_dir, acc, name="acc_train.json")

# evaluation: test
_, acc_svm = evaluate.svm(Z_train, y_train, Z_test, y_test)
acc_knn = evaluate.knn(Z_train, y_train, Z_test, y_test, k=5)
acc_svd = evaluate.nearsub(Z_train, y_train, Z_test, y_test, n_comp=2)
acc = {"svm": acc_svm, "knn": acc_knn, "nearsub-svd": acc_svd}
utils.save_params(model_dir, acc, name="acc_test.json")

# plot
plot.plot_combined_loss(model_dir)
plot.plot_heatmap(X_train, y_train, "X_train", model_dir)
plot.plot_heatmap(X_test, y_test, "X_test", model_dir)
plot.plot_heatmap(Z_train, y_train, "Z_train", model_dir)
plot.plot_heatmap(Z_test, y_test, "Z_test", model_dir)
plot.plot_3d(X_train, y_train, "X_train", model_dir)
plot.plot_3d(X_test, y_test, "X_test", model_dir)
plot.plot_3d(Z_train, y_train, "Z_train", model_dir)
plot.plot_3d(Z_test, y_test, "Z_test", model_dir)
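# evaluate.nearsub above is project code that is not shown here; the sketch below
# illustrates the nearest-subspace idea it refers to (fit a low-rank subspace per class
# via SVD, classify by the smallest projection residual). The signature and details are
# assumptions, not the project's implementation.
import numpy as np


def nearest_subspace_predict(Z_train, y_train, Z_test, n_comp=2):
    classes = np.unique(y_train)
    bases = []
    for c in classes:
        Zc = Z_train[y_train == c]                     # features of class c, shape (n_c, d)
        _, _, Vt = np.linalg.svd(Zc, full_matrices=False)
        bases.append(Vt[:n_comp])                      # top n_comp right singular vectors
    residuals = []
    for B in bases:
        proj = Z_test @ B.T @ B                        # projection onto the class subspace
        residuals.append(np.linalg.norm(Z_test - proj, axis=1))
    return classes[np.argmin(np.stack(residuals, axis=1), axis=1)]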
print("Converged: %s" % converged) print(traj) print(line_search_factors) trajs = {} trajs[0] = traj endpoints, counts = plot.get_endpoints_and_counts(trajs) for i in range(0, len(endpoints)): print(endpoints[i], " : ", counts[i]) fig = plt.figure() ax = fig.add_subplot(111, projection='3d') # plot.plot_3d_endpoint_lines(ax, trajs) plot.plot_3d(ax, x_rng, y_rng, obj_func) plt.savefig(fig_dir + "/3d.png", dpi=200) plt.figure() plot.plot_obj_func(x_rng, y_rng, obj_func) plot.plot_trajs(x_rng, y_rng, trajs, [0]) plt.title("Trajs") plt.savefig(fig_dir + "/trajs.png", dpi=200) plt.figure() plot.plot_obj_func(x_rng, y_rng, obj_func) # plot.plot_quiver(x_rng, y_rng, trajs) plot.plot_endpoint_counts(trajs) plt.title("Endpoints") plt.savefig(fig_dir + "/endpoints.png", dpi=200)