import numpy as np
import matplotlib.pyplot as plt

import utils


def plot_W_from_pixel(model, i):
    """Plot the weights connecting every input pixel to hidden unit i as an image."""
    H = int(np.sqrt(model.W.shape[0]))  # image height, assuming square input images
    W = H                               # image width
    toplot = model.W[:, i]
    utils.data_plot(toplot[None, :])
    climabs = np.abs(toplot).max()
    plt.clim(-climabs, climabs)         # symmetric color limits around zero
    plt.set_cmap('gray')
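# Hypothetical usage sketch (not from the original source): assumes `model` is a
# trained RBM-style object exposing a (n_pixels, n_hidden) weight matrix `model.W`
# whose pixel count is a perfect square, as plot_W_from_pixel expects.
plot_W_from_pixel(model, i=0)  # visualize the incoming weights of hidden unit 0
plt.show()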
import lane_spline_area as lsa
from utils.data_plot import *
import json

if __name__ == '__main__':
    offline_annotation = True  # use offline annotation data
    normalize = False          # normalize box/point annotations
    offline_annotation_path = '/home/huhaoyu/Downloads/3d标点离线标注/annotation'  # folder holding the annotation results
    # offline_annotation_path = '/home/huhaoyu/Downloads/离线标注/annotation'
    # offline_annotation_path = '/home/huhaoyu/Downloads/车道画线_FLV_V10/车道画线_FLV_V10'

    # select the evaluation task type
    task_dict = {
        1: 'box',
        2: 'point',
        3: 'line'
    }
    task = task_dict[2]

    # TODO: visualize the annotation results
    if offline_annotation:
        old_cam_ann, new_cam_ann, ann_num = convert_annotation_offline(offline_annotation_path)  # convert the offline data format
        output_dict = eval_offline(task, old_cam_ann, new_cam_ann, ann_num, normalize)  # run the evaluation
        data_plot(output_dict, task)  # visualize
    else:
        # reserved hook for online data
        test_img = get_test_image(task)  # fetch the list of test images
        # output_list = eval(task, test_img)  # get evaluation results
        # output_list = eval_offline(task, old_cam_ann, new_cam_ann, ann_num)
        # data_plot(output_list)  # visualize
print(
    tabulate([[
        'PcDGAN',
        str(PcDGAN_MAE_overall) + '+/-' + str(PcDGAN_MAE_std_overall),
        str(PcDGAN_KDE_overall) + '+/-' + str(PcDGAN_KDE_std_overall),
        str(PcDGAN_diver_overall) + '+/-' + str(PcDGAN_diver_std_overall)
    ]],
             headers=[
                 'Model', 'Label Score', 'Probability Density', 'Diversity'
             ]))

ind = np.random.choice(X.shape[0], replace=False, size=1000)
data_plot(
    X[ind], equation,
    './' + folder + '/Evaluation/PcDGAN/Data_' + str(args.id) + '.png')
dist_anim(
    Xs, conds, equation,
    './' + folder + '/Evaluation/PcDGAN/out_put_samples_' + str(args.id) + '.mp4')

plt.figure(figsize=(18, 12))
plt.rc('font', size=45)
plt.plot(conds, PcDGAN_diver, color='#003F5C')
plt.fill_between(conds,
                 PcDGAN_diver - PcDGAN_diver_std,
                 PcDGAN_diver + PcDGAN_diver_std,
                 facecolor='#003F5C',
                 edgecolor='#003F5C',
                 alpha=0.3)
# load mnist data
(X_dtr, y_dtr), (X_dvl, y_dvl), (X_dts, y_dts) = utils.load_data()

# one-hot encoding of labels
ohe = preprocess.OneHotEncoder(sparse=False)
ohe.fit(y_dtr[:, None])
Y_dtr = ohe.transform(y_dtr[:, None])
Y_dvl = ohe.transform(y_dvl[:, None])
Y_dts = ohe.transform(y_dts[:, None])

# plot example data
importlib.reload(utils)
h_fig, h_ax = plt.subplots(nrows=4, ncols=5)
for ax in h_ax.ravel():
    plt.axes(ax)
    utils.data_plot(X_dtr, y_dtr)

# get augmented data: horizontal flip
X_dtr_flip = utils.data_ravel(utils.data_unravel(X_dtr)[:, :, ::-1])

importlib.reload(utils)
h_fig, h_ax = plt.subplots(nrows=4, ncols=5)
for ax in h_ax.ravel():
    plt.axes(ax)
    utils.data_plot(X_dtr_flip, y_dtr)

X_dtr_all = np.concatenate((X_dtr, X_dtr_flip), axis=0)
Y_dtr_all = np.concatenate((Y_dtr, Y_dtr), axis=0)
indx_reorder = np.random.permutation(X_dtr_all.shape[0])
X_dtr_all = X_dtr_all[indx_reorder]
# X_dvl = utils.data_unravel(X_dvl)[:, :, :, None]
# X_dts = utils.data_unravel(X_dts)[:, :, :, None]

# one-hot encoding of labels
ohe = preprocess.OneHotEncoder(sparse=False)
ohe.fit(y_dtr[:, None])
Y_dtr = ohe.transform(y_dtr[:, None])
Y_dvl = ohe.transform(y_dvl[:, None])
Y_dts = ohe.transform(y_dts[:, None])

# plot example data
importlib.reload(utils)
h_fig, h_ax = plt.subplots(nrows=4, ncols=5)
for ax in h_ax.ravel():
    plt.axes(ax)
    utils.data_plot(X_dtr, y_dtr)

# model and training hyperparameters
M0 = X_dtr.shape[1]
M1 = 512
batch_size = 32
total_steps = 1000000
learning_rate = 0.03
wd_l2 = 0.001  # weight decay

""" define a tf graph for computation """
graph = tf.Graph()
with graph.as_default():
    """ constant, variable and placeholder """
    # placeholder
    X0_in = tf.placeholder(dtype=tf.float32, shape=[batch_size, M0])
with tf.summary.FileWriter('./model_log') as writer:
    writer.add_graph(session.graph)

tf.global_variables_initializer().run()
if yn_load_file:
    model.load_parameters(filedir='./model_save', filename='RBM_tf')
    model.params_dict_to_tensor()

gibbs_result = session.run(gibbs_outcome, feed_dict={x0_in: x_batch})

##
utils.data_plot(gibbs_result[1], n=10)
utils.data_plot(model.dict_params['w'][:, :10].transpose(), n=10)

##
with tf.Session(graph=model.graph) as session:
    tf.global_variables_initializer().run()
    temp0 = session.run(model.cal_energy(x0=x_in))
    print(temp0)
    session.run(
        model.load_parameters(filedir='./model_save', filename='RBM_tf'))
    temp1 = session.run(model.cal_energy(x0=x_in))
    print(temp1)

##
X_dvl = utils.data_unravel(X_dvl)[:, :, :, None]
X_dts = utils.data_unravel(X_dts)[:, :, :, None]

# one-hot encoding of labels
ohe = preprocess.OneHotEncoder(sparse=False)
ohe.fit(y_dtr[:, None])
Y_dtr = ohe.transform(y_dtr[:, None])
Y_dvl = ohe.transform(y_dvl[:, None])
Y_dts = ohe.transform(y_dts[:, None])

# plot example data
importlib.reload(utils)
h_fig, h_ax = plt.subplots(nrows=4, ncols=5)
for ax in h_ax.ravel():
    plt.axes(ax)
    utils.data_plot(X_dtr, y_dtr)

##
X_dvl_flp = X_dvl[:, :, ::-1]
X_dvl_rot = np.rot90(X_dvl, k=1, axes=[1, 2])

X_tr = np.reshape(X_dtr, [X_dtr.shape[0], -1])
X_vl = np.reshape(X_dvl, [X_dvl.shape[0], -1])
X_vl_rot = np.reshape(X_dvl_rot, [X_dvl_rot.shape[0], -1])

X_tr = utils.data_binarize(X_tr, threshold=0.5, states='0,1')
X_vl = utils.data_binarize(X_vl, threshold=0.5, states='0,1')
X_vl_rot = utils.data_binarize(X_vl_rot, threshold=0.5, states='0,1')

utils.data_plot(X_vl, n=100, yn_random=False)
plt.suptitle('validation, original')
# one-hot encoding of labels
ohe = preprocess.OneHotEncoder(sparse=False)
ohe.fit(y_dtr[:, None])
Y_dtr = ohe.transform(y_dtr[:, None])
Y_dvl = ohe.transform(y_dvl[:, None])
Y_dts = ohe.transform(y_dts[:, None])

# plot example data
importlib.reload(utils)
h_fig, h_ax = plt.subplots(nrows=4, ncols=5)
for ax in h_ax.ravel():
    plt.axes(ax)
    utils.data_plot(X_dtr, y_dtr)

# get augmented data: horizontal flip
X_dtr_flip = X_dtr[:, :, ::-1]

h_fig, h_ax = plt.subplots(nrows=4, ncols=5)
for ax in h_ax.ravel():
    plt.axes(ax)
    utils.data_plot(X_dtr_flip, y_dtr)

X_dtr_all = np.concatenate((X_dtr, X_dtr_flip), axis=0)
Y_dtr_all = np.concatenate((Y_dtr, Y_dtr), axis=0)
indx_reorder = np.random.permutation(X_dtr_all.shape[0])
X_dtr_all = X_dtr_all[indx_reorder]
Y_dtr_all = Y_dtr_all[indx_reorder]