def _restart(self):
    # make a deep copy
    data = [d.copy() for d in self.data_copy]
    label = [l.copy() for l in self.label_copy]
    data, label = indoor3d_util.room2blocks_wrapper_normalized(
        data, label, self.sample_size,
        block_size=1.0, stride=0.5, random_sample=False, sample_num=None)
    # duplicate if necessary to fill batch
    num_samples = len(data)
    if num_samples < self.batch_size:
        idx = np.concatenate((np.tile(np.arange(num_samples), (self.batch_size // num_samples, )),
                              np.random.permutation(num_samples)[:(self.batch_size % num_samples)]),
                             axis=0)
        data, label = [data[i] for i in idx], [label[i] for i in idx]
        num_samples = self.batch_size
    # shuffle samples
    idx = np.random.permutation(num_samples)
    data, label = [data[i] for i in idx], [label[i] for i in idx]
    # sample to a fixed length
    '''for i in range(num_samples):
        k = len(data[i])
        idx = np.concatenate((np.tile(np.arange(k), (self.sample_size // k, )),
                              np.random.permutation(k)[:(self.sample_size % k)]), axis=0)
        data[i] = data[i][idx, :]
        label[i] = label[i][idx]'''
    # reshape and reset index
    self.data = data    #np.array(data) #data[:, self.feat_dims].reshape(num_samples, self.sample_size, -1, 1).transpose(0, 2, 3, 1)
    self.label = label  #np.array(label) #label.reshape(num_samples, self.sample_size, -1, 1).transpose(0, 2, 3, 1)
    self.index = 0

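# The pad-then-shuffle indexing above can be checked in isolation: when there are fewer
# samples than the batch size, whole copies are tiled and the remainder is drawn without
# replacement, so every sample appears at least batch_size // num_samples times. The
# numbers below are only an illustrative example, not values from the loader above.
import numpy as np

num_samples, batch_size = 3, 8
idx = np.concatenate((np.tile(np.arange(num_samples), (batch_size // num_samples, )),
                      np.random.permutation(num_samples)[:(batch_size % num_samples)]),
                     axis=0)
assert len(idx) == batch_size  # e.g. [0, 1, 2, 0, 1, 2, x, y] with x != y
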
def eval_one_epoch(sess, ops, each_data):
    is_training = False
    ceiling_list = list()
    floor_list = list()
    wall_list = list()
    window_list = list()
    door_list = list()
    clutter_list = list()

    current_data, current_label = indoor3d_util.room2blocks_wrapper_normalized(
        each_data, NUM_POINT)
    current_data = current_data[:, 0:NUM_POINT, :]
    current_label = np.squeeze(current_label)

    # Get room dimension..
    data_label = each_data
    data = data_label[:, 0:6]
    max_room_x = max(data[:, 0])
    max_room_y = max(data[:, 1])
    max_room_z = max(data[:, 2])

    file_size = current_data.shape[0]
    num_batches = file_size // BATCH_SIZE

    for batch_idx in range(num_batches):
        start_idx = batch_idx * BATCH_SIZE
        end_idx = (batch_idx + 1) * BATCH_SIZE

        feed_dict = {
            ops['pointclouds_pl']: current_data[start_idx:end_idx, :, :],
            ops['labels_pl']: current_label[start_idx:end_idx],
            ops['is_training_pl']: is_training
        }
        pred_val = sess.run(ops['pred_softmax'], feed_dict=feed_dict)
        pred_label = np.argmax(pred_val, 2)

        for b in range(BATCH_SIZE):
            pts = current_data[start_idx + b, :, :]
            pts[:, 6] *= max_room_x
            pts[:, 7] *= max_room_y
            pts[:, 8] *= max_room_z
            pred = pred_label[b, :]
            for i in range(NUM_POINT):
                if pred[i] == 0:
                    ceiling_list.append([pts[i, 6], pts[i, 7], pts[i, 8]])
                if pred[i] == 1:
                    floor_list.append([pts[i, 6], pts[i, 7], pts[i, 8]])
                if pred[i] == 2:
                    wall_list.append([pts[i, 6], pts[i, 7], pts[i, 8]])
                if pred[i] == 3:
                    window_list.append([pts[i, 6], pts[i, 7], pts[i, 8]])
                if pred[i] == 4:
                    door_list.append([pts[i, 6], pts[i, 7], pts[i, 8]])
                if pred[i] == 5:
                    clutter_list.append([pts[i, 6], pts[i, 7], pts[i, 8]])

    return ceiling_list, floor_list, wall_list, window_list, door_list, clutter_list

def convert_to_h5_test():
    sample_cnt = 0
    filelist = os.path.join(BASE_DIR, 'shapes/all_data_label_test.txt')
    data_label_files = [
        os.path.join(numpy_dir, line.rstrip()) for line in open(filelist)
    ]
    for i, data_label_filename in enumerate(data_label_files):
        print(f'data_label_filename = {Path(data_label_filename).name}')
        # Normalize the data here?
        data, label = indoor3d_util.room2blocks_wrapper_normalized(
            data_label_filename,
            NUM_POINT,
            block_size=1.0,
            stride=0.5,
            random_sample=False,
            sample_num=None)
        test_or_train_dir = 'test' if 'test' in Path(data_label_filename).name else 'train'
        for _ in range(data.shape[0]):
            if test_or_train_dir == 'test':
                fout_room_test.write(os.path.basename(data_label_filename)[0:-4] + '\n')
            elif test_or_train_dir == 'train':
                fout_room_train.write(os.path.basename(data_label_filename)[0:-4] + '\n')

        sample_cnt += data.shape[0]
        # Need to update is_last_batch if using test & train directories
        is_last_batch = i == len(data_label_files) - 1
        output_dir_test_or_train = os.path.join(hdf5_data.as_posix(), test_or_train_dir)
        insert_batch(data, label, output_dir_test_or_train, is_last_batch)
        # insert_batch(data, label, output_dir, is_last_batch)

    fout_room.close()
    print("Total samples: {0}".format(sample_cnt))

def Get_h5():
    print("Begin to generate room block(h5 file)...")
    data_label_files = [
        os.path.join(indoor3d_data_dir, line.rstrip()) for line in open(filelist)
    ]
    fout_room = open(output_room_filelist, 'w')
    all_file = open(output_all_file, 'w')

    sample_cnt = 0
    for i, data_label_filename in enumerate(tqdm(data_label_files)):
        #print(data_label_filename)
        data, label = indoor3d_util.room2blocks_wrapper_normalized(
            data_label_filename + ".npy",
            NUM_POINT,
            block_size=CONF.BLOCK_SIZE,
            stride=CONF.STRIDE_SIZE,
            random_sample=False,
            sample_num=None)
        #print('{0}, {1}'.format(data.shape, label.shape))
        for _ in range(data.shape[0]):
            fout_room.write(os.path.basename(data_label_filename)[0:-4] + '\n')

        sample_cnt += data.shape[0]
        insert_batch(data, label, i == len(data_label_files) - 1)

    fout_room.close()
    print("Total samples: {0}".format(sample_cnt))

    for i in range(h5_index):
        all_file.write(
            os.path.join(CONF.DATA_TYPE + '_h5_file', 'ply_data_all_') + str(i) + '.h5\n')
    all_file.close()
    print("generate room block(h5 file) finished!!")

def eval_one_epoch(sess, ops, room_path, out_data_label_filename, out_gt_label_filename):
    error_cnt = 0
    is_training = False
    total_correct = 0
    total_seen = 0
    loss_sum = 0
    total_seen_class = [0 for _ in range(NUM_CLASSES)]
    total_correct_class = [0 for _ in range(NUM_CLASSES)]

    if FLAGS.visu:
        fout = open(os.path.join(DUMP_DIR, os.path.basename(room_path)[:-4] + '_pred.obj'), 'w')
        fout_gt = open(os.path.join(DUMP_DIR, os.path.basename(room_path)[:-4] + '_gt.obj'), 'w')
    fout_data_label = open(out_data_label_filename, 'w')
    #fout_gt_label = open(out_gt_label_filename, 'w')

    current_data, current_label = indoor3d_util.room2blocks_wrapper_normalized(room_path, NUM_POINT)
    current_data = current_data[:, 0:NUM_POINT, :]
    current_label = np.squeeze(current_label)

    # Get room dimension..
    data_label = np.load(room_path)
    data = data_label[:, 0:6]
    max_room_x = max(data[:, 0])
    max_room_y = max(data[:, 1])
    max_room_z = max(data[:, 2])

    file_size = current_data.shape[0]
    num_batches = file_size // BATCH_SIZE
    print(file_size)

    for batch_idx in range(num_batches):
        start_idx = batch_idx * BATCH_SIZE
        end_idx = (batch_idx + 1) * BATCH_SIZE
        cur_batch_size = end_idx - start_idx

        feed_dict = {ops['pointclouds_pl']: current_data[start_idx:end_idx, :, :],
                     #ops['labels_pl']: current_label[start_idx:end_idx],
                     ops['is_training_pl']: is_training}
        # run the tensor directly (not wrapped in a list) so pred_val keeps shape BxNxC,
        # which the argmax and pred_val[b, i, pred[i]] lookups below rely on
        pred_val = sess.run(ops['pred_softmax'], feed_dict=feed_dict)

        if FLAGS.no_clutter:
            pred_label = np.argmax(pred_val[:, :, 0:12], 2)  # BxN
        else:
            pred_label = np.argmax(pred_val, 2)  # BxN

        # Save prediction labels to OBJ file
        for b in range(BATCH_SIZE):
            pts = current_data[start_idx + b, :, :]
            #l = current_label[start_idx + b, :]
            pts[:, 6] *= max_room_x
            pts[:, 7] *= max_room_y
            pts[:, 8] *= max_room_z
            pts[:, 3:6] *= 255.0
            pred = pred_label[b, :]
            for i in range(NUM_POINT):
                color = indoor3d_util.g_label2color[pred[i]]
                #color_gt = indoor3d_util.g_label2color[current_label[start_idx + b, i]]
                if FLAGS.visu:
                    fout.write('v %f %f %f %d %d %d\n' % (
                        pts[i, 6], pts[i, 7], pts[i, 8], color[0], color[1], color[2]))
                    #fout_gt.write('v %f %f %f %d %d %d\n' % (
                    #    pts[i, 6], pts[i, 7], pts[i, 8], color_gt[0], color_gt[1], color_gt[2]))
                fout_data_label.write('%f %f %f %d %d %d %f %d\n' % (
                    pts[i, 6], pts[i, 7], pts[i, 8], pts[i, 3], pts[i, 4], pts[i, 5],
                    pred_val[b, i, pred[i]], pred[i]))
                #fout_gt_label.write('%d\n' % (l[i]))

        #correct = np.sum(pred_label == current_label[start_idx:end_idx, :])
        #total_correct += correct
        total_seen += (cur_batch_size * NUM_POINT)
        #loss_sum += (loss_val * BATCH_SIZE)
        for i in range(start_idx, end_idx):
            for j in range(NUM_POINT):
                l = current_label[i, j]
                total_seen_class[l] += 1
                total_correct_class[l] += (pred_label[i - start_idx, j] == l)

    #log_string('eval mean loss: %f' % (loss_sum / float(total_seen / NUM_POINT)))
    #log_string('eval accuracy: %f' % (total_correct / float(total_seen)))
    fout_data_label.close()
    #fout_gt_label.close()
    if FLAGS.visu:
        fout.close()
        #fout_gt.close()
    return total_seen

total_seen_class = [0 for _ in range(len(select_class_id_mappings))]
total_loops = len(data_label_files)
#for i, data_label_filename in enumerate(data_label_files):
for i, data_label_filename in enumerate(data_label_files):
    TOTAL_LOOPS_CURRENT_RUN = TOTAL_LOOPS_CURRENT_RUN + 1
    print('processing ' + str(i) + ' of ' + str(total_loops))
    if TOTAL_LOOPS_CURRENT_RUN < TOTAL_PROCESSED:
        continue
    #print(data_label_filename)
    data, label = indoor3d_util.room2blocks_wrapper_normalized(
        data_label_filename,
        NUM_POINT,
        block_size=256.0,
        stride=128,
        random_sample=False,
        sample_num=None,
        label_selections=None)  # label_selections=label_selections)

    for from_val, to_val in selected_class_id_to_train_id_conversion:
        idxs = label == from_val
        label[idxs] = to_val
        total_seen_class[to_val] += np.sum(idxs)

    print('{0}, {1}'.format(data.shape, label.shape))
    for _ in range(data.shape[0]):
        fout_room.write(os.path.basename(data_label_filename)[0:-4] + '\n')

def test():
    with tf.Graph().as_default():
        with tf.device('/gpu:' + str(GPU_INDEX)):
            pointclouds_pl, labels_pl, sem_labels_pl = placeholder_inputs(BATCH_SIZE, NUM_POINT)
            is_training_pl = tf.placeholder(tf.bool, shape=())

            # Get model and loss
            pred_sem, pred_ins = get_model(pointclouds_pl, is_training_pl, NUM_CLASSES)
            pred_sem_softmax = tf.nn.softmax(pred_sem)
            pred_sem_label = tf.argmax(pred_sem_softmax, axis=2)

            loss, sem_loss, disc_loss, l_var, l_dist, l_reg = get_loss(
                pred_ins, labels_pl, pred_sem_label, pred_sem, sem_labels_pl)

            loader = tf.train.Saver()

        # Create a session
        config = tf.ConfigProto()
        config.gpu_options.allow_growth = True
        config.allow_soft_placement = True
        config.log_device_placement = True
        sess = tf.Session(config=config)

        is_training = False

        # Restore variables from disk.
        loader.restore(sess, MODEL_PATH)
        log_string("Model restored.")

        ops = {
            'pointclouds_pl': pointclouds_pl,
            'labels_pl': labels_pl,
            'sem_labels_pl': sem_labels_pl,
            'is_training_pl': is_training_pl,
            'pred_ins': pred_ins,
            'pred_sem_label': pred_sem_label,
            'pred_sem_softmax': pred_sem_softmax,
            'loss': loss,
            'l_var': l_var,
            'l_dist': l_dist,
            'l_reg': l_reg
        }

        total_acc = 0.0
        total_seen = 0

        ious = np.zeros(NEW_NUM_CLASSES)
        totalnums = np.zeros(NEW_NUM_CLASSES)

        total_gt_ins = np.zeros(NUM_CLASSES)
        at = 0.5
        tpsins = [[] for itmp in range(NUM_CLASSES)]
        fpsins = [[] for itmp in range(NUM_CLASSES)]
        all_mean_cov = [[] for itmp in range(NUM_CLASSES)]
        all_mean_weighted_cov = [[] for itmp in range(NUM_CLASSES)]

        output_filelist_f = os.path.join(LOG_DIR, 'output_filelist.txt')
        fout_out_filelist = open(output_filelist_f, 'w')
        for shape_idx in range(len_pts_files):
            room_path = ROOM_PATH_LIST[shape_idx]
            log_string('%d / %d ...' % (shape_idx, len_pts_files))
            log_string('Loading train file ' + room_path)

            out_data_label_filename = os.path.basename(room_path)[:-4] + '_pred.txt'
            out_data_label_filename = os.path.join(OUTPUT_DIR, out_data_label_filename)
            out_gt_label_filename = os.path.basename(room_path)[:-4] + '_gt.txt'
            out_gt_label_filename = os.path.join(OUTPUT_DIR, out_gt_label_filename)
            fout_data_label = open(out_data_label_filename, 'w')
            fout_gt_label = open(out_gt_label_filename, 'w')

            fout_out_filelist.write(out_data_label_filename + '\n')

            cur_data, cur_sem, cur_group = indoor3d_util.room2blocks_wrapper_normalized(
                room_path, NUM_POINT, block_size=1.0, stride=0.5,
                random_sample=False, sample_num=None)
            cur_data = cur_data[:, 0:NUM_POINT, :]
            cur_sem = np.squeeze(cur_sem)
            cur_group = np.squeeze(cur_group)

            # Get room dimension..
            data_label = np.load(room_path)
            data = data_label[:, 0:6]
            max_room_x = max(data[:, 0])
            max_room_y = max(data[:, 1])
            max_room_z = max(data[:, 2])

            cur_pred_sem = np.zeros_like(cur_sem)
            cur_pred_sem_softmax = np.zeros([cur_sem.shape[0], cur_sem.shape[1], NUM_CLASSES])
            group_output = np.zeros_like(cur_group)

            gap = 5e-3
            volume_num = int(1. / gap) + 1
            volume = -1 * np.ones([volume_num, volume_num, volume_num]).astype(np.int32)
            volume_seg = -1 * np.ones([volume_num, volume_num, volume_num]).astype(np.int32)

            intersections = np.zeros(NEW_NUM_CLASSES)
            unions = np.zeros(NEW_NUM_CLASSES)

            num_data = cur_data.shape[0]
            for j in range(num_data):
                log_string("Processing: Shape [%d] Block[%d]" % (shape_idx, j))

                pts = cur_data[j, ...]
                group = cur_group[j]
                sem = cur_sem[j]

                feed_dict = {
                    ops['pointclouds_pl']: np.expand_dims(pts, 0),
                    ops['labels_pl']: np.expand_dims(group, 0),
                    ops['sem_labels_pl']: np.expand_dims(sem, 0),
                    ops['is_training_pl']: is_training
                }

                loss_val, l_var_val, l_dist_val, l_reg_val, pred_ins_val, pred_sem_label_val, pred_sem_softmax_val = sess.run(
                    [ops['loss'], ops['l_var'], ops['l_dist'], ops['l_reg'],
                     ops['pred_ins'], ops['pred_sem_label'], ops['pred_sem_softmax']],
                    feed_dict=feed_dict)

                pred_val = np.squeeze(pred_ins_val, axis=0)
                pred_sem = np.squeeze(pred_sem_label_val, axis=0)
                pred_sem_softmax = np.squeeze(pred_sem_softmax_val, axis=0)
                cur_pred_sem[j, :] = pred_sem
                cur_pred_sem_softmax[j, ...] = pred_sem_softmax

                # cluster
                group_seg = {}
                bandwidth = BANDWIDTH
                num_clusters, labels, cluster_centers = cluster(pred_val, bandwidth)
                for idx_cluster in range(num_clusters):
                    tmp = (labels == idx_cluster)
                    estimated_seg = int(stats.mode(pred_sem[tmp])[0])
                    group_seg[idx_cluster] = estimated_seg

                groupids_block = labels

                groupids = BlockMerging(volume, volume_seg, pts[:, 6:],
                                        groupids_block.astype(np.int32), group_seg, gap)

                group_output[j, :] = groupids
                total_acc += float(np.sum(pred_sem == sem)) / pred_sem.shape[0]
                total_seen += 1

            group_pred = group_output.reshape(-1)
            seg_pred = cur_pred_sem.reshape(-1)
            seg_pred_softmax = cur_pred_sem_softmax.reshape([-1, NUM_CLASSES])
            pts = cur_data.reshape([-1, 9])

            # filtering
            x = (pts[:, 6] / gap).astype(np.int32)
            y = (pts[:, 7] / gap).astype(np.int32)
            z = (pts[:, 8] / gap).astype(np.int32)
            for i in range(group_pred.shape[0]):
                if volume[x[i], y[i], z[i]] != -1:
                    group_pred[i] = volume[x[i], y[i], z[i]]

            seg_gt = cur_sem.reshape(-1)
            un = np.unique(group_pred)
            pts_in_pred = [[] for itmp in range(NUM_CLASSES)]
            group_pred_final = -1 * np.ones_like(group_pred)
            grouppred_cnt = 0
            for ig, g in enumerate(un):  # each object in prediction
                if g == -1:
                    continue
                tmp = (group_pred == g)
                sem_seg_g = int(stats.mode(seg_pred[tmp])[0])
                #if np.sum(tmp) > 500:
                if np.sum(tmp) > 0.25 * mean_num_pts_in_group[sem_seg_g]:
                    group_pred_final[tmp] = grouppred_cnt
                    pts_in_pred[sem_seg_g] += [tmp]
                    grouppred_cnt += 1

            if output_verbose:
                #output_color_point_cloud(pts[:, 6:], group_pred_final.astype(np.int32),
                #                         os.path.join(OUTPUT_DIR, '%d_grouppred.obj' % (shape_idx)))
                pts[:, 6] *= max_room_x
                pts[:, 7] *= max_room_y
                pts[:, 8] *= max_room_z
                pts[:, 3:6] *= 255.0
                ins = group_pred_final.astype(np.int32)
                sem = seg_pred.astype(np.int32)
                sem_softmax = seg_pred_softmax
                sem_gt = seg_gt
                ins_gt = cur_group.reshape(-1)
                for i in range(pts.shape[0]):
                    fout_data_label.write('%f %f %f %d %d %d %f %d %d\n' % (
                        pts[i, 6], pts[i, 7], pts[i, 8],
                        pts[i, 3], pts[i, 4], pts[i, 5],
                        sem_softmax[i, sem[i]], sem[i], ins[i]))
                    fout_gt_label.write('%d %d\n' % (sem_gt[i], ins_gt[i]))

            fout_data_label.close()
            fout_gt_label.close()

        fout_out_filelist.close()

        # (continuation of insert_batch(): flush the remaining buffered blocks to disk)
                               h5_batch_label[0:buffer_size, ...], data_dtype, label_dtype)
        print(('Stored {0} with size {1}'.format(h5_filename, buffer_size)))
        h5_index += 1
        buffer_size = 0
    return


sample_cnt = 0
for i, data_label_filename in enumerate(data_label_files):
    print(data_label_filename)
    filename = data_label_filename.split('/')[-1].split('.')[0]
    data, label = indoor3d_util.room2blocks_wrapper_normalized(
        data_label_filename, NUM_POINT, block_size=1.0, stride=0.5,
        random_sample=False, sample_num=None, filename=filename)
    print(('{0}, {1}'.format(data.shape, label.shape)))
    for idx in range(data.shape[0]):
        fout_room.write(str(idx) + '_' + os.path.basename(data_label_filename)[0:-4] + '\n')
        fout_room_pf.write(str(idx) + '_' + filename + '\n')
        fout_room_rgb.write(str(idx) + '_' + filename + '\n')

    sample_cnt += data.shape[0]
    insert_batch(data, label, i == len(data_label_files) - 1)

fout_room.close()

def eval_one_epoch(sess, ops, test_writer):
    global BEST_MEAN_IOU
    global BEST_ALL_ACC
    global BEST_CLS_ACC
    log_string('evaluation')
    is_training = False

    gt_classes = [0 for _ in range(13)]
    positive_classes = [0 for _ in range(13)]
    true_positive_classes = [0 for _ in range(13)]

    for room_path in TEST_ROOM_PATH_LIST:
        current_data, current_label = indoor3d_util.room2blocks_wrapper_normalized(room_path, NUM_POINT)
        current_data = current_data[:, 0:NUM_POINT, :]
        current_label = np.squeeze(current_label)

        data_label = np.load(room_path)
        data = data_label[:, 0:6]
        max_room_x = max(data[:, 0])
        max_room_y = max(data[:, 1])
        max_room_z = max(data[:, 2])

        file_size = current_data.shape[0]
        num_batches = file_size // BATCH_SIZE
        # print(file_size)

        for batch_idx in range(num_batches):
            start_idx = batch_idx * BATCH_SIZE
            end_idx = min((batch_idx + 1) * BATCH_SIZE, file_size)
            cur_batch_size = end_idx - start_idx

            feed_dict = {ops['pointclouds_pl']: current_data[start_idx:end_idx, :, :],
                         ops['labels_pl']: current_label[start_idx:end_idx],
                         ops['is_training_pl']: is_training}
            loss_val, pred_val = sess.run([ops['loss'], ops['pred']], feed_dict=feed_dict)
            pred_label = np.argmax(pred_val, 2)

            for i in range(start_idx, end_idx):
                for j in range(NUM_POINT):
                    gt_l = int(current_label[i, j])
                    pred_l = int(pred_label[i - start_idx, j])
                    gt_classes[gt_l] += 1
                    positive_classes[pred_l] += 1
                    true_positive_classes[gt_l] += int(gt_l == pred_l)

    current_all_acc = (sum(true_positive_classes) / float(sum(positive_classes)))
    log_string('overall accuracy: %f' % current_all_acc)

    class_list = []
    for i in range(13):
        acc_class = true_positive_classes[i] / float(gt_classes[i])
        class_list.append(acc_class)
    current_cls_acc = (sum(class_list) / 13.0)
    log_string('avg class accuracy: %f' % current_cls_acc)

    log_string('IoU: ')
    iou_list = []
    for i in range(13):
        iou = true_positive_classes[i] / float(gt_classes[i] + positive_classes[i] - true_positive_classes[i])
        log_string('%f' % iou)
        iou_list.append(iou)
    current_mean_iou = (sum(iou_list) / 13.0)
    log_string('avg IoU %f' % current_mean_iou)

    best_all_acc_flag, best_cls_acc_flag, best_mean_iou_flag = False, False, False
    if current_all_acc > BEST_ALL_ACC:
        BEST_ALL_ACC = current_all_acc
        best_all_acc_flag = True
    if current_cls_acc > BEST_CLS_ACC:
        BEST_CLS_ACC = current_cls_acc
        best_cls_acc_flag = True
    if current_mean_iou > BEST_MEAN_IOU:
        BEST_MEAN_IOU = current_mean_iou
        best_mean_iou_flag = True

    log_string('best_all_acc: %f' % BEST_ALL_ACC)
    log_string('best_cls_acc: %f' % BEST_CLS_ACC)
    log_string('best_mean_iou: %f' % BEST_MEAN_IOU)

    return best_all_acc_flag, best_cls_acc_flag, best_mean_iou_flag

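# The accuracy / IoU tallies above reduce to a small helper. This is an illustrative,
# self-contained sketch (per_class_iou is an assumed name, not part of the training
# script): per-class IoU = TP / (GT + predicted-positive - TP).
import numpy as np

def per_class_iou(gt_classes, positive_classes, true_positive_classes):
    """Per-class IoU from per-class ground-truth, prediction, and true-positive counts."""
    gt = np.asarray(gt_classes, dtype=np.float64)
    pos = np.asarray(positive_classes, dtype=np.float64)
    tp = np.asarray(true_positive_classes, dtype=np.float64)
    union = gt + pos - tp
    # classes that never occur in either prediction or ground truth get IoU 0
    return np.where(union > 0, tp / np.maximum(union, 1e-12), 0.0)

# example with three classes: per_class_iou([100, 50, 0], [90, 60, 5], [80, 40, 0])
# -> approximately [0.727, 0.571, 0.0]
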
        # (tail of insert_batch(): write a chunk when the buffer fills, then flush the remainder)
        h5_filename = output_filename_prefix + '_' + str(h5_index) + '.h5'
        data_prep_util.save_h5(h5_filename, h5_batch_data, h5_batch_label,
                               data_dtype, label_dtype)
        print('Stored {0} with size {1}'.format(h5_filename, h5_batch_data.shape[0]))
        h5_index += 1
        buffer_size = 0
        # recursive call
        insert_batch(data[capacity:, ...], label[capacity:, ...], last_batch)
    if last_batch and buffer_size > 0:
        h5_filename = output_filename_prefix + '_' + str(h5_index) + '.h5'
        data_prep_util.save_h5(h5_filename, h5_batch_data[0:buffer_size, ...],
                               h5_batch_label[0:buffer_size, ...], data_dtype, label_dtype)
        print('Stored {0} with size {1}'.format(h5_filename, buffer_size))
        h5_index += 1
        buffer_size = 0
    return


sample_cnt = 0
for i, data_label_filename in enumerate(data_label_files):
    print(data_label_filename)
    data, label = indoor3d_util.room2blocks_wrapper_normalized(
        data_label_filename, NUM_POINT, block_size=1.0, stride=0.5,
        random_sample=False, sample_num=None)
    print('{0}, {1}'.format(data.shape, label.shape))
    for _ in range(data.shape[0]):
        fout_room.write(os.path.basename(data_label_filename)[0:-4] + '\n')

    sample_cnt += data.shape[0]
    insert_batch(data, label, i == len(data_label_files) - 1)

fout_room.close()
print("Total samples: {0}".format(sample_cnt))

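# Only the flush branch of insert_batch() is visible above. The pattern it implements is
# "stage blocks in a fixed-size buffer, write a numbered chunk whenever the buffer fills,
# and flush the remainder on the last call". Below is a minimal, self-contained sketch of
# that pattern; BufferedWriter and its save_fn callback are illustrative names and are not
# part of the original preprocessing script.
import numpy as np

class BufferedWriter:
    def __init__(self, capacity, data_shape, label_shape, save_fn):
        # fixed-capacity staging buffers, analogous to h5_batch_data / h5_batch_label above
        self.data_buf = np.zeros((capacity,) + data_shape, dtype=np.float32)
        self.label_buf = np.zeros((capacity,) + label_shape, dtype=np.int32)
        self.size = 0         # number of rows currently buffered
        self.chunk_idx = 0    # index of the next chunk to write
        self.save_fn = save_fn  # called as save_fn(chunk_idx, data, label)

    def insert(self, data, label, last_batch=False):
        n = data.shape[0]
        cap = self.data_buf.shape[0]
        if self.size + n <= cap:
            # enough room: copy in and advance the cursor
            self.data_buf[self.size:self.size + n, ...] = data
            self.label_buf[self.size:self.size + n, ...] = label
            self.size += n
        else:
            # buffer full: top it up, flush a full chunk, then recurse on the remainder
            room = cap - self.size
            if room > 0:
                self.data_buf[self.size:self.size + room, ...] = data[:room, ...]
                self.label_buf[self.size:self.size + room, ...] = label[:room, ...]
            self._flush(cap)
            self.insert(data[room:, ...], label[room:, ...], last_batch)
            return
        if last_batch and self.size > 0:
            self._flush(self.size)  # write the final partial chunk

    def _flush(self, count):
        self.save_fn(self.chunk_idx, self.data_buf[:count, ...], self.label_buf[:count, ...])
        self.chunk_idx += 1
        self.size = 0

# usage sketch (my_save_fn is a placeholder that would write an .h5 chunk):
#   writer = BufferedWriter(1000, (4096, 9), (4096,), save_fn=my_save_fn)
#   for i, (d, l) in enumerate(blocks):
#       writer.insert(d, l, last_batch=(i == len(blocks) - 1))
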
def test():
    with tf.Graph().as_default():
        with tf.device('/gpu:' + str(GPU_INDEX)):
            pointclouds_pl, sem_labels_pl = placeholder_inputs(BATCH_SIZE, NUM_POINT)
            is_training_pl = tf.placeholder(tf.bool, shape=())

            # Get model and loss
            pred_sem, end_points = get_model(pointclouds_pl, NUM_CLASSES, is_training_pl,
                                             extra_constraint=False)
            pred_sem_softmax = tf.nn.softmax(pred_sem)
            pred_sem_label = tf.argmax(pred_sem_softmax, axis=2)

            loss = get_loss(pred_sem, sem_labels_pl, end_points, False)

            loader = tf.train.Saver()

        # Create a session
        config = tf.ConfigProto()
        config.gpu_options.allow_growth = True
        config.allow_soft_placement = True
        config.log_device_placement = True
        sess = tf.Session(config=config)

        is_training = False

        # Restore variables from disk.
        loader.restore(sess, MODEL_PATH)
        log_string("Model restored.")

        ops = {'pointclouds_pl': pointclouds_pl,
               'sem_labels_pl': sem_labels_pl,
               'is_training_pl': is_training_pl,
               'pred_sem_label': pred_sem_label,
               'pred_sem_softmax': pred_sem_softmax,
               'loss': loss}

        total_acc = 0.0
        total_seen = 0

        output_filelist_f = os.path.join(LOG_DIR, 'output_filelist.txt')
        fout_out_filelist = open(output_filelist_f, 'w')
        for shape_idx in range(len_pts_files):
            room_path = ROOM_PATH_LIST[shape_idx]
            log_string('%d / %d ...' % (shape_idx, len_pts_files))
            log_string('Loading train file ' + room_path)

            out_data_label_filename = os.path.basename(room_path)[:-4] + '_pred.txt'
            out_data_label_filename = os.path.join(OUTPUT_DIR, out_data_label_filename)
            out_gt_label_filename = os.path.basename(room_path)[:-4] + '_gt.txt'
            out_gt_label_filename = os.path.join(OUTPUT_DIR, out_gt_label_filename)
            fout_data_label = open(out_data_label_filename, 'w')
            fout_gt_label = open(out_gt_label_filename, 'w')

            fout_out_filelist.write(out_data_label_filename + '\n')

            cur_data, cur_sem, _ = indoor3d_util.room2blocks_wrapper_normalized(
                room_path, NUM_POINT, block_size=1.0, stride=0.5,
                random_sample=False, sample_num=None)
            cur_data = cur_data[:, 0:NUM_POINT, :]
            cur_sem = np.squeeze(cur_sem)

            # Get room dimension..
            data_label = np.load(room_path)
            data = data_label[:, 0:6]
            max_room_x = max(data[:, 0])
            max_room_y = max(data[:, 1])
            max_room_z = max(data[:, 2])

            cur_pred_sem = np.zeros_like(cur_sem)
            cur_pred_sem_softmax = np.zeros([cur_sem.shape[0], cur_sem.shape[1], NUM_CLASSES])

            num_data = cur_data.shape[0]
            for j in range(num_data):
                log_string("Processing: Shape [%d] Block[%d]" % (shape_idx, j))

                pts = cur_data[j, ...]
                sem = cur_sem[j]

                feed_dict = {ops['pointclouds_pl']: np.expand_dims(pts, 0),
                             ops['sem_labels_pl']: np.expand_dims(sem, 0),
                             ops['is_training_pl']: is_training}
                _, pred_sem_label_val, pred_sem_softmax_val = sess.run(
                    [ops['loss'], ops['pred_sem_label'], ops['pred_sem_softmax']],
                    feed_dict=feed_dict)

                pred_sem = np.squeeze(pred_sem_label_val, axis=0)
                pred_sem_softmax = np.squeeze(pred_sem_softmax_val, axis=0)
                cur_pred_sem[j, :] = pred_sem
                cur_pred_sem_softmax[j, ...] = pred_sem_softmax
                total_acc += float(np.sum(pred_sem == sem)) / pred_sem.shape[0]
                total_seen += 1

            seg_pred = cur_pred_sem.reshape(-1)
            seg_pred_softmax = cur_pred_sem_softmax.reshape([-1, NUM_CLASSES])
            pts = cur_data.reshape([-1, 9])
            seg_gt = cur_sem.reshape(-1)

            if output_verbose:
                pts[:, 6] *= max_room_x
                pts[:, 7] *= max_room_y
                pts[:, 8] *= max_room_z
                pts[:, 3:6] *= 255.0
                sem = seg_pred.astype(np.int32)
                sem_softmax = seg_pred_softmax
                sem_gt = seg_gt
                for i in range(pts.shape[0]):
                    fout_data_label.write('%f %f %f %d %d %d %f %d\n' % (
                        pts[i, 6], pts[i, 7], pts[i, 8],
                        pts[i, 3], pts[i, 4], pts[i, 5],
                        sem_softmax[i, sem[i]], sem[i]))
                    fout_gt_label.write('%d\n' % (sem_gt[i]))

            fout_data_label.close()
            fout_gt_label.close()

        fout_out_filelist.close()

def test():
    with tf.Graph().as_default():
        with tf.device('/gpu:' + str(GPU_INDEX)):
            pointclouds_pl, labels_pl, sem_labels_pl = placeholder_inputs(BATCH_SIZE, NUM_POINT)
            is_training_pl = tf.placeholder(tf.bool, shape=())

            # Get model
            pred_sem, pred_ins = get_model(pointclouds_pl, is_training_pl, NUM_CLASSES)
            pred_sem_softmax = tf.nn.softmax(pred_sem)
            pred_sem_label = tf.argmax(pred_sem_softmax, axis=2)

            loader = tf.train.Saver()

        # Create a session
        config = tf.ConfigProto()
        config.gpu_options.allow_growth = True
        config.allow_soft_placement = True
        config.log_device_placement = False
        sess = tf.Session(config=config)

        is_training = False

        # Restore variables from disk.
        loader.restore(sess, MODEL_PATH)
        logger.info("Model restored from {}".format(MODEL_PATH))

        ops = {
            'pointclouds_pl': pointclouds_pl,
            'labels_pl': labels_pl,
            'sem_labels_pl': sem_labels_pl,
            'is_training_pl': is_training_pl,
            'pred_ins': pred_ins,
            'pred_sem_label': pred_sem_label,
            'pred_sem_softmax': pred_sem_softmax
        }

        total_acc = 0.0
        total_seen = 0

        output_filelist_f = os.path.join(LOG_DIR, 'output_filelist.txt')
        fout_out_filelist = []
        for shape_idx in range(len_pts_files):
            room_path = ROOM_PATH_LIST[shape_idx]

            out_data_label_filename = os.path.basename(room_path)[:-EXT_LEN] + '_pred.txt'
            out_data_label_filename = os.path.join(OUTPUT_DIR, out_data_label_filename)
            out_gt_label_filename = os.path.basename(room_path)[:-EXT_LEN] + '_gt.txt'
            out_gt_label_filename = os.path.join(OUTPUT_DIR, out_gt_label_filename)

            fout_data_label = []
            fout_gt_label = []

            fout_out_filelist.append(out_data_label_filename + '\n')

            logger.info('%d / %d ...' % (shape_idx, len_pts_files))
            logger.info('Loading file ' + room_path)

            size_path = room_path
            if FILE_TYPE == 'hdf5':
                size_path = size_path.replace('indoor3d_ins_seg_hdf5',
                                              'stanford_indoor3d_ins.sem')
                size_path = "{}.npy".format(size_path[:-3])
                cur_data, cur_group, _, cur_sem = \
                    provider.loadDataFile_with_groupseglabel_stanfordindoor(room_path)
            elif FILE_TYPE == 'numpy':
                cur_data, cur_sem, cur_group = \
                    indoor3d_util.room2blocks_wrapper_normalized(room_path, NUM_POINT,
                                                                 block_size=1.0,
                                                                 stride=0.5,
                                                                 random_sample=False,
                                                                 sample_num=None)
                cur_data = cur_data[:, 0:NUM_POINT, :]
            cur_sem = np.squeeze(cur_sem)
            cur_group = np.squeeze(cur_group)

            # Get room dimension..
            data_label = np.load(size_path)
            data = data_label[:, 0:6]
            max_room_x = max(data[:, 0])
            max_room_y = max(data[:, 1])
            max_room_z = max(data[:, 2])

            cur_pred_sem = np.zeros_like(cur_sem)
            cur_pred_sem_softmax = np.zeros([cur_sem.shape[0], cur_sem.shape[1], NUM_CLASSES])
            group_output = np.zeros_like(cur_group)

            gap = 5e-3
            volume_num = int(1. / gap) + 1
            volume = -1 * np.ones([volume_num, volume_num, volume_num]).astype(np.int32)
            volume_seg = -1 * np.ones([volume_num, volume_num, volume_num]).astype(np.int32)

            num_data = cur_data.shape[0]
            for j in range(num_data):
                logger.info("Processing: Shape [%d] Block[%d]" % (shape_idx, j))

                pts = cur_data[j, ...]
                group = cur_group[j]
                sem = cur_sem[j]

                feed_dict = {
                    ops['pointclouds_pl']: np.expand_dims(pts, 0),
                    ops['labels_pl']: np.expand_dims(group, 0),
                    ops['sem_labels_pl']: np.expand_dims(sem, 0),
                    ops['is_training_pl']: is_training
                }
                pred_ins_val, pred_sem_label_val, pred_sem_softmax_val = sess.run(
                    [ops['pred_ins'], ops['pred_sem_label'], ops['pred_sem_softmax']],
                    feed_dict=feed_dict)

                pred_val = np.squeeze(pred_ins_val, axis=0)
                pred_sem = np.squeeze(pred_sem_label_val, axis=0)
                pred_sem_softmax = np.squeeze(pred_sem_softmax_val, axis=0)
                cur_pred_sem[j, :] = pred_sem
                cur_pred_sem_softmax[j, ...] = pred_sem_softmax

                # cluster
                group_seg = {}
                bandwidth = BANDWIDTH
                num_clusters, labels, cluster_centers = cluster(pred_val, bandwidth)
                for idx_cluster in range(num_clusters):
                    tmp = (labels == idx_cluster)
                    estimated_seg = int(stats.mode(pred_sem[tmp])[0])
                    group_seg[idx_cluster] = estimated_seg

                groupids_block = labels

                groupids = BlockMerging(volume, volume_seg, pts[:, 6:],
                                        groupids_block.astype(np.int32), group_seg, gap)

                group_output[j, :] = groupids
                total_acc += float(np.sum(pred_sem == sem)) / pred_sem.shape[0]
                total_seen += 1

            group_pred = group_output.reshape(-1)
            seg_pred = cur_pred_sem.reshape(-1)
            seg_pred_softmax = cur_pred_sem_softmax.reshape([-1, NUM_CLASSES])
            pts = cur_data.reshape([-1, 9])

            # filtering
            x = (pts[:, 6] / gap).astype(np.int32)
            y = (pts[:, 7] / gap).astype(np.int32)
            z = (pts[:, 8] / gap).astype(np.int32)
            for i in range(group_pred.shape[0]):
                if volume[x[i], y[i], z[i]] != -1:
                    group_pred[i] = volume[x[i], y[i], z[i]]

            seg_gt = cur_sem.reshape(-1)
            un = np.unique(group_pred)
            pts_in_pred = [[] for itmp in range(NUM_CLASSES)]
            group_pred_final = -1 * np.ones_like(group_pred)
            grouppred_cnt = 0
            for ig, g in enumerate(un):  # each object in prediction
                if g == -1:
                    continue
                tmp = (group_pred == g)
                sem_seg_g = int(stats.mode(seg_pred[tmp])[0])
                # if np.sum(tmp) > 500:
                if np.sum(tmp) > 0.25 * mean_num_pts_in_group[sem_seg_g]:
                    group_pred_final[tmp] = grouppred_cnt
                    pts_in_pred[sem_seg_g] += [tmp]
                    grouppred_cnt += 1

            pts[:, 6] *= max_room_x
            pts[:, 7] *= max_room_y
            pts[:, 8] *= max_room_z
            pts[:, 3:6] *= 255.0
            ins = group_pred_final.astype(np.int32)
            sem = seg_pred.astype(np.int32)
            sem_softmax = seg_pred_softmax
            sem_gt = seg_gt
            ins_gt = cur_group.reshape(-1)

            for i in range(pts.shape[0]):
                fout_data_label.append('%f %f %f %d %d %d %f %d %d\n' % (
                    pts[i, 6], pts[i, 7], pts[i, 8],
                    pts[i, 3], pts[i, 4], pts[i, 5],
                    sem_softmax[i, sem[i]], sem[i], ins[i]))
                fout_gt_label.append('%d %d\n' % (sem_gt[i], ins_gt[i]))

            with open(out_data_label_filename, 'w') as fd:
                fd.writelines(fout_data_label)
            with open(out_gt_label_filename, 'w') as fd:
                fd.writelines(fout_gt_label)

            if output_verbose:
                # file name
                outfile_name = ROOM_PATH_LIST[shape_idx].split('/')[-1][:-EXT_LEN]
                # Raw Point Cloud
                output_point_cloud_rgb(
                    pts[:, 6:], pts[:, 3:6].astype(np.int32),
                    os.path.join(VIS_DIR, '{}_raw.obj'.format(outfile_name)))
                logger.info('Saving file {}_raw.obj'.format(outfile_name))
                # Instance Prediction
                output_color_point_cloud(
                    pts[:, 6:], group_pred_final.astype(np.int32),
                    os.path.join(VIS_DIR, '{}_pred_ins.obj'.format(outfile_name)))
                logger.info('Saving file {}_pred_ins.obj'.format(outfile_name))
                # Semantic Prediction
                output_color_point_cloud(
                    pts[:, 6:], seg_pred.astype(np.int32),
                    os.path.join(VIS_DIR, '{}_pred_sem.obj'.format(outfile_name)))
                logger.info('Saving file {}_pred_sem.obj'.format(outfile_name))
                # Instance Ground Truth
                output_color_point_cloud(
                    pts[:, 6:], ins_gt,
                    os.path.join(VIS_DIR, '{}_gt_ins.obj'.format(outfile_name)))
                logger.info('Saving file {}_gt_ins.obj'.format(outfile_name))
                # Semantic Ground Truth
                output_color_point_cloud(
                    pts[:, 6:], sem_gt,
                    os.path.join(VIS_DIR, '{}_gt_sem.obj'.format(outfile_name)))
                logger.info('Saving file {}_gt_sem.obj'.format(outfile_name))

        with open(output_filelist_f, 'w') as fd:
            fd.writelines(fout_out_filelist)

def eval_one_epoch(sess, ops, room_path, out_data_label_filename, out_gt_label_filename):
    error_cnt = 0
    is_training = False
    total_correct = 0
    total_seen = 0
    loss_sum = 0
    total_seen_class = [0 for _ in range(NUM_CLASSES)]
    total_correct_class = [0 for _ in range(NUM_CLASSES)]

    if FLAGS.visu:
        print("entering visu ")
        #fout = open(os.path.join(DUMP_DIR, os.path.basename(room_path)[:-4] + '_pred.obj'), 'w')
        fout = open(os.path.join(DUMP_DIR, os.path.basename(room_path)[:-4] + '_pred.obj'), 'w')
        #fout_gt = open(os.path.join(DUMP_DIR, os.path.basename(room_path)[:-4] + '_gt.obj'), 'w')
        fout_gt = open(os.path.join(DUMP_DIR, os.path.basename(room_path)[:-4] + '_gt.obj'), 'w')
        fout_real_color = open(os.path.join(DUMP_DIR, os.path.basename(room_path)[:-4] + '_real_color.obj'), 'w')
    fout_data_label = open(out_data_label_filename, 'w')
    fout_gt_label = open(out_gt_label_filename, 'w')

    print("Entering normalized data")
    current_data, current_label, koefisien = indoor3d_util.room2blocks_wrapper_normalized(
        room_path, NUM_POINT, block_size=block_size, stride=block_size, rgb=RGB)
    print("Done normalized data")
    current_data = current_data[:, 0:NUM_POINT, :]
    koefisien = koefisien[:, 0:NUM_POINT, :]
    current_label = np.squeeze(current_label)

    # Get room dimension..
    data_label = np.load(room_path)
    if RGB:
        data = data_label[:, 0:6]
    else:
        data = data_label[:, 0:3]
    max_room_x = max(data[:, 0])
    max_room_y = max(data[:, 1])
    max_room_z = max(data[:, 2])
    print(max_room_x, max_room_y, max_room_z)

    file_size = current_data.shape[0]
    num_batches = file_size // BATCH_SIZE
    print(file_size)

    for batch_idx in range(num_batches):
        start_idx = batch_idx * BATCH_SIZE
        end_idx = (batch_idx + 1) * BATCH_SIZE
        cur_batch_size = end_idx - start_idx

        feed_dict = {ops['pointclouds_pl']: current_data[start_idx:end_idx, :, :],
                     ops['labels_pl']: current_label[start_idx:end_idx],
                     ops['is_training_pl']: is_training}
        loss_val, pred_val = sess.run([ops['loss'], ops['pred_softmax']],
                                      feed_dict=feed_dict)

        if FLAGS.no_clutter:
            pred_label = np.argmax(pred_val[:, :, 0:12], 2)  # BxN
        else:
            pred_label = np.argmax(pred_val, 2)  # BxN

        # Save prediction labels to OBJ file
        for b in range(BATCH_SIZE):
            pts = current_data[start_idx + b, :, :]
            koef = koefisien[start_idx + b, :, :]
            l = current_label[start_idx + b, :]
            if RGB:
                pts[:, 6] *= max_room_x
                pts[:, 7] *= max_room_y
                pts[:, 8] *= max_room_z
                pts[:, 3:6] *= 65535.0
            else:
                pts[:, 3] *= max_room_x
                pts[:, 4] *= max_room_y
                pts[:, 5] *= max_room_z
            pred = pred_label[b, :]
            for i in range(NUM_POINT):
                key = f'{pts[i,0]+koef[i,0]},{pts[i,1]+koef[i,1]},{pts[i,2]}'
                real_pts = MAPPING[key].split(",")
                color = indoor3d_util.g_label2color[pred[i]]
                color_gt = indoor3d_util.g_label2color[current_label[start_idx + b, i]]
                if FLAGS.visu:
                    if RGB:
                        # fout.write('v %f %f %f %d %d %d\n' % (pts[i,6], pts[i,7], pts[i,8], color[0], color[1], color[2]))
                        # fout_gt.write('v %f %f %f %d %d %d\n' % (pts[i,6], pts[i,7], pts[i,8], color_gt[0], color_gt[1], color_gt[2]))
                        pass
                    else:
                        # fout.write('v %f %f %f %d %d %d\n' % (pts[i, 3], pts[i, 4], pts[i, 5], color[0], color[1], color[2]))
                        # fout_gt.write('v %f %f %f %d %d %d\n' % (pts[i, 3], pts[i, 4], pts[i, 5], color_gt[0], color_gt[1], color_gt[2]))
                        pass
                    fout.write(f'v {real_pts[0]} {real_pts[1]} {real_pts[2]} {color[0]} {color[1]} {color[2]}\n')
                    fout_gt.write(f'v {real_pts[0]} {real_pts[1]} {real_pts[2]} {color_gt[0]} {color_gt[1]} {color_gt[2]}\n')
                if RGB:
                    #fout_data_label.write(f'{real_pts[0]} {real_pts[1]} {real_pts[2]} {color_gt[0]} {color_gt[1]} {color_gt[2]}\n')
                    fout_data_label.write(f'{real_pts[0]} {real_pts[1]} {real_pts[2]} {pts[i,3]} {pts[i,4]} {pts[i,5]} {pred_val[b,i,pred[i]]} {pred[i]}\n')
                else:
                    fout_data_label.write(f'{real_pts[0]} {real_pts[1]} {real_pts[2]} {pts[i,0]} {pts[i,1]} {pts[i,2]} {pred_val[b,i,pred[i]]} {pred[i]}\n')
                    #fout_data_label.write(f'{real_pts[0]} {real_pts[1]} {real_pts[2]} {color_gt[0]} {color_gt[1]} {color_gt[2]}\n')
                #fout_gt_label.write(f'v {real_pts[0]} {real_pts[1]} {real_pts[2]} {color_gt[0]} {color_gt[1]} {color_gt[2]}\n')
                #fout_gt_label.write(f'{real_pts[0]} {real_pts[1]} {real_pts[2]} {color_gt[0]} {color_gt[1]} {color_gt[2]}\n')
                fout_gt_label.write('%d\n' % (l[i]))

        correct = np.sum(pred_label == current_label[start_idx:end_idx, :])
        total_correct += correct
        total_seen += (cur_batch_size * NUM_POINT)
        loss_sum += (loss_val * BATCH_SIZE)
        for i in range(start_idx, end_idx):
            for j in range(NUM_POINT):
                l = current_label[i, j]
                total_seen_class[l] += 1
                total_correct_class[l] += (pred_label[i - start_idx, j] == l)

    log_string('eval mean loss: %f' % (loss_sum / float(total_seen / NUM_POINT)))
    log_string('eval accuracy: %f' % (total_correct / float(total_seen)))
    fout_data_label.close()
    fout_gt_label.close()
    if FLAGS.visu:
        fout.close()
        fout_gt.close()
    return total_correct, total_seen

purify = False  # Reassign label based on k-nearest neighbor. Set to False for large point cloud due to slow speed
knn = 5  # for the purify

test_model = 3
test_area = 3

ckpt_dir = './log{}'.format(test_model)
output_dir = os.path.join(BASE_DIR, './log{}/test'.format(test_model))
flog = open(os.path.join(output_dir, 'log_test.txt'), 'w')

TESTING_FILE_LIST = './meta/area{}_data_label.txt'.format(test_area)
ROOM_PATH_LIST = [line.rstrip() for line in open(TESTING_FILE_LIST)]

test_data = []
test_sem = []
for room_path in ROOM_PATH_LIST:
    current_data, current_label = indoor3d_util.room2blocks_wrapper_normalized(room_path, model.NUM_POINT)
    current_data = current_data[:, :, :3]
    current_label = np.squeeze(current_label)
    test_data.append(current_data)
    test_sem.append(current_label)

test_data = np.concatenate(test_data, axis=0)
test_label = np.concatenate(test_sem, axis=0)

print('test_model:', test_model)
print('test_area:', test_area)
print('test_data:', test_data.shape)
print('test_label:', test_label.shape)


def printout(flog, data):
    print(data)
    flog.write(data + '\n')

def eval_one_epoch(sess, ops, room_path, out_data_label_filename, out_gt_label_filename):
    error_cnt = 0
    is_training = False
    total_correct = 0
    total_seen = 0
    loss_sum = 0
    total_seen_class = [0 for _ in range(NUM_CLASSES)]
    total_correct_class = [0 for _ in range(NUM_CLASSES)]

    if FLAGS.visu:
        fout = open(os.path.join(DUMP_DIR, os.path.basename(room_path)[:-4] + '_pred.obj'), 'w')
        fout_gt = open(os.path.join(DUMP_DIR, os.path.basename(room_path)[:-4] + '_gt.obj'), 'w')
    fout_data_label = open(out_data_label_filename, 'w')
    fout_gt_label = open(out_gt_label_filename, 'w')

    current_data, current_label = indoor3d_util.room2blocks_wrapper_normalized(room_path, NUM_POINT)
    current_data = current_data[:, 0:NUM_POINT, :]
    current_label = np.squeeze(current_label)

    # Get room dimension..
    data_label = np.load(room_path)
    data = data_label[:, 0:6]
    max_room_x = max(data[:, 0])
    max_room_y = max(data[:, 1])
    max_room_z = max(data[:, 2])

    file_size = current_data.shape[0]
    num_batches = file_size // BATCH_SIZE
    print(file_size)

    for batch_idx in range(num_batches):
        start_idx = batch_idx * BATCH_SIZE
        end_idx = (batch_idx + 1) * BATCH_SIZE
        cur_batch_size = end_idx - start_idx

        feed_dict = {ops['pointclouds_pl']: current_data[start_idx:end_idx, :, :],
                     ops['labels_pl']: current_label[start_idx:end_idx],
                     ops['is_training_pl']: is_training}
        loss_val, pred_val = sess.run([ops['loss'], ops['pred_softmax']],
                                      feed_dict=feed_dict)

        if FLAGS.no_clutter:
            pred_label = np.argmax(pred_val[:, :, 0:12], 2)  # BxN
        else:
            pred_label = np.argmax(pred_val, 2)  # BxN

        # Save prediction labels to OBJ file
        for b in range(BATCH_SIZE):
            pts = current_data[start_idx + b, :, :]
            l = current_label[start_idx + b, :]
            pts[:, 6] *= max_room_x
            pts[:, 7] *= max_room_y
            pts[:, 8] *= max_room_z
            pts[:, 3:6] *= 255.0
            pred = pred_label[b, :]
            for i in range(NUM_POINT):
                color = indoor3d_util.g_label2color[pred[i]]
                color_gt = indoor3d_util.g_label2color[current_label[start_idx + b, i]]
                if FLAGS.visu:
                    fout.write('v %f %f %f %d %d %d\n' % (
                        pts[i, 6], pts[i, 7], pts[i, 8], color[0], color[1], color[2]))
                    fout_gt.write('v %f %f %f %d %d %d\n' % (
                        pts[i, 6], pts[i, 7], pts[i, 8], color_gt[0], color_gt[1], color_gt[2]))
                fout_data_label.write('%f %f %f %d %d %d %f %d\n' % (
                    pts[i, 6], pts[i, 7], pts[i, 8], pts[i, 3], pts[i, 4], pts[i, 5],
                    pred_val[b, i, pred[i]], pred[i]))
                fout_gt_label.write('%d\n' % (l[i]))

        correct = np.sum(pred_label == current_label[start_idx:end_idx, :])
        total_correct += correct
        total_seen += (cur_batch_size * NUM_POINT)
        loss_sum += (loss_val * BATCH_SIZE)
        for i in range(start_idx, end_idx):
            for j in range(NUM_POINT):
                l = current_label[i, j]
                total_seen_class[l] += 1
                total_correct_class[l] += (pred_label[i - start_idx, j] == l)

    log_string('eval mean loss: %f' % (loss_sum / float(total_seen / NUM_POINT)))
    log_string('eval accuracy: %f' % (total_correct / float(total_seen)))
    fout_data_label.close()
    fout_gt_label.close()
    if FLAGS.visu:
        fout.close()
        fout_gt.close()
    return total_correct, total_seen