    net = tf_utils.fully_connected(y_tilde, 1024, bn=True, is_training=is_training,
                                   scope='fc1', bn_decay=bn_decay)
    net = tf_utils.fully_connected(net, 1024, bn=True, is_training=is_training,
                                   scope='fc2', bn_decay=bn_decay)
    net = tf_utils.fully_connected(net, 2048 * 3, activation_fn=None, scope='fc3')
    net = tf.reshape(net, (batch_size, 2048, 3))
    return net


if __name__ == '__main__':
    tf.enable_eager_execution()
    TRAIN_DATASET = part_dataset.PartDataset(
        root='/data/dataset/shapenetcore_partanno_segmentation_benchmark_v0',
        npoints=2048, classification=False, class_choice=None, split='trainval')
    print('=============')
    print(input_fn(TRAIN_DATASET, 2, 8, repeat=True, prefetch_size=6))
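
# `input_fn` is defined elsewhere in this file. As a rough illustration only,
# a tf.data-based pipeline with a comparable signature could look like the
# sketch below; the meaning of the positional arguments (batch size, epoch
# count) and the element dtypes are assumptions, not the confirmed implementation.
def input_fn_sketch(dataset, batch_size, num_epochs, repeat=False, prefetch_size=1):
    def gen():
        for i in range(len(dataset)):
            yield dataset[i]  # PartDataset yields a (points, seg_labels) pair
    ds = tf.data.Dataset.from_generator(gen, output_types=(tf.float32, tf.int32))
    if repeat:
        ds = ds.repeat(num_epochs)  # cycle through the data num_epochs times
    return ds.batch(batch_size).prefetch(prefetch_size)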
os.system('cp %s %s' % (MODEL_FILE, LOG_DIR))  # bkp of model def
os.system('cp train.py %s' % (LOG_DIR))  # bkp of train procedure
LOG_FOUT = open(os.path.join(LOG_DIR, 'log_train.txt'), 'w')
LOG_FOUT.write(str(FLAGS) + '\n')

BN_INIT_DECAY = 0.5
BN_DECAY_DECAY_RATE = 0.5
BN_DECAY_DECAY_STEP = float(DECAY_STEP)
BN_DECAY_CLIP = 0.99

HOSTNAME = socket.gethostname()

# Shapenet official train/test split
# DATA_PATH = os.path.join(BASE_DIR, 'data/shapenetcore_partanno_segmentation_benchmark_v0')
DATA_PATH = '/home/sohee/code/dataset/shapenet/shapenetcore_partanno_segmentation_benchmark_v0'
TRAIN_DATASET = part_dataset.PartDataset(
    root=DATA_PATH, npoints=NUM_POINT, classification=False,
    class_choice=FLAGS.category, split='trainval')
TEST_DATASET = part_dataset.PartDataset(
    root=DATA_PATH, npoints=NUM_POINT, classification=False,
    class_choice=FLAGS.category, split='test')


def log_string(out_str):
    LOG_FOUT.write(out_str + '\n')
    LOG_FOUT.flush()
    print(out_str)


def get_learning_rate(batch):
    learning_rate = tf.train.exponential_decay(
        BASE_LEARNING_RATE,   # Base learning rate.
        batch * BATCH_SIZE,   # Current index into the dataset.
        DECAY_STEP,           # Decay step.
        DECAY_RATE,           # Decay rate.
        staircase=True)
    # Fixed: the clipped value was assigned to a misspelled name ("learing_rate"),
    # so the clip silently had no effect.
    learning_rate = tf.maximum(learning_rate, 0.00001)  # CLIP THE LEARNING RATE!
    return learning_rate
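
# The BN_* constants above parameterize a batch-norm momentum schedule. The
# companion helper (present in the standard PointNet training script this file
# follows) is sketched here for completeness; treat it as an assumption about
# code defined nearby rather than part of this excerpt.
def get_bn_decay(batch):
    bn_momentum = tf.train.exponential_decay(
        BN_INIT_DECAY,
        batch * BATCH_SIZE,
        BN_DECAY_DECAY_STEP,
        BN_DECAY_DECAY_RATE,
        staircase=True)
    bn_decay = tf.minimum(BN_DECAY_CLIP, 1 - bn_momentum)  # cap decay at 0.99
    return bn_decay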
    type=int, default=1,
    help='Number of groups of generated points -- used for hierarchical FC decoder. [default: 1]')
FLAGS = parser.parse_args()

MODEL_PATH = FLAGS.model_path
GPU_INDEX = FLAGS.gpu
NUM_POINT = FLAGS.num_point
MODEL = importlib.import_module(FLAGS.model)  # import network module

DATA_PATH = os.path.join(
    BASE_DIR, 'data/shapenetcore_partanno_segmentation_benchmark_v0')
TEST_DATASET = part_dataset.PartDataset(
    root=DATA_PATH, npoints=NUM_POINT, classification=False,
    class_choice=FLAGS.category, split='test', normalize=True)
print(len(TEST_DATASET))


def get_model(batch_size, num_point):
    with tf.Graph().as_default():
        with tf.device('/gpu:' + str(GPU_INDEX)):
            pointclouds_pl, labels_pl = MODEL.placeholder_inputs(
                batch_size, num_point)
            is_training_pl = tf.placeholder(tf.bool, shape=())
            pred, end_points = MODEL.get_model(pointclouds_pl, is_training_pl)
            loss = MODEL.get_loss(pred, labels_pl, end_points)
            saver = tf.train.Saver()
        # Create a session
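        # The excerpt cuts off here; a typical continuation (sketch, with the
        # ops-dict keys assumed for illustration) restores the checkpoint and
        # hands back the session plus the tensors needed at inference time:
        config = tf.ConfigProto()
        config.gpu_options.allow_growth = True
        config.allow_soft_placement = True
        sess = tf.Session(config=config)
        saver.restore(sess, MODEL_PATH)  # load trained weights from disk
        ops = {'pointclouds_pl': pointclouds_pl,
               'labels_pl': labels_pl,
               'is_training_pl': is_training_pl,
               'pred': pred,
               'loss': loss}
        return sess, ops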