Example #1
BN_INIT_DECAY = 0.5
BN_DECAY_DECAY_RATE = 0.5
BN_DECAY_DECAY_STEP = float(DECAY_STEP)
BN_DECAY_CLIP = 0.99

HOSTNAME = socket.gethostname()

NUM_CLASSES = 40

# ModelNet official train/test split
if FLAGS.normal:
    assert (NUM_POINT <= 10000)
    DATA_PATH = os.path.join(ROOT_DIR, 'data/modelnet40_normal_resampled')
    TRAIN_DATASET = modelnet_dataset.ModelNetDataset(
        root=DATA_PATH,
        npoints=NUM_POINT,
        split='train',
        normal_channel=FLAGS.normal,
        batch_size=BATCH_SIZE)
    TEST_DATASET = modelnet_dataset.ModelNetDataset(
        root=DATA_PATH,
        npoints=NUM_POINT,
        split='test',
        normal_channel=FLAGS.normal,
        batch_size=BATCH_SIZE)
else:
    assert (NUM_POINT <= 2048)
    TRAIN_DATASET = modelnet_h5_dataset.ModelNetH5Dataset(
        os.path.join(BASE_DIR,
                     'data/modelnet40_ply_hdf5_2048/train_files.txt'),
        batch_size=BATCH_SIZE,
        npoints=NUM_POINT,
        shuffle=True)
    TEST_DATASET = modelnet_h5_dataset.ModelNetH5Dataset(
        os.path.join(BASE_DIR,
                     'data/modelnet40_ply_hdf5_2048/test_files.txt'),
        batch_size=BATCH_SIZE,
        npoints=NUM_POINT,
        shuffle=False)
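
In the reference PointNet++ training script, the BN_* constants above feed a batch-norm momentum schedule. A minimal sketch, assuming TensorFlow 1.x and a `batch` global-step tensor (both assumptions, not shown in the excerpt):

import tensorflow as tf

def get_bn_decay(batch):
    # Decay the BN momentum exponentially from BN_INIT_DECAY as training
    # progresses, then clip the resulting decay value at BN_DECAY_CLIP.
    bn_momentum = tf.train.exponential_decay(
        BN_INIT_DECAY,
        batch * BATCH_SIZE,      # number of samples processed so far
        BN_DECAY_DECAY_STEP,
        BN_DECAY_DECAY_RATE,
        staircase=True)
    return tf.minimum(BN_DECAY_CLIP, 1 - bn_momentum)
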
Example #2
BN_DECAY_DECAY_STEP = float(config.decay_step)
BN_DECAY_CLIP = 0.99

# ModelNet official train/test split
if config.normal:
    CHANNELS = 6
    """TRAIN_FILES = os.path.join(config.data, 'train_files.txt')
    TEST_FILES = os.path.join(config.data, 'test_files.txt')
    TRAIN_DATASET = modelnet_h5_dataset.ModelNetH5Dataset(TRAIN_FILES, batch_size=config.batch_size, npoints=config.num_points, shuffle=True, normal_channel=True)
    TEST_DATASET = modelnet_h5_dataset.ModelNetH5Dataset(TEST_FILES, batch_size=config.batch_size, npoints=config.num_points, shuffle=False, normal_channel=True)"""
    assert (config.num_points <= 10000)
    DATA_PATH = os.path.join(config.data, 'data/modelnet40_normal_resampled')
    TRAIN_DATASET = modelnet_dataset.ModelNetDataset(
        root=DATA_PATH,
        npoints=config.num_points,
        split='train',
        normal_channel=config.normal,
        batch_size=config.batch_size)
    TEST_DATASET = modelnet_dataset.ModelNetDataset(
        root=DATA_PATH,
        npoints=config.num_points,
        split='test',
        normal_channel=config.normal,
        batch_size=config.batch_size)
else:
    assert (config.num_points <= 2048)
    CHANNELS = 3
    TRAIN_FILES = os.path.join(config.data, 'train_files.txt')
    TEST_FILES = os.path.join(config.data, 'test_files.txt')
    TRAIN_DATASET = modelnet_h5_dataset.ModelNetH5Dataset(
        TRAIN_FILES,
        batch_size=config.batch_size,
        npoints=config.num_points,
        shuffle=True)
    TEST_DATASET = modelnet_h5_dataset.ModelNetH5Dataset(
        TEST_FILES,
        batch_size=config.batch_size,
        npoints=config.num_points,
        shuffle=False)
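
CHANNELS encodes the per-point feature width: 3 for XYZ only, 6 when normals are appended. A sketch of how it would size the network input, assuming TensorFlow 1.x; `pointclouds_pl` and `labels_pl` are illustrative names, not part of the excerpt:

import tensorflow as tf

# Input placeholders: one point cloud per batch element, CHANNELS
# features per point, one integer class label per cloud.
pointclouds_pl = tf.placeholder(
    tf.float32, shape=(config.batch_size, config.num_points, CHANNELS))
labels_pl = tf.placeholder(tf.int32, shape=(config.batch_size,))
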
Example #3

BN_INIT_DECAY = 0.5
BN_DECAY_DECAY_RATE = 0.5
BN_DECAY_DECAY_STEP = float(DECAY_STEP)
BN_DECAY_CLIP = 0.99

HOSTNAME = socket.gethostname()

NUM_CLASSES = 40

# ModelNet official train/test split
if NORMAL_FLAG:
	assert(NUM_POINT<=10000)
	DATA_PATH = os.path.join(ROOT_DIR, 'data/modelnet40_normal_resampled')
	TRAIN_DATASET = modelnet_dataset.ModelNetDataset(root=DATA_PATH, npoints=NUM_POINT, split='train', normal_channel=NORMAL_FLAG,
		batch_size=BATCH_SIZE, rotate=ROTATE_FLAG)
	TEST_DATASET = modelnet_dataset.ModelNetDataset(root=DATA_PATH, npoints=NUM_POINT, split='test', normal_channel=NORMAL_FLAG,
		batch_size=BATCH_SIZE)
else:
	assert(NUM_POINT<=2048)
	TRAIN_DATASET = modelnet_h5_dataset.ModelNetH5Dataset(os.path.join(BASE_DIR, 'data/modelnet40_ply_hdf5_2048/train_files.txt'),
		batch_size=BATCH_SIZE, npoints=NUM_POINT, shuffle=True, rotate=ROTATE_FLAG)
	TEST_DATASET = modelnet_h5_dataset.ModelNetH5Dataset(os.path.join(BASE_DIR, 'data/modelnet40_ply_hdf5_2048/test_files.txt'),
		batch_size=BATCH_SIZE, npoints=NUM_POINT, shuffle=False)

def log_string(out_str):
	LOG_FOUT.write(out_str+'\n')
	LOG_FOUT.flush()
	print(out_str)
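
log_string writes to a LOG_FOUT handle that the excerpt does not show. In the PointNet++ training scripts it is typically opened once at startup; a sketch, assuming a LOG_DIR value that the full script would read from its flags:

import os

LOG_DIR = 'log'  # assumed default; the full script takes this from FLAGS
if not os.path.exists(LOG_DIR):
    os.makedirs(LOG_DIR)
LOG_FOUT = open(os.path.join(LOG_DIR, 'log_train.txt'), 'w')

log_string('pid: %s, host: %s' % (os.getpid(), HOSTNAME))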

Example #4
BN_INIT_DECAY = 0.5
BN_DECAY_DECAY_RATE = 0.5
BN_DECAY_DECAY_STEP = float(DECAY_STEP)
BN_DECAY_CLIP = 0.99

HOSTNAME = socket.gethostname()

# ModelNet official train/test split

if FLAGS.normal:
    assert (NUM_POINT <= 10000)
    DATA_PATH = os.path.join(ROOT_DIR, 'data/modelnet40_normal_resampled')
    TRAIN_DATASET = modelnet_dataset.ModelNetDataset(root=DATA_PATH,
                                                     npoints=NUM_POINT,
                                                     split='train',
                                                     normal_channel=False,
                                                     modelnet10=True,
                                                     batch_size=BATCH_SIZE,
                                                     unsupervised=True)
    TEST_DATASET = modelnet_dataset.ModelNetDataset(root=DATA_PATH,
                                                    npoints=NUM_POINT,
                                                    split='test',
                                                    normal_channel=False,
                                                    modelnet10=True,
                                                    batch_size=BATCH_SIZE)
else:
    assert (NUM_POINT <= 2048)
    TRAIN_DATASET = modelnet_h5_dataset.ModelNetH5Dataset(
        os.path.join(BASE_DIR,
                     'data/modelnet40_ply_hdf5_2048/train_files.txt'),
        batch_size=BATCH_SIZE,
        npoints=NUM_POINT,
        shuffle=True)
    TEST_DATASET = modelnet_h5_dataset.ModelNetH5Dataset(
        os.path.join(BASE_DIR,
                     'data/modelnet40_ply_hdf5_2048/test_files.txt'),
        batch_size=BATCH_SIZE,
        npoints=NUM_POINT,
        shuffle=False)
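
Both dataset classes in the PointNet++ codebase are consumed the same way. A sketch of one training epoch, assuming the has_next_batch/next_batch/reset interface of the reference modelnet_h5_dataset implementation:

# One pass over the training set; next_batch yields points of shape
# (BATCH_SIZE, NUM_POINT, C) and integer labels of shape (BATCH_SIZE,).
while TRAIN_DATASET.has_next_batch():
    batch_data, batch_label = TRAIN_DATASET.next_batch(augment=True)
    # ... run the training op on batch_data/batch_label here ...
TRAIN_DATASET.reset()  # rewind for the next epoch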