# Example 1
# Back up the dataset-class source file next to the logs for reproducibility.
os.system('cp scannet_dataset.py %s' % (LOG_DIR)) # bkp of dataset class
# Training log; all run parameters are recorded at the top of the file.
LOG_FOUT = open(os.path.join(LOG_DIR, 'log_train.txt'), 'w')
LOG_FOUT.write(str(FLAGS)+'\n')

# Batch-norm decay schedule constants: start at BN_INIT_DECAY and decay by
# BN_DECAY_DECAY_RATE every BN_DECAY_DECAY_STEP steps, clipped at BN_DECAY_CLIP.
# NOTE(review): presumably consumed by the model/training graph defined
# elsewhere in this file — confirm against the model code.
BN_INIT_DECAY = 0.5
BN_DECAY_DECAY_RATE = 0.5
BN_DECAY_DECAY_STEP = float(DECAY_STEP)
BN_DECAY_CLIP = 0.99

HOSTNAME = socket.gethostname()

# Shapenet official train/test split
# DATA_PATH = os.path.join(ROOT_DIR,'data','scannet_data_pointnet2')
# NOTE(review): hard-coded absolute path to a local machine; consider
# restoring the ROOT_DIR-relative form above or reading it from FLAGS.
DATA_PATH = '/home/markus/thesis/data'

# Train/validation datasets share the same root, database name, point count
# and voxel size; only the split differs. The whole-scene variant is used
# for evaluation over complete scenes rather than sampled chunks.
TRAIN_DATASET = scannet_dataset.ScannetDataset(root=DATA_PATH, data_base_name=DATA_BASE_NAME, npoints=NUM_POINT, voxel_size=VOX_SIZE, split='train')

VAL_DATASET = scannet_dataset.ScannetDataset(root=DATA_PATH, npoints=NUM_POINT, data_base_name=DATA_BASE_NAME, voxel_size=VOX_SIZE, split='val')
VAL_DATASET_WHOLE_SCENE = scannet_dataset.ScannetDatasetWholeScene(root=DATA_PATH, npoints=NUM_POINT, data_base_name=DATA_BASE_NAME, voxel_size=VOX_SIZE, split='val')

# Sanity check: every split must report the same label set.
assert TRAIN_DATASET.nr_classes == VAL_DATASET.nr_classes, 'Number of classes detected is different between training and testing set'
assert TRAIN_DATASET.nr_classes == VAL_DATASET_WHOLE_SCENE.nr_classes, 'Number of classes detected is different between training and whole scene testing set'

# Use hardcoded number of classes if previous assertions find something wrong
# NUM_CLASSES = 19
NUM_CLASSES = TRAIN_DATASET.nr_classes

def log_string(out_str):
    """Echo *out_str* to stdout and append it to the training log file.

    The log file handle (module-level ``LOG_FOUT``) is flushed after every
    write so progress survives a crash or interrupted run.
    """
    print(out_str)
    LOG_FOUT.write(out_str + '\n')
    LOG_FOUT.flush()
# Example 2
import os
import sys
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
ROOT_DIR = BASE_DIR

sys.path.append(BASE_DIR)  # model
sys.path.append(ROOT_DIR)  # provider
sys.path.append(os.path.join(ROOT_DIR, 'utils'))
sys.path.append(os.path.join(ROOT_DIR, 'data_prep'))
import scannet_dataset

# CLI: <root> <split> <chunks> <use_whole> <output_dir> <scene>
root = sys.argv[1]             # dataset root directory
split = sys.argv[2]            # split name, e.g. 'train' / 'val'
chunks = int(sys.argv[3])      # number of chunks to extract
use_whole = int(sys.argv[4])   # whole-scene flag (0/1)
output_dir = sys.argv[5]       # destination for the extracted chunks
scene = sys.argv[6]            # scene identifier to process
# makedirs(exist_ok=True) also creates missing parent directories and avoids
# the check-then-create race of the original exists()/mkdir() pair.
os.makedirs(output_dir, exist_ok=True)

# NOTE(review): use_color/use_conv/use_geodesic are fixed magic values here;
# their semantics live in scannet_dataset — confirm before changing.
dataset = scannet_dataset.ScannetDataset(root=root,
                                         npoints=8192,
                                         split=split,
                                         chunks=chunks,
                                         use_color=2,
                                         use_conv=0,
                                         use_geodesic=1,
                                         use_whole=use_whole,
                                         output_dir=output_dir,
                                         scene=scene)
dataset.ExtractChunks()