# Mean-shift clustering instance, used downstream to group predicted primitives.
ms = MeanShift()

# Test-only dataset: load point normals and primitive labels, skip the training split.
dataset = Dataset(
    config.batch_size,
    config.num_train,
    config.num_val,
    config.num_test,
    normals=True,
    primitives=True,
    if_train_data=False,
    prefix=userspace,
)
get_test_data = dataset.get_test(
    align_canonical=True,
    anisotropic=False,
    if_normal_noise=True,
)

# Wrap the test generator so a DataLoader can draw from it (effectively) forever;
# batch_size=1 with an identity collate_fn keeps each yielded sample untouched.
loader = generator_iter(get_test_data, int(1e10))
get_test_data = iter(
    DataLoader(
        loader,
        batch_size=1,
        shuffle=False,
        collate_fn=lambda x: x,
        num_workers=0,
        pin_memory=False,
    )
)

# Make sure the per-model results directory exists before anything is written to it.
os.makedirs(
    userspace + "logs/results/{}/results/".format(config.pretrain_model_path),
    exist_ok=True,
)
evaluation = Evaluation()
dataset = DataSetControlPointsPoisson( config.dataset_path, config.batch_size, splits=split_dict, size_v=config.grid_size, size_u=config.grid_size) get_train_data = dataset.load_train_data( if_regular_points=True, align_canonical=align_canonical, anisotropic=anisotropic, if_augment=if_augment ) get_val_data = dataset.load_val_data( if_regular_points=True, align_canonical=align_canonical, anisotropic=anisotropic ) loader = generator_iter(get_train_data, int(1e10)) get_train_data = iter( DataLoader( loader, batch_size=1, shuffle=False, collate_fn=lambda x: x, num_workers=0, pin_memory=False, ) ) loader = generator_iter(get_val_data, int(1e10)) get_val_data = iter( DataLoader( loader,