import torch.nn.functional as F

# --- Hyperparameters ---
datapath = './shapenetcore_partanno_segmentation_benchmark_v0/'  # ShapeNet part-seg benchmark root
num_points = 2500   # number of points resampled from each shape
batchsize = 32      # inputs fed to the network per step
epochsize = 250     # number of training epochs
workers = 6         # DataLoader worker processes

# load shapenet datasets
# NOTE(review): only `torch.nn.functional` is imported above as F; the bare
# name `torch` is not bound in this view, so `torch.utils.data.DataLoader`
# below would raise NameError unless `import torch` exists elsewhere — confirm.
dataset = ShapeNetDataset(datapath, classification=True, npoints=num_points)
# NOTE(review): testset is constructed with exactly the same arguments as the
# training set (no split='test'), so "testing" runs on the training data.
# Other snippets in this file pass split='test' — likely an oversight; verify.
testset = ShapeNetDataset(datapath, classification=True, npoints=num_points)

# `batchsize` is passed positionally as DataLoader's batch_size argument.
dataloader = torch.utils.data.DataLoader(dataset,
                                         batchsize,
                                         shuffle=True,
                                         num_workers=workers)
testdataloader = torch.utils.data.DataLoader(testset,
                                             batchsize,
                                             shuffle=True,
                                             num_workers=workers)

# Number of object categories exposed by the dataset; printed as a sanity check.
num_classes = len(dataset.classes)
print(num_classes)

# classifier initialization
    parser.add_argument("--sample_size")
    return parser


if __name__ == "__main__":
    # Parse CLI arguments (parser built by the off-screen get_parser) and
    # set up logging for this demo run.
    args = get_parser().parse_args()
    logger = setup_logger(name="demo")
    logger.info("Arguments: " + str(args))

    # Inject the checkpoint path into the (off-screen) global config object.
    cfg.CHECKPOINT = args.checkpoint

    data_dir = args.data_dir
    synset_id = args.synset_id
    output_dir = args.output_dir

    dataset = ShapeNetDataset(cfg, data_dir)
    # dataset.synset_ids appears to be sorted/grouped by synset: the first
    # occurrence index plus a count-based offset selects models of one class.
    synset_ids = dataset.synset_ids
    first_idx = synset_ids.index(synset_id)
    # NOTE(review): the divisor 24 presumably means 24 entries (renders/views)
    # per model, giving the number of distinct models — confirm with dataset.
    model_num = int(synset_ids.count(synset_id) / 24)

    # Fixed seed so the random sample of model indices is reproducible.
    sample_size = int(args.sample_size)
    torch.manual_seed(0)
    idx_list = torch.randint(0, model_num, size=(sample_size, ))
    # Shift model-local indices into the global synset_ids index space.
    idx_list += first_idx

    # NOTE(review): loop body is truncated at the snippet boundary below;
    # all_iou is filled elsewhere in the original script.
    all_iou = []
    for idx in idx_list:
        item = dataset[
            idx]  # img, verts, faces, points, normals, voxels, P, _imgs, render_RTs, RT, sid, mid, iid
        img = item[0].squeeze()
        imgs = item[7]
# Example #3
# 0
                    default='Chair',
                    help="class_choice")
# Optional flag: enable PointNet's feature-transform regularizer.
parser.add_argument('--feature_transform', action='store_true',
                    help="use feature transform")

opt = parser.parse_args()
print(opt)

# Draw a random seed, report it, then seed both RNGs so the run is
# reproducible from the printed value.
opt.manualSeed = random.randint(1, 10000)  # fix seed
print("Random Seed: ", opt.manualSeed)
random.seed(opt.manualSeed)
torch.manual_seed(opt.manualSeed)

# Training split: part-segmentation samples restricted to one chosen class.
dataset = ShapeNetDataset(
    root=opt.dataset, classification=False, class_choice=[opt.class_choice])
dataloader = torch.utils.data.DataLoader(
    dataset, batch_size=opt.batchSize, shuffle=True,
    num_workers=int(opt.workers))

# Held-out split: same class selection, augmentation disabled for evaluation.
test_dataset = ShapeNetDataset(
    root=opt.dataset, classification=False, class_choice=[opt.class_choice],
    split='test', data_augmentation=False)
testdataloader = torch.utils.data.DataLoader(
    test_dataset, batch_size=opt.batchSize, shuffle=True,
    num_workers=int(opt.workers))
# Example #4
# 0
from dataset import ShapeNetDataset
from model import PointNetCls
import torch.nn.functional as F

# CLI: a model checkpoint path and the number of points sampled per shape.
parser = argparse.ArgumentParser()
parser.add_argument('--model', type=str, default='', help='model path')
parser.add_argument('--num_points',
                    type=int,
                    default=2500,
                    # BUG FIX: help text said 'input batch size' — a
                    # copy-paste error; this flag sets the point count.
                    help='number of points sampled from each shape')
# Dataset location is hard-coded relative to this script.
dataset_root = '../shapenet'
opt = parser.parse_args()
print(opt)

# Test split: classification labels, resampled to opt.num_points.
test_dataset = ShapeNetDataset(root=dataset_root,
                               classification=True,
                               split='test',
                               npoints=opt.num_points)

# Training split (the dataset's default split).
train_dataset = ShapeNetDataset(root=dataset_root,
                                classification=True,
                                npoints=opt.num_points)

test_dataloader = torch.utils.data.DataLoader(test_dataset,
                                              batch_size=32,
                                              shuffle=True,
                                              num_workers=8)

train_dataloader = torch.utils.data.DataLoader(train_dataset,
                                               batch_size=32,
                                               shuffle=True,
                                               num_workers=8)
# Example #5
# 0
                    action='store_true',
                    help="use feature transform")

opt = parser.parse_args()
print(opt)

# ANSI escape helper: wraps a string in blue for terminal logging.
blue = lambda x: '\033[94m' + x + '\033[0m'

# Draw and print a random seed, then seed both RNGs so the run can be
# reproduced from the printed value.
opt.manualSeed = random.randint(1, 10000)  # fix seed
print("Random Seed: ", opt.manualSeed)
random.seed(opt.manualSeed)
torch.manual_seed(opt.manualSeed)

# Choose the dataset implementation from the CLI flag.
if opt.dataset_type == 'shapenet':
    dataset = ShapeNetDataset(root=opt.dataset,
                              classification=True,
                              npoints=opt.num_points)

    # Evaluation split: augmentation disabled for deterministic inputs.
    test_dataset = ShapeNetDataset(root=opt.dataset,
                                   classification=True,
                                   split='test',
                                   npoints=opt.num_points,
                                   data_augmentation=False)
elif opt.dataset_type == 'modelnet40':
    dataset = ModelNetDataset(root=opt.dataset,
                              npoints=opt.num_points,
                              split='trainval')

    test_dataset = ModelNetDataset(root=opt.dataset,
                                   split='test',
                                   npoints=opt.num_points,
# Example #6
# 0
        clr = (clr - clr.min()) / (clr.max() - clr.min())  # normalization
        sct = ax.scatter(
            pts[:, 0],
            pts[:, 1],
            pts[:, 2],
            c=clr,
            zdir='y',
            s=sz,
            cmap=cmap,
            # depthshade=False,
            edgecolors=(0.5, 0.5, 0.5))

    ax.set_axis_off()
    ax.set_facecolor("white")
    return ax, sct


if __name__ == '__main__':
    # Smoke test: load one airplane point cloud and scatter-plot it.
    dataroot = "data/shapenetcore_partanno_segmentation_benchmark_v0"
    dataset = ShapeNetDataset(root=dataroot,
                              class_choice='Airplane',
                              split='train',
                              classification=True,
                              num_points=2048)
    # Pick one of the first 101 samples at random; item [0] is the points.
    sample_idx = random.randint(0, 100)
    points = dataset[sample_idx][0]
    ax, sct = draw_pts(points, clr=None, cmap='CMRmap')
    ax.figure.show()
# CLI flags: dataset root is required; feature_transform enables the
# PointNet feature-transform regularizer.
parser.add_argument('--dataset', type=str, required=True, help="dataset path")
parser.add_argument('--feature_transform', action='store_true', help="use feature transform")

opt = parser.parse_args()
print(opt)

# ANSI escape helper: wraps a string in blue for terminal output.
blue = lambda x: '\033[94m' + x + '\033[0m'

# Draw and print a random seed, then seed both RNGs for reproducibility.
opt.manualSeed = random.randint(1, 10000)  # fix seed
print("Random Seed: ", opt.manualSeed)
random.seed(opt.manualSeed)
torch.manual_seed(opt.manualSeed)


# Training dataset: classification labels, opt.num_points per cloud.
dataset = ShapeNetDataset(
    root=opt.dataset,
    classification=True,
    npoints=opt.num_points)

dataloader = torch.utils.data.DataLoader(
    dataset,
    batch_size=opt.batchSize,
    shuffle=True,
    num_workers=int(opt.workers))

print(len(dataset))
# Category count determines the classifier's output dimension.
num_classes = len(dataset.classes)
print('classes', num_classes)

try:
    os.makedirs(opt.outf)
except OSError: