Example #1
parser.add_argument('-categories',
                    type=str,
                    nargs='+',
                    default=['chair'],
                    help='list of object classes to use')
parser.add_argument('-vis',
                    action='store_true',
                    help='Visualize each model while evaluating')
parser.add_argument('-batchsize', type=int, default=16, help='Batch size.')
args = parser.parse_args()

# Data
valid_set = ShapeNet_ODMS(root='../../datasets/', categories=args.categories,
                          download=True, train=False, high=128, low=32,
                          split=.97, voxels=True)
dataloader_val = DataLoader(valid_set, batch_size=args.batchsize, shuffle=False,
                            num_workers=8)

# Model
model = upscale(128, 32)
model = model.to(args.device)
# Load saved weights
model.load_state_dict(torch.load('log/{0}/best.pth'.format(args.expid)))

iou_epoch = 0.
iou_NN_epoch = 0.
num_batches = 0

model.eval()
with torch.no_grad():
    for data in tqdm(dataloader_val):

        tgt_odms = data['odms_128'].to(args.device)
        tgt_voxels = data['voxels_128'].to(args.device)
        inp_odms = data['odms_32'].to(args.device)
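
Example #1 cuts off inside its validation loop. Below is a minimal sketch of how the loop body could continue, assuming the network super-resolves the 32-resolution ODMs toward the 128-resolution targets and that a voxel IoU is accumulated; the ODM-to-voxel projection is hidden behind a hypothetical odms_to_voxels helper, and none of the added names come from the source.

        # Illustrative continuation -- odms_to_voxels is a hypothetical helper, not from the source.
        pred_odms = model(inp_odms)                      # super-resolve the 32-res ODMs
        pred_voxels = odms_to_voxels(pred_odms)          # hypothetical ODM -> voxel projection
        intersection = (pred_voxels * tgt_voxels).sum(dim=(1, 2, 3))
        union = ((pred_voxels + tgt_voxels) > 0).float().sum(dim=(1, 2, 3))
        iou_epoch += (intersection / union).mean().item()
        num_batches += 1

print('Mean IoU: {0}'.format(iou_epoch / num_batches))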
Example #2
"""
train_set = ModelNet_ODMS(root='../../datasets/', categories=args.categories,
                          download=True)
dataloader_train = DataLoader(train_set,
                              batch_size=args.batchsize,
                              shuffle=True,
                              num_workers=8)

valid_set = ModelNet_ODMS(root='../../datasets/', categories=args.categories,
                          download=True, train=False)
dataloader_val = DataLoader(valid_set, batch_size=args.batchsize, shuffle=False,
                            num_workers=8)
"""
Model settings 
"""
model = upscale(30, 15).to(args.device)

loss_fn = torch.nn.MSELoss()

optimizer = optim.Adam(model.parameters(), lr=args.lr)

# Create log directory, if it doesn't already exist
args.logdir = os.path.join(args.logdir, args.expid)
if not os.path.isdir(args.logdir):
    os.makedirs(args.logdir)
    print('Created dir:', args.logdir)

# Log all commandline args
with open(os.path.join(args.logdir, 'args.txt'), 'w') as f:
    json.dump(args.__dict__, f, indent=2)
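
Example #2 stops right after logging the command-line arguments. A minimal sketch of a single training epoch built from the pieces configured above follows; the dictionary keys and the 15-to-30 resolution pairing are assumptions made by analogy with Example #1, not taken from the source.

# Illustrative training epoch -- key names below are assumptions.
model.train()
for data in dataloader_train:
    inp_odms = data['odms_15'].to(args.device)   # low-resolution input ODMs (assumed key)
    tgt_odms = data['odms_30'].to(args.device)   # high-resolution target ODMs (assumed key)

    optimizer.zero_grad()
    pred_odms = model(inp_odms)                  # upscale(30, 15) maps 15-res ODMs to 30-res
    loss = loss_fn(pred_odms, tgt_odms)          # MSE between prediction and target
    loss.backward()
    optimizer.step()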
Example #3
"""
Dataset
"""
train_set = ShapeNet_ODMS(root='../../datasets/', categories=args.categories,
                          download=True, high=128, low=32, split=.97, voxels=True)
dataloader_train = DataLoader(train_set, batch_size=args.batchsize, shuffle=True,
                              num_workers=8)

valid_set = ShapeNet_ODMS(root='../../datasets/', categories=args.categories,
                          download=True, train=False, high=128, low=32,
                          split=.97, voxels=True)
dataloader_val = DataLoader(valid_set, batch_size=args.batchsize, shuffle=False,
                            num_workers=8)
"""
Model settings 
"""
model = upscale(128, 32).to(args.device)

loss_fn = torch.nn.MSELoss()

optimizer = optim.Adam(model.parameters(), lr=args.lr)

# Create log directory, if it doesn't already exist
args.logdir = os.path.join(args.logdir, args.expid)
if not os.path.isdir(args.logdir):
    os.makedirs(args.logdir)
    print('Created dir:', args.logdir)

# Log all commandline args
with open(os.path.join(args.logdir, 'args.txt'), 'w') as f:
    json.dump(args.__dict__, f, indent=2)
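
Example #1 loads log/{expid}/best.pth, so a script with this setup presumably writes its best weights into args.logdir. A minimal sketch of the end-of-epoch validation and checkpointing step follows; using validation MSE as the selection criterion is an assumption, while the dictionary keys match those shown in Example #1.

# Illustrative validation + checkpointing, run once per training epoch.
best_loss = float('inf')   # initialised once, before the training loop
model.eval()
val_loss = 0.
with torch.no_grad():
    for data in dataloader_val:
        inp_odms = data['odms_32'].to(args.device)    # keys as in Example #1
        tgt_odms = data['odms_128'].to(args.device)
        val_loss += loss_fn(model(inp_odms), tgt_odms).item()
val_loss /= len(dataloader_val)

if val_loss < best_loss:
    best_loss = val_loss
    torch.save(model.state_dict(), os.path.join(args.logdir, 'best.pth'))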
Example #4
parser.add_argument('-categories',
                    type=str,
                    nargs='+',
                    default=['chair'],
                    help='list of object classes to use')
parser.add_argument('-batchsize', type=int, default=16, help='Batch size.')
args = parser.parse_args()

# Data
valid_set = ModelNet_ODMS(root='../../datasets/', categories=['chair'],
                          download=True, train=False)
dataloader_val = DataLoader(valid_set, batch_size=args.batchsize, shuffle=False,
                            num_workers=8)

# Model
model_res = upscale(30, 15)
model_res = model_res.to(args.device)
model_occ = upscale(30, 15)
model_occ = model_occ.to(args.device)
# Load saved weights
model_res.load_state_dict(torch.load('log/{0}/resbest.pth'.format(args.expid)))
model_occ.load_state_dict(torch.load('log/{0}/occbest.pth'.format(args.expid)))

iou_epoch = 0.
iou_NN_epoch = 0.
num_batches = 0

model_res.eval()
model_occ.eval()
with torch.no_grad():
    for data in tqdm(dataloader_val):
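
Example #4 ends at the top of its evaluation loop. The two networks suggest a residual branch and an occupancy branch, but the snippet does not show how their outputs are combined, so the partial sketch below only illustrates the data handling and the two forward passes; every key name is assumed.

        # Illustrative partial loop body -- key names are assumptions, not from the source.
        inp_odms = data['odms_15'].to(args.device)      # low-resolution input ODMs (assumed key)
        tgt_voxels = data['voxels_30'].to(args.device)  # target voxel grid (assumed key)

        pred_res = model_res(inp_odms)                  # residual-branch prediction
        pred_occ = model_occ(inp_odms)                  # occupancy-branch prediction
        # How the two predictions are merged into a voxel grid is not shown in the snippet;
        # once a predicted grid exists, IoU accumulation would mirror the sketch after Example #1.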
Example #5
parser.add_argument('-expid', type=str, default='MVD', help='Unique experiment identifier.')
parser.add_argument('-device', type=str, default='cuda', help='Device to use')
parser.add_argument('-categories', type=str, nargs='+', default=['chair'], help='list of object classes to use')
parser.add_argument('-vis', action='store_true', help='Visualize each model while evaluating')
parser.add_argument('-batchsize', type=int, default=16, help='Batch size.')
args = parser.parse_args()

# Data
valid_set = ShapeNet_ODMS(root='../..//datasets', categories=args.categories,
                          download=True, train=False, high=128, low=32,
                          split=.97, voxels=True)
dataloader_val = DataLoader(valid_set, batch_size=args.batchsize, shuffle=False,
                            num_workers=8)


# Model
model_res = upscale(128, 32)
model_res = model_res.to(args.device)
model_occ = upscale(128, 32)
model_occ = model_occ.to(args.device)
# Load saved weights
model_res.load_state_dict(torch.load('log/{0}/resbest.pth'.format(args.expid)))
model_occ.load_state_dict(torch.load('log/{0}/occbest.pth'.format(args.expid)))


iou_epoch = 0.
iou_NN_epoch = 0.
num_batches = 0


model_res.eval()
model_occ.eval()
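
Example #5 also stops before its evaluation loop, which would presumably mirror Example #4. One practical note about the two load calls above: torch.load restores tensors onto the device they were saved from, so running this script with -device cpu against GPU-saved checkpoints would need an explicit map_location, for example:

# Same load calls as above, with an explicit map_location for CPU or cross-device use.
model_res.load_state_dict(
    torch.load('log/{0}/resbest.pth'.format(args.expid), map_location=args.device))
model_occ.load_state_dict(
    torch.load('log/{0}/occbest.pth'.format(args.expid), map_location=args.device))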