# --- Example 1 (snippet separator from the scraped source; stray '0' below) ---
# 0
# Output locations derived from the run identifier.
model_file = os.path.join('models', args.save_str + '.tar')
plots_file = os.path.join('plots', args.save_str)
log_dir = os.path.join('logs', args.save_str)

# kmeans data: pre-computed axis-angle cluster dictionary for pose binning.
kmeans_file = 'data/kmeans_dictionary_axis_angle_' + str(args.dict_size) + '.pkl'
# Use a context manager so the file handle is closed deterministically
# (the original `pickle.load(open(...))` leaked the handle).
with open(kmeans_file, 'rb') as f:
    kmeans = pickle.load(f)
kmeans_dict = kmeans.cluster_centers_
num_clusters = kmeans.n_clusters

# relevant variables
ndim = 3  # presumably the axis-angle dimensionality — matches the dictionary name
num_classes = len(classes)

# Two loss criteria weighted by args.alpha.
criterion1 = SimpleLoss(args.alpha)
criterion2 = GeodesicLoss(args.alpha, kmeans_file, geodesic_loss().cuda())

# DATA
# datasets
real_data = GBDGenerator(args.augmented_path, 'real', kmeans_file)
render_data = GBDGenerator(args.render_path, 'render', kmeans_file)
test_data = Pascal3dAll(args.pascal3d_path, 'test')
# setup data loaders
# NOTE(review): batch_size is set to args.num_workers — batch size and worker
# count look conflated (likely a copy-paste slip); confirm the intent.
real_loader = DataLoader(real_data, batch_size=args.num_workers, shuffle=True, num_workers=args.num_workers, pin_memory=True, collate_fn=my_collate)
render_loader = DataLoader(render_data, batch_size=args.num_workers, shuffle=True, num_workers=args.num_workers, pin_memory=True, collate_fn=my_collate)
test_loader = DataLoader(test_data, batch_size=32, collate_fn=my_collate)
print('Real: {0} \t Render: {1} \t Test: {2}'.format(len(real_loader), len(render_loader), len(test_loader)))

if np.isinf(args.max_iterations):
	max_iterations = min(len(real_loader), len(render_loader))
else:
# relevant variables
ndim = 3  # presumably the axis-angle dimensionality
N0, N1, N2, N3 = 2048, 1000, 500, 100  # layer-size constants (usage not visible here)
num_classes = len(classes)
# (a second, redundant `num_classes = len(classes)` assignment was removed)

# Dataset root depends on whether the cleaned or the full flipped db is used.
if args.db_type == 'clean':
    db_path = 'data/flipped_new'
else:
    db_path = 'data/flipped_all'
real_path = os.path.join(db_path, 'train')
render_path = 'data/renderforcnn'
test_path = os.path.join(db_path, 'test')

# loss
ce_loss = nn.CrossEntropyLoss().cuda()
gve_loss = geodesic_loss().cuda()

# DATA
# datasets
real_data = GBDGenerator(real_path, 'real', kmeans_file)
render_data = GBDGenerator(render_path, 'render', kmeans_file)
test_data = TestImages(test_path)
# setup data loaders
# NOTE(review): batch_size == args.num_workers — looks like a copy-paste slip
# (batch size tied to the worker count); confirm the intent.
real_loader = DataLoader(real_data,
                         batch_size=args.num_workers,
                         shuffle=True,
                         num_workers=args.num_workers,
                         pin_memory=True,
                         collate_fn=my_collate)
render_loader = DataLoader(render_data,
                           batch_size=args.num_workers,
# --- Example 3 (snippet separator from the scraped source; stray '0' below) ---
# 0
# gmm data: pre-computed GMM dictionary over axis-angle space.
gmm_file = 'data/gmm_dictionary_axis_angle_' + str(args.dict_size) + '.pkl'
# Context manager closes the handle deterministically (the original
# `pickle.load(open(...))` leaked it).
with open(gmm_file, 'rb') as f:
    gmm = pickle.load(f)
gmm_dict = gmm.means_
num_clusters = gmm.n_components
cluster_centers = Variable(torch.from_numpy(gmm_dict).float()).cuda()

# relevant variables
ndim = 3  # presumably the axis-angle dimensionality
num_classes = len(classes)
# (a second, redundant `num_classes = len(classes)` assignment was removed)
N0, N1, N2, N3 = 2048, 1000, 500, 100  # layer-size constants (usage not visible here)

# loss
kl_div = nn.KLDivLoss().cuda()
gve_loss = geodesic_loss(reduce=False).cuda()

# DATA
# Dataset root depends on whether the cleaned or the full flipped db is used.
if args.db_type == 'clean':
    db_path = 'data/flipped_new'
else:
    db_path = 'data/flipped_all'
train_path = os.path.join(db_path, 'train')
test_path = os.path.join(db_path, 'test')
render_path = 'data/renderforcnn/'
# datasets
real_data = XPBDGenerator(train_path, 'real', gmm_file)
render_data = XPBDGenerator(render_path, 'render', gmm_file)
test_data = TestImages(test_path)
# setup data loaders
# --- Example 4 (snippet separator from the scraped source; stray '0' below) ---
# 0
# kmeans data: cluster dictionary for pose binning.
# NOTE(review): `kmeans_file` must be defined above — the snippet is truncated.
# Context manager closes the handle deterministically (the original
# `pickle.load(open(...))` leaked it).
with open(kmeans_file, 'rb') as f:
    kmeans = pickle.load(f)
kmeans_dict = kmeans.cluster_centers_
num_clusters = kmeans.n_clusters

# relevant variables
ndim = 3          # presumably the axis-angle dimensionality
num_workers = 4   # DataLoader worker processes
N0 = 2048
N1 = 1000
N2 = 500
num_classes = len(classes)
init_lr = 0.0001  # initial learning rate
num_epochs = 3

problem_type = 'm3'
criterion = loss_m3(1.0, kmeans_file, geodesic_loss(reduce=False).cuda())

# DATA
# datasets
real_data = MultibinImages(augmented_path, 'real', problem_type, kmeans_file)
render_data = MultibinImages(render_path, 'render', problem_type, kmeans_file)
test_data = Pascal3dAll(pascal3d_path, 'test')
# setup data loaders
real_loader = DataLoader(real_data,
                         batch_size=4,
                         shuffle=True,
                         num_workers=num_workers,
                         pin_memory=True,
                         collate_fn=my_collate)
render_loader = DataLoader(render_data,
                           batch_size=4,
# --- Example 5 (snippet separator from the scraped source; stray '0' below) ---
# 0
# Output locations derived from the run identifier.
model_file = os.path.join('models', args.save_str + '.tar')
plots_file = os.path.join('plots', args.save_str)
log_dir = os.path.join('logs', args.save_str)

# kmeans data: pre-computed axis-angle cluster dictionary.
kmeans_file = 'data/kmeans_dictionary_axis_angle_' + str(args.dict_size) + '.pkl'
# Context manager closes the handle deterministically (the original
# `pickle.load(open(...))` leaked it).
with open(kmeans_file, 'rb') as f:
    kmeans = pickle.load(f)
kmeans_dict = kmeans.cluster_centers_
num_clusters = kmeans.n_clusters

# relevant variables
ndim = 3  # presumably the axis-angle dimensionality
num_classes = len(classes)

# Single- vs multi-resolution probabilistic loss (re-indented with spaces;
# the original used tabs here, inconsistent with the rest of the file).
if not args.multires:
    criterion = ProbabilisticLoss(args.alpha, kmeans_file, geodesic_loss(reduce=False).cuda())
else:
    criterion = ProbabilisticMultiresLoss(args.alpha, kmeans_file, geodesic_loss(reduce=False).cuda())

# DATA
# datasets
real_data = GBDGenerator(args.augmented_path, 'real', kmeans_file)
render_data = GBDGenerator(args.render_path, 'render', kmeans_file)
test_data = Pascal3dAll(args.pascal3d_path, 'test')
# setup data loaders
# NOTE(review): batch_size == args.num_workers — likely a copy-paste slip; confirm.
real_loader = DataLoader(real_data, batch_size=args.num_workers, shuffle=True, num_workers=args.num_workers, pin_memory=True, collate_fn=my_collate)
render_loader = DataLoader(render_data, batch_size=args.num_workers, shuffle=True, num_workers=args.num_workers, pin_memory=True, collate_fn=my_collate)
test_loader = DataLoader(test_data, batch_size=32, collate_fn=my_collate)
print('Real: {0} \t Render: {1} \t Test: {2}'.format(len(real_loader), len(render_loader), len(test_loader)))

if np.isinf(args.max_iterations):
# --- Example 6 (snippet separator from the scraped source; stray '0' below) ---
# 0
def save_checkpoint(filename):
    """Serialize the module-level model's weights to *filename*."""
    state = model.state_dict()
    torch.save(state, filename)


# MSE minimization phase: train once with MSE, then evaluate.
criterion = nn.MSELoss().cuda()
training(True)
ytest, yhat_test, _ = testing()
get_error(ytest, yhat_test)
train_mse = train_loss  # keep the MSE-phase loss history for the plots below

# GVE minimization phase: reset loss accumulators and switch criterion.
train_loss = []
train_loss_sum = 0.0
train_samples = 0
criterion = geodesic_loss().cuda()
for epoch in range(num_epochs):
    tic = time.time()
    # training step
    training(True)
    # Step the LR scheduler AFTER the epoch's training (PyTorch >= 1.1
    # ordering; the original stepped it before training, which skips the
    # first learning-rate value of the schedule).
    scheduler.step()
    # save model at end of epoch
    save_checkpoint(model_file)
    # time and output
    toc = time.time() - tic
    print('Epoch: {0} in time {1}s'.format(epoch, toc))
    # cleanup
    gc.collect()
# save plots
spio.savemat(plots_file, {'train_loss': train_loss, 'train_mse': train_mse})
# NOTE(review): this snippet is truncated above — `kmeans` and `kmeans_file`
# must already be loaded (see the kmeans fragments elsewhere in this file).
num_clusters = kmeans.n_clusters

# relevant variables
ndim = 3          # presumably the axis-angle dimensionality — TODO confirm
num_workers = 4   # DataLoader worker processes
N0 = 2048
N1 = 1000
N2 = 500
N3 = 100
num_classes = len(classes)
init_lr = 0.0001  # initial learning rate
num_epochs = 3

# Multibin problem variant 'm1': paired m0/m1 criteria.
problem_type = 'm1'
criterion1 = loss_m0(1.0)
criterion2 = loss_m1(1.0, kmeans_file, geodesic_loss().cuda())

# DATA
# datasets
real_data = MultibinImages(augmented_path, 'real', problem_type, kmeans_file)
print(real_data)
test_data = Pascal3dAll(pascal3d_path, 'test')
# setup data loaders
real_loader = DataLoader(real_data,
                         batch_size=4,
                         shuffle=True,
                         num_workers=num_workers,
                         pin_memory=True,
                         collate_fn=my_collate)
test_loader = DataLoader(test_data, batch_size=8, collate_fn=my_collate)
print('Real: {0} \t Test: {1}'.format(len(real_loader), len(test_loader)))
# kmeans data: pre-computed axis-angle cluster dictionary.
kmeans_file = 'data/kmeans_dictionary_axis_angle_' + str(
    args.dict_size) + '.pkl'
# Context manager closes the handle deterministically (the original
# `pickle.load(open(...))` leaked it).
with open(kmeans_file, 'rb') as f:
    kmeans = pickle.load(f)
kmeans_dict = kmeans.cluster_centers_
num_clusters = kmeans.n_clusters
gamma = get_gamma(kmeans_dict)
print('Gamma: ', gamma)

# relevant variables
ndim = 3  # presumably the axis-angle dimensionality
num_classes = len(classes)

criterion1 = SimpleRelaXedLoss(1.0)
criterion2 = RelaXedLoss(1.0, kmeans_file, geodesic_loss().cuda())

# DATA
# datasets
real_data = XPBDGenerator(args.augmented_path, 'real', kmeans_file, gamma)
render_data = XPBDGenerator(args.render_path, 'render', kmeans_file, gamma)
test_data = Pascal3dAll(args.pascal3d_path, 'test')
# setup data loaders
# NOTE(review): batch_size == args.num_workers — likely a copy-paste slip; confirm.
real_loader = DataLoader(real_data,
                         batch_size=args.num_workers,
                         shuffle=True,
                         num_workers=args.num_workers,
                         pin_memory=True,
                         collate_fn=my_collate)
render_loader = DataLoader(render_data,
                           batch_size=args.num_workers,
log_dir = os.path.join('logs', args.save_str)

# kmeans data: pre-computed axis-angle cluster dictionary.
kmeans_file = 'data/kmeans_dictionary_axis_angle_' + str(
    args.dict_size) + '.pkl'
# Context manager closes the handle deterministically (the original
# `pickle.load(open(...))` leaked it).
with open(kmeans_file, 'rb') as f:
    kmeans = pickle.load(f)
kmeans_dict = kmeans.cluster_centers_
num_clusters = kmeans.n_clusters

# relevant variables
ndim = 3  # presumably the axis-angle dimensionality
num_classes = len(classes)

# Single- vs multi-resolution relaxed probabilistic loss.
if not args.multires:
    criterion = RelaXedProbabilisticLoss(1.0, kmeans_file,
                                         geodesic_loss(reduce=False).cuda())
else:
    criterion = RelaXedProbabilisticMultiresLoss(
        1.0, kmeans_file,
        geodesic_loss(reduce=False).cuda())

# DATA
# datasets
real_data = XPBDGenerator(args.augmented_path, 'real', kmeans_file)
render_data = XPBDGenerator(args.render_path, 'render', kmeans_file)
test_data = Pascal3dAll(args.pascal3d_path, 'test')
# setup data loaders
real_loader = DataLoader(real_data,
                         batch_size=args.num_workers,
                         shuffle=True,
                         num_workers=args.num_workers,