# Example #1
# 0
                           num_workers=args.num_workers,
                           pin_memory=True,
                           collate_fn=my_collate)
# Evaluation loader: fixed batch size, no shuffling; shares my_collate with the
# train loaders (presumably to drop unusable samples — TODO confirm its contract).
test_loader = DataLoader(test_data, batch_size=32, collate_fn=my_collate)
# Report loader lengths — these are batch counts per epoch, not sample counts.
print('Real: {0} \t Render: {1} \t Test: {2}'.format(len(real_loader),
                                                     len(render_loader),
                                                     len(test_loader)))

# Cap the iteration count: an infinite CLI value means "one full pass over the
# shorter of the two training loaders"; otherwise honour the explicit limit.
max_iterations = (min(len(real_loader), len(render_loader))
                  if np.isinf(args.max_iterations)
                  else args.max_iterations)

# my_model: the multires flag selects the probabilistic one-delta-per-bin
# variant; otherwise fall back to the single-bin-plus-delta model.
if args.multires:
    model = ProbabilisticOneDeltaPerBinModel(args.feature_network, num_classes,
                                             num_clusters, args.N0, args.N1,
                                             args.N2, args.N3, ndim)
else:
    model = OneBinDeltaModel(args.feature_network, num_classes, num_clusters,
                             args.N0, args.N1, args.N2, ndim)

# print(model)
# loss and optimizer
optimizer = optim.Adam(model.parameters(), lr=args.init_lr)
# Decay the learning rate by 10x after every scheduler.step() call (step_size=1).
scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=1, gamma=0.1)
# store stuff
writer = SummaryWriter(log_dir)  # TensorBoard event writer
count = 0  # global step counter
val_loss = []  # accumulated validation losses across epochs

                         collate_fn=my_collate)
render_loader = DataLoader(render_data,
                           batch_size=args.num_workers,  # NOTE(review): batch size tied to worker count — looks unintended, confirm
                           shuffle=True,
                           num_workers=args.num_workers,
                           pin_memory=True,  # speeds up host-to-GPU transfer
                           collate_fn=my_collate)
test_loader = DataLoader(test_data, batch_size=32)
# Report loader lengths — batch counts, not sample counts.
print('Real: {0} \t Render: {1} \t Test: {2}'.format(len(real_loader),
                                                     len(render_loader),
                                                     len(test_loader)))
# One epoch is bounded by the shorter of the two training loaders.
max_iterations = min(len(real_loader), len(render_loader))

# my_model: backbone pose network whose trained sub-modules get wrapped below.
if args.multires:
    orig_model = OneDeltaPerBinModel(args.feature_network, num_classes,
                                     num_clusters, N0, N1, N2, N3, ndim)
else:
    orig_model = OneBinDeltaModel(args.feature_network, num_classes,
                                  num_clusters, N0, N1, N2, ndim)


class JointCatPoseModel(nn.Module):
    def __init__(self, oracle_model):
        """Wrap a trained pose model, sharing its sub-modules.

        :param oracle_model: a trained OneBinDeltaModel / OneDeltaPerBinModel
            whose feature extractor and per-bin heads are re-used here.
        """
        super().__init__()
        # old stuff
        # Shared by reference (not copied) from the oracle model, so its
        # trained weights are used directly.
        self.num_classes = oracle_model.num_classes
        self.num_clusters = oracle_model.num_clusters
        self.ndim = oracle_model.ndim
        self.feature_model = oracle_model.feature_model
        self.bin_models = oracle_model.bin_models
        self.res_models = oracle_model.res_models
# Example #3
# 0
# DATA
# datasets (quaternion-parameterised pose targets)
real_data = GBDGeneratorQ(train_path, 'real', kmeans_file)
render_data = GBDGeneratorQ(render_path, 'render', kmeans_file)
test_data = TestImages(test_path, 'quaternion')
# setup data loaders
real_loader = DataLoader(real_data,
                         batch_size=args.num_workers,
                         shuffle=True,
                         num_workers=args.num_workers,
                         pin_memory=True,
                         collate_fn=my_collate)
render_loader = DataLoader(render_data,
                           batch_size=args.num_workers,
                           shuffle=True,
                           num_workers=args.num_workers,
                           pin_memory=True,
                           collate_fn=my_collate)
test_loader = DataLoader(test_data, batch_size=32)
print('Real: {0} \t Render: {1} \t Test: {2}'.format(
    len(real_loader), len(render_loader), len(test_loader)))

# One epoch here spans the real-image loader only.
max_iterations = len(real_loader)

# my_model: pick the variant matching the saved checkpoint.
if args.multires:
    model = OneDeltaPerBinModel(args.feature_network, num_classes,
                                num_clusters, N0, N1, N2, N3, ndim)
else:
    model = OneBinDeltaModel(args.feature_network, num_classes,
                             num_clusters, N0, N1, N2, ndim)
# Resume from the checkpoint written by a previous run.
model.load_state_dict(torch.load(model_file))
# print(model)
optimizer = mySGD(model.parameters(), c=2*len(real_loader))
writer = SummaryWriter(log_dir)
count = 0
val_loss = []
s = 0
num_ensemble = 0


def training():
	global count, val_loss, s, num_ensemble
	model.train()
# Output locations derived from the CLI save string.
model_file = os.path.join('models', args.save_str + '.tar')
results_file = os.path.join('results', args.save_str + '_dets')

# kmeans data: pose-bin dictionary clustered over axis-angle space.
kmeans_file = 'data/kmeans_dictionary_axis_angle_' + str(args.dict_size) + '.pkl'
# Use a context manager so the file handle is closed deterministically
# (the original pickle.load(open(...)) leaked the handle).
# NOTE(review): pickle is fine for this trusted local artifact; never use it
# on untrusted input.
with open(kmeans_file, 'rb') as f:
    kmeans = pickle.load(f)
kmeans_dict = kmeans.cluster_centers_
num_clusters = kmeans.n_clusters

# relevant variables
ndim = 3  # axis-angle pose representation
num_classes = len(classes)

# my_model: the multires flag selects the per-bin-delta variant.
if args.multires:
	model = OneDeltaPerBinModel(args.feature_network, num_classes, num_clusters, args.N0, args.N1, args.N2, args.N3, ndim)
else:
	model = OneBinDeltaModel(args.feature_network, num_classes, num_clusters, args.N0, args.N1, args.N2, ndim)
# load model weights from the saved checkpoint
model.load_state_dict(torch.load(model_file))


def testing():
	model.eval()
	ypred = []
	bbox = []
	labels = []
	for i in range(len(test_data)):
		sample = test_data[i]
		xdata = Variable(sample['xdata'].cuda())
		label = Variable(sample['label'].cuda())
# relevant variables
ndim, num_classes = 3, 12
N0, N1, N2, N3 = 2048, 1000, 500, 100
# Dataset root depends on whether the "clean" or the full flipped DB is used.
db_path = 'data/flipped_new' if args.db_type == 'clean' else 'data/flipped_all'
test_path = os.path.join(db_path, 'test')

# DATA
test_data = TestImages(test_path)
test_loader = DataLoader(test_data, batch_size=32)

# my_model: backbone pose network whose sub-modules get wrapped below.
if args.multires:
    orig_model = OneDeltaPerBinModel(args.feature_network, num_classes,
                                     num_clusters, N0, N1, N2, N3, ndim)
else:
    orig_model = OneBinDeltaModel(args.feature_network, num_classes,
                                  num_clusters, N0, N1, N2, ndim)


class JointCatPoseModel(nn.Module):
    def __init__(self, oracle_model):
        """Wrap a trained pose model, sharing its sub-modules.

        :param oracle_model: a trained OneBinDeltaModel / OneDeltaPerBinModel
            whose feature extractor and per-bin heads are re-used here.
        """
        super().__init__()
        # old stuff
        # Shared by reference (not copied) from the oracle model, so its
        # trained weights are used directly.
        self.num_classes = oracle_model.num_classes
        self.num_clusters = oracle_model.num_clusters
        self.ndim = oracle_model.ndim
        self.feature_model = oracle_model.feature_model
        self.bin_models = oracle_model.bin_models
        self.res_models = oracle_model.res_models
		return y


# Build the model matching the checkpoint type:
#   'c'  -> pure bin classification over the kmeans pose dictionary
#   'bd' -> bin + delta model (optionally multi-resolution)
#   else -> direct pose regression
if args.model_type in ('bd', 'c'):
	# kmeans data
	kmeans_file = 'data/kmeans_dictionary_axis_angle_' + str(args.dict_size) + '.pkl'
	# Context manager closes the handle deterministically (the original
	# pickle.load(open(...)) leaked it). pickle is acceptable for this
	# trusted local artifact only.
	with open(kmeans_file, 'rb') as f:
		kmeans = pickle.load(f)
	kmeans_dict = kmeans.cluster_centers_
	num_clusters = kmeans.n_clusters

	if args.model_type == 'c':
		model = ClassificationModel(num_clusters)
	else:
		# my_model
		if not args.multires:
			model = OneBinDeltaModel(args.feature_network, num_classes, num_clusters, args.N0, args.N1, args.N2, ndim)
		else:
			model = OneDeltaPerBinModel(args.feature_network, num_classes, num_clusters, args.N0, args.N1, args.N2, args.N3, ndim)
else:
	model = RegressionModel()

# load model weights from the saved checkpoint
model_file = os.path.join('models', args.save_str + '.tar')
model.load_state_dict(torch.load(model_file))


def testing(det_path):
	test_data = DetImages(det_path)
	model.eval()
	ypred = []
	bbox = []