# Example #1
print("------Building model and loading-------")

# Build the rotation-estimation network and restore a trained checkpoint.
rotmodel = RotationEstimateNN(NUM_CLASS).to(device)
checkpoint_path = os.path.join(
    BASE_DIR, 'models', '1_rotation_cls_entropy_box_residual_25_11:49:26', '130')
rotmodel.load_state_dict(torch.load(checkpoint_path))
# Alternative checkpoints kept for reference:
# rotmodel.load_state_dict(torch.load(os.path.join(BASE_DIR, 'models', 'rotation_cls_entropy_box_new_19_12:31:03', '1195')))
# rotmodel.load_state_dict(torch.load(os.path.join(BASE_DIR, 'models', '1_huber_rotation_cls_entropy_box_residual_20_10:10:40', '160')))

print("------Successfully Built model-------")



# NOTE(review): everything built in this section (file lists, datasets,
# loaders) is rebuilt further below with pre_transform=True, overwriting
# these bindings. Unless the pre_transform=False pass of PCDDataset has a
# required side effect (e.g. writing a cache), this first load appears
# redundant — confirm before removing.
TRAIN_FILES = provider.getDataFiles(os.path.join(BASE_DIR, 'data/threeclass/train_files.txt'))
TEST_FILES = provider.getDataFiles(os.path.join(BASE_DIR, 'data/threeclass/test_files.txt'))

train_dataset= provider.PCDDataset(BASE_DIR, "train", None, pre_transform= False)
test_dataset= provider.PCDDataset(BASE_DIR, "test", None, pre_transform= False)

train_loader = DataLoader(train_dataset, batch_size=BATCH_SIZE, shuffle=True)
test_loader = DataLoader(test_dataset, batch_size=BATCH_SIZE, shuffle=False)

# Print dataset summaries for a quick sanity check.
print(train_dataset)
print(test_dataset)

def to_residual(angle):
    """Return the within-bin remainder of ``angle`` for 30-degree bins."""
    bin_width = 30
    return angle % bin_width

def to_cls(angle):
    """Map an angle tensor to one of 12 classes of 30-degree bins (int64)."""
    bin_index = (angle // 30) % 12
    return bin_index.long()

# Accumulator for per-epoch training losses.
train_losses = []
print("------Successfully Built model-------")

# Create output directories for checkpoints and logs (no-op if present).
model_save_dir = os.path.join(BASE_DIR, "models", f"center_angle_{NUM_POINT}")
log_save_dir = os.path.join(BASE_DIR, "logs", "center_angle")
for directory in (model_save_dir, log_save_dir):
    os.makedirs(directory, exist_ok=True)

# Re-load file lists and datasets, this time with pre_transform=True
# (overwrites the earlier pre_transform=False versions).
TRAIN_FILES = provider.getDataFiles(
    os.path.join(BASE_DIR, 'data/threeclass/train_files.txt'))
TEST_FILES = provider.getDataFiles(
    os.path.join(BASE_DIR, 'data/threeclass/test_files.txt'))

# pre_transform, transform = T.NormalizeScale(), T.SamplePoints(NUM_POINT)
train_dataset = provider.PCDDataset(BASE_DIR,
                                    "train",
                                    None,
                                    pre_transform=True)
# print(train_dataset.ang_m, train_dataset.ang_range , train_dataset.ctr_m , train_dataset.ctr_range)
# The test split reuses the training split's center-normalization
# statistics (ctr_m / ctr_std) so both splits are scaled identically.
test_dataset = provider.PCDDataset(BASE_DIR,
                                   "test",
                                   None,
                                   pre_transform=True,
                                   data_params={
                                       'ctr_m': train_dataset.ctr_m,
                                       'ctr_std': train_dataset.ctr_std
                                   })
train_loader = DataLoader(train_dataset, batch_size=BATCH_SIZE, shuffle=True)
# NOTE(review): hard-coded batch size 32 here vs. BATCH_SIZE everywhere
# else — confirm this is intentional.
test_loader = DataLoader(test_dataset, batch_size=32, shuffle=False)

# optimizer = torch.optim.SGD(model.parameters(), lr = 0.001, momentum = 0.9)
# NOTE(review): `cemodel` is not defined anywhere in this chunk (only
# `rotmodel` is built above). If it is not defined elsewhere in the file,
# this line raises NameError — verify.
optimizer = torch.optim.Adam(cemodel.parameters(), lr=0.001)