Example #1
def train_shape_net(c_in=30, evec=False):
    bs_train, bs_test = 32, 32
    train_files = get_files_list('../data/eigen/eigen_train_files.txt')
    test_files = get_files_list('../data/eigen/eigen_test_files.txt')

    ds_train = EigenNet40Ds(train_files, evec=evec, c_in=c_in)
    ds_test = EigenNet40Ds(test_files, evec=evec, c_in=c_in)

    dl_train = DataLoader(ds_train, bs_train, shuffle=True)
    dl_test = DataLoader(ds_test, bs_test, shuffle=True)

    lr = 1e-4
    l2_reg = 0
    our_model = HearShapeNet(c_in=c_in)
    # our_model = SpectralPointNet()
    loss_fn = F.cross_entropy  # This criterion combines log_softmax and nll_loss in a single function
    optimizer = torch.optim.Adam(our_model.parameters(),
                                 lr=lr,
                                 weight_decay=l2_reg)
    scheduler = torch.optim.lr_scheduler.StepLR(optimizer,
                                                step_size=20,
                                                gamma=0.7)
    trainer = NetTrainer(our_model, loss_fn, optimizer, scheduler)

    expr_name = 'Eigen-t3'
    if os.path.isfile(f'results/{expr_name}.pt'):
        os.remove(f'results/{expr_name}.pt')
    _ = trainer.fit(dl_train,
                    dl_test,
                    num_epochs=10000,
                    early_stopping=50,
                    checkpoints=expr_name)
    return
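
NetTrainer.fit is not shown here; the early_stopping=50 and checkpoints arguments suggest the usual best-model/patience loop. A minimal sketch of that pattern with illustrative names (fit_sketch, train_epoch, eval_epoch are hypothetical, not the actual NetTrainer API):

import torch

def fit_sketch(model, train_epoch, eval_epoch, num_epochs, patience, ckpt_path):
    # Track the best test accuracy seen so far; stop after `patience`
    # epochs without improvement, checkpointing the best weights.
    best_acc, bad_epochs = 0.0, 0
    for epoch in range(num_epochs):
        train_epoch()                   # one pass over dl_train
        test_acc = eval_epoch()         # accuracy on dl_test
        if test_acc > best_acc:
            best_acc, bad_epochs = test_acc, 0
            torch.save(model.state_dict(), ckpt_path)
        else:
            bad_epochs += 1
            if bad_epochs >= patience:  # early_stopping reached
                break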
Example #2
def create_dist_eig_dataset(train, num_eigen, num_nbrs, num_points,
                            num_workers):
    if train:
        name = 'train'
    else:
        name = 'test'
    bs = 512
    dest_dir = 'dist_eig_2048'
    pc_files = get_files_list(
        f'../data/modelnet40_ply_hdf5_2048/{name}_files.txt')
    ds = CreateLboEig(pc_files,
                      num_eigen=num_eigen,
                      num_nbrs=num_nbrs,
                      num_points=num_points)
    dl = DataLoader(ds, bs, shuffle=False, num_workers=num_workers)

    files_list = []
    dl_iter = iter(dl)
    num_batches = len(dl.batch_sampler)
    for batch_idx in range(num_batches):
        print(f'Working on file {batch_idx}', flush=True)
        eigen_vec, eigen_val = next(dl_iter)
        print(f'eigen_vec.shape={eigen_vec.shape}')
        print(f'eigen_val.shape={eigen_val.shape}')
        file_name = f'data/{dest_dir}/spectral_data_{name}{batch_idx}.h5'
        files_list.append(file_name)
        with h5py.File('../' + file_name, 'w') as hf:
            hf.create_dataset("eigen_vec", data=eigen_vec)
            hf.create_dataset("eigen_val", data=eigen_val)
    txt_name = f'../data/{dest_dir}/spectral_{name}_files.txt'
    with open(txt_name, 'w') as f:
        f.write('\n'.join(files_list))
    return
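
Reading the batch files back only needs h5py and the generated file list; a minimal reader matching the writer above (load_spectral_file is a hypothetical helper; paths are relative to the project root, as in the writer):

import h5py

def load_spectral_file(file_name):
    # Inverse of the writer above: returns the two arrays stored per batch file.
    with h5py.File('../' + file_name, 'r') as hf:
        return hf['eigen_vec'][:], hf['eigen_val'][:]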
Example #3
def create_normals_dataset(train: bool, num_nbrs: int, num_points: int,
                           num_workers: int):
    if train:
        name = 'train'
    else:
        name = 'test'
    bs = 512
    dest_dir = 'normals_5nbr_2048'
    pc_files = get_files_list(
        f'../data/modelnet40_ply_hdf5_2048/{name}_files.txt')
    ds = CreateNormals40Ds(h5_files=pc_files,
                           num_nbrs=num_nbrs,
                           num_points=num_points)
    dl = DataLoader(ds, bs, shuffle=False, num_workers=num_workers)

    files_list = []
    dl_iter = iter(dl)
    num_batches = len(dl.batch_sampler)
    print(
        f'function name={inspect.stack()[0][3]}, dest_dir={dest_dir}, num files={num_batches}'
    )
    for batch_idx in range(num_batches):
        print(f'Working on file {batch_idx}', flush=True)
        normals = next(dl_iter)
        print(f'normals.shape={normals.shape}')
        file_name = f'data/{dest_dir}/normals_data_{name}{batch_idx}.h5'
        files_list.append(file_name)
        with h5py.File('../' + file_name, 'w') as hf:
            hf.create_dataset("normals", data=normals)
    txt_name = f'../data/{dest_dir}/normals_{name}_files.txt'
    with open(txt_name, 'w') as f:
        f.write('\n'.join(files_list))
    return
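
CreateNormals40Ds is not shown; a standard way to estimate per-point normals from num_nbrs nearest neighbors is local PCA, where the normal is the eigenvector of the neighborhood covariance with the smallest eigenvalue. A self-contained numpy sketch of that technique (an assumption about the dataset's method, not its actual code; estimate_normals is a hypothetical name):

import numpy as np
from scipy.spatial import cKDTree

def estimate_normals(pc, num_nbrs=5):
    # pc: (N, 3) point cloud. Fit a plane to each point's neighborhood via
    # PCA; the normal is the eigenvector with the smallest eigenvalue.
    tree = cKDTree(pc)
    _, idx = tree.query(pc, k=num_nbrs + 1)  # +1: the query point itself
    normals = np.empty_like(pc)
    for i in range(pc.shape[0]):
        nbrs = pc[idx[i]] - pc[idx[i]].mean(axis=0)
        cov = nbrs.T @ nbrs
        eigval, eigvec = np.linalg.eigh(cov)  # eigenvalues in ascending order
        normals[i] = eigvec[:, 0]
    return normals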
Example #4
def create_eigen_dataset(train=True, num_eigen=100, num_nbrs=5):
    if train:
        name = 'train'
    else:
        name = 'test'
    bs = 2048
    pc_files = get_files_list(
        f'../data/modelnet40_ply_hdf5_2048/{name}_files.txt')
    ds = CreateEigenLBO(pc_files, num_eigen=num_eigen, num_nbrs=num_nbrs)
    dl = DataLoader(ds, bs, shuffle=False, num_workers=20)

    dl_iter = iter(dl)
    num_batches = len(dl.batch_sampler)
    for batch_idx in range(num_batches):
        print(f'Working on file {batch_idx}', flush=True)
        eigval, eigvec, label = next(dl_iter)
        print(f'eigval.shape={eigval.shape}')
        print(f'eigvec.shape={eigvec.shape}')
        print(f'label.shape={label.shape}')
        with h5py.File(f'../data/eigen/eigen_data_{name}{batch_idx}.h5',
                       'w') as hf:
            hf.create_dataset("eigval", data=eigval)
            hf.create_dataset("eigvec", data=eigvec)
            hf.create_dataset("label", data=label)
    return
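
CreateEigenLBO is not shown; a common construction behind LBO-style eigen datasets on point clouds is a k-NN graph Laplacian whose smallest eigenpairs are extracted. A self-contained scipy sketch of that idea (knn_laplacian_eigs is a hypothetical name, the unit edge weights are an assumption, and the cloud must have more points than num_eigen):

import numpy as np
from scipy.spatial import cKDTree
from scipy.sparse import coo_matrix, csgraph
from scipy.sparse.linalg import eigsh

def knn_laplacian_eigs(pc, num_eigen=100, num_nbrs=5):
    # Symmetric k-NN adjacency -> graph Laplacian -> smallest eigenpairs.
    n = pc.shape[0]
    _, idx = cKDTree(pc).query(pc, k=num_nbrs + 1)
    rows = np.repeat(np.arange(n), num_nbrs)
    cols = idx[:, 1:].ravel()                  # drop each point's self-match
    A = coo_matrix((np.ones(len(rows)), (rows, cols)), shape=(n, n))
    A = ((A + A.T) > 0).astype(float)          # symmetrize the graph
    L = csgraph.laplacian(A, normed=False)
    # which='SM' targets the smallest eigenvalues (simple but slow;
    # shift-invert would be faster on large clouds).
    return eigsh(L, k=num_eigen, which='SM')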
Example #5
def time_check():
    lp = LineProfiler()
    lp.add_function(create_distance_mat)  # add additional function to profile
    lp_wrapper = lp(get_item)
    num_points = 1024
    tot_examples = 0
    examples = []
    split = 'train'
    h5_files = get_files_list(
        f'../data/modelnet40_ply_hdf5_2048/{split}_files.txt')
    for h5_file in h5_files:
        current_data, current_label = load_h5(h5_file)
        current_data = current_data[:, 0:num_points, :]
        for i in range(current_data.shape[0]):
            examples.append((current_data[i, :, :], current_label[i, :]))
        tot_examples += current_data.shape[0]
    sd, label = lp_wrapper(examples=examples, index=83)
    lp.print_stats()
    return
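
The same LineProfiler pattern in a self-contained form (requires the line_profiler package; slow_sum is just a stand-in function):

from line_profiler import LineProfiler

def slow_sum(n):
    total = 0
    for i in range(n):      # per-line timings are reported for this loop
        total += i * i
    return total

lp = LineProfiler()
lp_wrapper = lp(slow_sum)   # wrap the function to be profiled
lp_wrapper(100000)
lp.print_stats()            # per-line hit counts and timings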
Example #6
def train_net(expr_name: str, num_eigen: int, num_nbrs: int):
    num_points = 1024
    bs_train, bs_test = 32, 32
    train_files_pc = get_files_list(
        '../data/modelnet40_ply_hdf5_2048/train_files.txt')
    test_files_pc = get_files_list(
        '../data/modelnet40_ply_hdf5_2048/test_files.txt')
    # train_files_pc = get_files_list(f'../data/normals_1024/normals_train_files.txt')
    # test_files_pc = get_files_list(f'../data/normals_1024/normals_test_files.txt')

    train_files_lbo = get_files_list(
        f'../data/lbo_eig_{num_points}/spectral_train_files.txt')
    test_files_lbo = get_files_list(
        f'../data/lbo_eig_{num_points}/spectral_test_files.txt')
    train_files_gdd = get_files_list(
        f'../data/gdd_{num_points}/gdd_train_files.txt')
    test_files_gdd = get_files_list(
        f'../data/gdd_{num_points}/gdd_test_files.txt')

    ds_train = SfmDs(lbo_files=train_files_lbo,
                     pc_files=train_files_pc,
                     num_points=num_points,
                     num_eigen=num_eigen,
                     num_nbrs=num_nbrs,
                     gdd_files=train_files_gdd)
    ds_test = SfmDs(lbo_files=test_files_lbo,
                    pc_files=test_files_pc,
                    num_points=num_points,
                    num_eigen=num_eigen,
                    num_nbrs=num_nbrs,
                    gdd_files=test_files_gdd)

    dl_train = DataLoader(ds_train, bs_train, shuffle=True, num_workers=0)
    dl_test = DataLoader(ds_test, bs_test, shuffle=True, num_workers=0)

    lr = 5e-4
    min_lr = 1e-5
    l2_reg = 0
    model = SfmModel(num_eigen=num_eigen, num_nbs=num_nbrs)
    loss_fn = F.cross_entropy
    optimizer = torch.optim.Adam(model.parameters(),
                                 lr=lr,
                                 weight_decay=l2_reg)
    scheduler = torch.optim.lr_scheduler.StepLR(optimizer,
                                                step_size=20,
                                                gamma=0.7)

    trainer = SfmNetTrainer(model,
                            loss_fn,
                            optimizer,
                            scheduler,
                            min_lr=min_lr,
                            exp_name=expr_name)

    if os.path.isfile(f'results/{expr_name}.pt'):
        os.remove(f'results/{expr_name}.pt')
    _ = trainer.fit(dl_train,
                    dl_test,
                    num_epochs=400,
                    early_stopping=50,
                    checkpoints=expr_name)
    return
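
With lr=5e-4, step_size=20 and gamma=0.7, StepLR yields lr * 0.7 ** (epoch // 20); min_lr=1e-5 presumably serves as a floor or stopping criterion inside SfmNetTrainer (an assumption, since that class is not shown). A quick check of when the floor would be reached:

lr, gamma, step_size, min_lr = 5e-4, 0.7, 20, 1e-5
for epoch in range(0, 400, step_size):
    current = lr * gamma ** (epoch // step_size)  # StepLR schedule
    print(f'epoch {epoch}: lr={current:.2e}')
    if current <= min_lr:                         # floor hit at epoch 220
        break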
Example #7


if __name__ == '__main__':

    name = 'train'
    pc_files = get_files_list(
        f'../data/modelnet40_ply_hdf5_2048/{name}_files.txt')
    num_nbrs = 5
    num_eigen = 32
    num_points = 2048
    ds = CreateSpectralDist(h5_files=pc_files,
                            num_nbrs=num_nbrs,
                            num_eigen=num_eigen,
                            num_points=num_points)
    for index in range(5):
        pc, label = ds.get_pc(index)
        pc = pc[0:num_points, :]
        src, target, wt = ds.get_knn(pc)
        src, target, wt = ds.connect_graph(pc, src, target, wt)
        L, D = ds.create_laplacian(src, target, wt)
        dist_mat = distance_mat.get_distance_m(num_points, src, target, wt)
        eigen_val_lbo, eigen_vec_lbo = eigh(a=L, b=D, eigvals=(1, num_eigen))
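
scipy.linalg.eigh's eigvals argument selects eigenpairs by index and is inclusive on both ends, so (1, num_eigen) returns num_eigen pairs while skipping the trivial zeroth one (the Laplacian's zero eigenvalue). On SciPy >= 1.5, where eigvals= is deprecated, the equivalent call reuses the L, D and num_eigen from the loop above:

from scipy.linalg import eigh

# subset_by_index has the same inclusive-index semantics as eigvals=.
eigen_val_lbo, eigen_vec_lbo = eigh(a=L, b=D, subset_by_index=[1, num_eigen])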
Example #8
        # Flip the sign of each row and each column of `mat` independently
        # at random (sign1 is broadcast across columns, sign2 across rows).
        sign1 = np.random.choice([-1, 1], size=self.size)
        sign2 = np.random.choice([-1, 1], size=self.size)
        sign1 = np.reshape(sign1, (self.size, 1))
        sign2 = np.reshape(sign2, (self.size, 1))
        mat = np.multiply(sign1.transpose(), mat)  # (1, size): flips columns
        mat = np.multiply(mat, sign2)              # (size, 1): flips rows
        return mat

    def add_noise(self, mat):
        # Multiplicative noise drawn around 1 with a tiny standard deviation.
        noise = np.random.normal(1, 0.0001, (self.size, self.size))
        return mat * noise


matrix_size = 32
bs_train, bs_test = 32, 32
train_files_pc = get_files_list(
    '../data/modelnet40_ply_hdf5_2048/train_files.txt')
test_files_pc = get_files_list(
    '../data/modelnet40_ply_hdf5_2048/test_files.txt')

# train_files_lbo = get_files_list('../data/lbo_eig_2048/spectral_train_files.txt')
# test_files_lbo = get_files_list('../data/lbo_eig_2048/spectral_test_files.txt')
#
# train_files_dist = get_files_list('../data/dist_eig_2048/spectral_train_files.txt')
# test_files_dist = get_files_list('../data/dist_eig_2048/spectral_test_files.txt')

# ds_train = Spectral40Ds(train_files, size=matrix_size)
# ds_test = Spectral40Ds(test_files, size=matrix_size)

# ds_train = HybMat40Ds(lbo_files=train_files_lbo, dist_files=train_files_dist, pc_files=train_files_pc, size=matrix_size)

ds_train = training.PicNet40Ds(train_files_pc, num_slices=1)
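
A self-contained demo of the sign-flip augmentation above: independent ±1 flips per row and per column leave absolute values intact and only randomize signs (presumably targeting sign-ambiguous, eigenvector-derived matrices):

import numpy as np

size = 4
rng = np.random.default_rng(0)
mat = rng.standard_normal((size, size))

# Same operation as the method above, written with broadcasting.
sign_cols = rng.choice([-1, 1], size=(1, size))   # flips column signs
sign_rows = rng.choice([-1, 1], size=(size, 1))   # flips row signs
aug = mat * sign_cols * sign_rows

assert np.allclose(np.abs(aug), np.abs(mat))      # magnitudes unchanged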
Example #9
def train_spectral_net(matrix_size):
    bs_train, bs_test = 32, 32
    train_files_pc = get_files_list(
        '../data/modelnet40_ply_hdf5_2048/train_files.txt')
    test_files_pc = get_files_list(
        '../data/modelnet40_ply_hdf5_2048/test_files.txt')

    train_files_lbo = get_files_list(
        '../data/lbo_eig_2048/spectral_train_files.txt')
    test_files_lbo = get_files_list(
        '../data/lbo_eig_2048/spectral_test_files.txt')

    train_files_dist = get_files_list(
        '../data/dist_eig_2048/spectral_train_files.txt')
    test_files_dist = get_files_list(
        '../data/dist_eig_2048/spectral_test_files.txt')

    # ds_train = Spectral40Ds(train_files, size=matrix_size)
    # ds_test = Spectral40Ds(test_files, size=matrix_size)

    ds_train = HybMat40Ds(lbo_files=train_files_lbo,
                          dist_files=train_files_dist,
                          pc_files=train_files_pc,
                          size=matrix_size)
    ds_test = HybMat40Ds(lbo_files=test_files_lbo,
                         dist_files=test_files_dist,
                         pc_files=test_files_pc,
                         size=matrix_size)

    # ds_train = SpectralWithEig40Ds(train_files, size=matrix_size)
    # ds_test = SpectralWithEig40Ds(test_files, size=matrix_size)

    dl_train = DataLoader(ds_train, bs_train, shuffle=True, num_workers=4)
    dl_test = DataLoader(ds_test, bs_test, shuffle=True, num_workers=4)

    lr = 1e-4
    min_lr = 5e-6
    l2_reg = 0
    our_model = ResNet(BasicBlock, [2, 2, 2, 2])
    # our_model = SpectralSimpleNet(mat_size=matrix_size)
    # our_model = SpectralSimpleNetBn(mat_size=matrix_size)
    # our_model = SpectralFcNet(c_in=matrix_size)
    # our_model = SpectralConv1dNet(c_in=matrix_size)
    loss_fn = F.cross_entropy
    optimizer = torch.optim.Adam(our_model.parameters(),
                                 lr=lr,
                                 weight_decay=l2_reg)
    scheduler = torch.optim.lr_scheduler.StepLR(optimizer,
                                                step_size=100,
                                                gamma=0.9)
    trainer = NetTrainer(our_model,
                         loss_fn,
                         optimizer,
                         scheduler,
                         min_lr=min_lr)
    # trainer = EigTrainer(our_model, loss_fn, optimizer, scheduler, min_lr=min_lr)

    expr_name = f'Spectral-resnet-brs-lr{lr}-noise'
    if os.path.isfile(f'results/{expr_name}.pt'):
        os.remove(f'results/{expr_name}.pt')
    _ = trainer.fit(dl_train,
                    dl_test,
                    num_epochs=10000,
                    early_stopping=50,
                    checkpoints=expr_name)
    return
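
ResNet(BasicBlock, [2, 2, 2, 2]) is the resnet-18 layout. If the local class mirrors torchvision's, single-channel spectral matrices need the stem conv adapted; a sketch of that adaptation using torchvision (assuming HybMat40Ds yields (1, size, size) tensors and 40 ModelNet classes):

import torch
import torch.nn as nn
from torchvision.models import resnet18

model = resnet18(num_classes=40)   # same BasicBlock, [2, 2, 2, 2] layout
# Spectral matrices are single-channel, not RGB: swap the stem conv.
model.conv1 = nn.Conv2d(1, 64, kernel_size=7, stride=2, padding=3, bias=False)

x = torch.randn(2, 1, 32, 32)      # batch of 32x32 matrices
print(model(x).shape)              # torch.Size([2, 40])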