Example #1
def main():
    time_start = time.time()
    # load data
    train_data, train_label = modelnet_data.data_load(
        num_point=initial_point,
        data_dir=os.path.join(BASE_DIR, 'modelnet40_ply_hdf5_2048'),
        train=True)
    test_data, test_label = modelnet_data.data_load(
        num_point=initial_point,
        data_dir=os.path.join(BASE_DIR, 'modelnet40_ply_hdf5_2048'),
        train=False)

    # validation set
    if VALID:
        train_data, train_label, valid_data, valid_label = modelnet_data.data_separate(
            train_data, train_label)
    else:
        valid_data = test_data
        valid_label = test_label

    print(train_data.shape, train_label.shape, valid_data.shape,
          valid_label.shape)

    if ENSEMBLE:
        angle = np.repeat(angle_rotation, freq_rotation)
    else:
        angle = [0]

    params_total = {}
    feat_train = []
    feat_valid = []
    for i in range(len(angle)):
        log_string('------------Train {} --------------'.format(i))
        params, leaf_node, leaf_node_energy = pointhop.pointhop_train(
            True,
            train_data,
            n_newpoint=num_point,
            n_sample=num_sample,
            threshold=threshold)
        feature_train = pointhop.extract(leaf_node)
        feature_train = np.concatenate(feature_train, axis=-1)
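        # Optional feature selection: keep the FE fraction of features with the lowest cross-entropy.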
        if FE is not None:
            entropy = pointhop.CE(feature_train, train_label, 40)
            ind = np.argsort(entropy)
            fe_ind = ind[:int(len(ind) * FE)]
            feature_train = feature_train[:, fe_ind]
            params_total['fe_ind:', i] = fe_ind
        weight = pointhop.llsr_train(feature_train, train_label)
        feature_train, pred_train = pointhop.llsr_pred(feature_train, weight)
        feat_train.append(feature_train)
        acc_train = sklearn.metrics.accuracy_score(train_label, pred_train)
        log_string('train accuracy: {}'.format(acc_train))
        params_total['params:', i] = params
        params_total['weight:', i] = weight
        train_data = data_utils.data_augment(train_data, angle[i])

        if VALID:
            log_string('------------Validation {} --------------'.format(i))
            leaf_node_test = pointhop.pointhop_pred(False,
                                                    valid_data,
                                                    pca_params=params,
                                                    n_newpoint=num_point,
                                                    n_sample=num_sample)
            feature_valid = pointhop.extract(leaf_node_test)
            feature_valid = np.concatenate(feature_valid, axis=-1)
            if FE is not None:
                feature_valid = feature_valid[:, fe_ind]
            feature_valid, pred_valid = pointhop.llsr_pred(
                feature_valid, weight)
            acc_valid = sklearn.metrics.accuracy_score(valid_label, pred_valid)
            acc = pointhop.average_acc(valid_label, pred_valid)
            feat_valid.append(feature_valid)
            log_string('val: {}, val mean: {}'.format(acc_valid, np.mean(acc)))
            log_string('per-class: {}'.format(acc))
            valid_data = data_utils.data_augment(valid_data, angle[i])

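    # Ensemble stage: concatenate the per-rotation features and fit a second-stage LLSR on top.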
    if ENSEMBLE:
        feat_train = np.concatenate(feat_train, axis=-1)
        weight = pointhop.llsr_train(feat_train, train_label)
        feat_train, pred_train = pointhop.llsr_pred(feat_train, weight)
        acc_train = sklearn.metrics.accuracy_score(train_label, pred_train)
        params_total['weight ensemble'] = weight
        log_string('ensemble train accuracy: {}'.format(acc_train))

        if VALID:
            feat_valid = np.concatenate(feat_valid, axis=-1)
            feat_valid, pred_valid = pointhop.llsr_pred(feat_valid, weight)
            acc_valid = sklearn.metrics.accuracy_score(valid_label, pred_valid)
            acc = pointhop.average_acc(valid_label, pred_valid)
            log_string('ensemble val: {}, ensemble val mean: {}'.format(
                acc_valid, np.mean(acc)))
            log_string('ensemble per-class: {}'.format(acc))

    time_end = time.time()
    log_string('total time cost is {} minutes'.format(
        (time_end - time_start) // 60))

    with open(os.path.join(LOG_DIR, 'params.pkl'), 'wb') as f:
        pickle.dump(params_total, f)
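Example #1 trains one PointHop++ model per rotation view, optionally keeps only the lowest-entropy features, fits a least-squares regression (LLSR) classifier, and, when ENSEMBLE is set, stacks the per-view features into a second-stage LLSR. The snippet relies on module-level configuration that is not shown. The sketch below is an assumption for illustration only: the hyperparameter values, the log-file layout, and the log_string helper are guesses rather than part of the original script.

# Hypothetical module-level preamble assumed by the examples (values are illustrative).
import os
import pickle
import time

import numpy as np
import sklearn.metrics

import data_utils      # augmentation helpers (data_augment)
import modelnet_data   # ModelNet40 HDF5 loading and train/validation split
import pointhop        # PointHop feature extraction, LLSR, and evaluation utilities

BASE_DIR = os.path.dirname(os.path.abspath(__file__))
LOG_DIR = os.path.join(BASE_DIR, 'log')
os.makedirs(LOG_DIR, exist_ok=True)
LOG_FOUT = open(os.path.join(LOG_DIR, 'log_train.txt'), 'a')

# Hyperparameters (illustrative values, not taken from the original script).
initial_point = 1024                   # points sampled per shape when loading
num_point = [1024, 128, 128, 64]       # points kept after each PointHop unit
num_sample = [64, 64, 64, 64]          # neighborhood size per unit
threshold = 0.0001                     # Saab energy threshold
FE = 0.95                              # fraction of lowest-entropy features to keep, or None
VALID = True                           # carve a validation split out of the training set
ENSEMBLE = True                        # one model per rotation angle plus a second-stage LLSR
angle_rotation = np.arange(0.0, 2 * np.pi, np.pi / 4)  # rotation angles for augmentation
freq_rotation = 1                      # repetitions per angle


def log_string(out_str):
    """Append a line to the run log and echo it to stdout."""
    LOG_FOUT.write(out_str + '\n')
    LOG_FOUT.flush()
    print(out_str)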
Example #2
def main():
    time_start = time.time()

    # load data
    train_data, train_label = modelnet_data.data_load(num_point=initial_point, data_dir='modelnet40_ply_hdf5_2048', train=True)
    test_data, test_label = modelnet_data.data_load(num_point=initial_point, data_dir='modelnet40_ply_hdf5_2048', train=False)

    # validation set
    if VALID:
        train_data, train_label, valid_data, valid_label = modelnet_data.data_separate(train_data, train_label)
    else:
        valid_data = test_data
        valid_label = test_label

    print(train_data.shape)
    print(valid_data.shape)

    if ENSEMBLE:
        angle = np.repeat(angle_rotation, freq_rotation)
    else:
        angle = [0]

    params = {}
    feat_train = []
    feat_valid = []
    for i in range(len(angle)):
        print('------------Train ', i, '--------------')
        idx_save, new_xyz_save, final_feature_train, feature_train, pca_params = \
            pointhop.pointhop_train(train_data, n_newpoint=num_point, n_sample=num_sample, layer_num=num_filter,
                                    energy_percent=None)
        print('------------Validation ', i, '--------------')

        final_feature_valid, feature_valid = pointhop.pointhop_pred(
            valid_data, pca_params=pca_params, n_newpoint=num_point, n_sample=num_sample, layer_num=num_filter,
            idx_save=None, new_xyz_save=None)

        feature_train = pointhop.extract(feature_train)
        feature_valid = pointhop.extract(feature_valid)
        feat_train.append(feature_train)
        feat_valid.append(feature_valid)
        params['stage %d pca_params' % i] = pca_params

        train_data = data_utils.data_augment(train_data, angle[i])
        valid_data = data_utils.data_augment(valid_data, angle[i])

    feat_train = np.concatenate(feat_train, axis=-1)
    feat_valid = np.concatenate(feat_valid, axis=-1)

    clf, acc_train, acc_valid, acc = pointhop.classify(feat_train, train_label, feat_valid, valid_label, pooling)
    params['clf'] = clf

    time_end = time.time()

    log_string("train acc is {}".format(acc_train))
    log_string('eval acc is {}'.format(acc_valid))
    log_string('eval mean acc is {}'.format(np.mean(acc)))
    log_string('per-class acc is {}'.format(str(acc)))
    log_string('total time cost is {} minutes'.format((time_end - time_start) // 60))

    with open(os.path.join(LOG_DIR, 'params.pkl'), 'wb') as f:
        pickle.dump(params, f)
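Example #2 follows the original PointHop pipeline rather than the LLSR variant of Example #1: pointhop_train returns per-layer Saab/PCA parameters, and classification is delegated to pointhop.classify together with a pooling choice. Beyond the preamble sketched after Example #1, the script needs two further globals and an entry point. The names num_filter and pooling come from the snippet; the values below are assumptions.

# Additional globals assumed by Example #2 (illustrative values).
num_filter = 4      # number of PointHop units, passed as layer_num
pooling = 'max'     # pooling scheme expected by pointhop.classify (exact format assumed)

if __name__ == '__main__':
    main()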
Example #3
def main():
    time_start = time.time()
    # load data
    train_data, train_label = modelnet_data.data_load(
        num_point=initial_point,
        data_dir=os.path.join(BASE_DIR, 'modelnet40_ply_hdf5_2048'),
        train=True)
    test_data, test_label = modelnet_data.data_load(
        num_point=initial_point,
        data_dir=os.path.join(BASE_DIR, 'modelnet40_ply_hdf5_2048'),
        train=False)

    # validation set
    if VALID:
        train_data, train_label, valid_data, valid_label = modelnet_data.data_separate(
            train_data, train_label)
    else:
        valid_data = test_data
        valid_label = test_label

    print(train_data.shape, train_label.shape, valid_data.shape,
          valid_label.shape)

    if ENSEMBLE:
        angle = np.repeat(angle_rotation, freq_rotation)
    else:
        angle = [0]

    feat_valid = []
    for i in range(len(angle)):
        with open(os.path.join(LOG_DIR, 'params.pkl'), 'rb') as f:
            params_total = pickle.load(f)
        params = params_total['params:', i]
        weight = params_total['weight:', i]

        log_string('------------Test {} --------------'.format(i))
        leaf_node_test = pointhop.pointhop_pred(False,
                                                valid_data,
                                                pca_params=params,
                                                n_newpoint=num_point,
                                                n_sample=num_sample)
        feature_valid = pointhop.extract(leaf_node_test)
        feature_valid = np.concatenate(feature_valid, axis=-1)
        if FE is not None:
            fe_ind = params_total['fe_ind:', i]
            feature_valid = feature_valid[:, fe_ind]
        feature_valid, pred_valid = pointhop.llsr_pred(feature_valid, weight)
        feat_valid.append(feature_valid)

        acc_valid = sklearn.metrics.accuracy_score(valid_label, pred_valid)
        acc = pointhop.average_acc(valid_label, pred_valid)
        log_string('test: {}, test mean: {}'.format(acc_valid, np.mean(acc)))
        log_string('per-class: {}'.format(acc))
        valid_data = data_utils.data_augment(valid_data, angle[i])

    if ENSEMBLE:
        weight = params_total['weight ensemble']
        feat_valid = np.concatenate(feat_valid, axis=-1)
        feat_valid, pred_valid = pointhop.llsr_pred(feat_valid, weight)
        acc_valid = sklearn.metrics.accuracy_score(valid_label, pred_valid)
        acc = pointhop.average_acc(valid_label, pred_valid)
        log_string('ensemble test: {}, ensemble test mean: {}'.format(
            acc_valid, np.mean(acc)))
        log_string('ensemble per-class: {}'.format(acc))

    time_end = time.time()
    log_string('total time cost is {} minutes'.format(
        (time_end - time_start) // 60))
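Example #3 is the evaluation counterpart of Example #1: it reloads params.pkl and replays the per-rotation feature extraction and LLSR prediction on the test (or validation) split. The pickle uses the tuple keys ('params:', i), ('weight:', i), and ('fe_ind:', i) plus the string key 'weight ensemble', exactly as written by Example #1. A short, illustrative way to inspect such a file (the LOG_DIR value is an assumption, matching the preamble sketched earlier):

import os
import pickle

LOG_DIR = 'log'  # path to the training log directory (assumed)

# List the keys stored by the training script (Example #1).
with open(os.path.join(LOG_DIR, 'params.pkl'), 'rb') as f:
    params_total = pickle.load(f)

for key in params_total:
    print(key)   # e.g. ('params:', 0), ('weight:', 0), ('fe_ind:', 0), 'weight ensemble'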