Example #1
def metric_lmnn_modular(train_fname=traindat,
                        test_fname=testdat,
                        label_train_fname=label_traindat,
                        k=3):
    try:
        from modshogun import RealFeatures, MulticlassLabels, LMNN, KNN, CSVFile
    except ImportError:
        return

    # wrap features and labels into Shogun objects
    feats_train = RealFeatures(CSVFile(train_fname))
    feats_test = RealFeatures(CSVFile(test_fname))
    labels = MulticlassLabels(CSVFile(label_train_fname))

    # LMNN
    lmnn = LMNN(feats_train, labels, k)
    lmnn.train()
    lmnn_distance = lmnn.get_distance()

    # perform classification with KNN
    knn = KNN(k, lmnn_distance, labels)
    knn.train()
    output = knn.apply(feats_test).get_labels()

    return lmnn, output
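A minimal invocation sketch; the CSV file names below are placeholders, not files shipped with the example:

    lmnn_model, predictions = metric_lmnn_modular('train.csv', 'test.csv',
                                                  'train_labels.csv', k=3)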
Example #2
    def load_train(self):
        ims, labels = self.load(self.test_images, self.test_labels)

        self.test_images = ims
        self.test_labels = labels

        # wrap the loaded data into Shogun containers and train a KNN
        # classifier on them with a plain Euclidean distance
        labels_numbers = MulticlassLabels(self.test_labels)
        feats = RealFeatures(self.test_images.T)
        dist = EuclideanDistance()
        self.knn = KNN(self.k, dist, labels_numbers)
        self.knn.train(feats)
Example #3
def assign_labels(data, centroids, ncenters):
    from modshogun import EuclideanDistance
    from modshogun import RealFeatures, MulticlassLabels
    from modshogun import KNN
    from numpy import arange

    # a 1-NN trained on the centroids assigns each point to its nearest centroid
    labels = MulticlassLabels(arange(0., ncenters))
    fea = RealFeatures(data)
    fea_centroids = RealFeatures(centroids)
    distance = EuclideanDistance(fea_centroids, fea_centroids)
    knn = KNN(1, distance, labels)
    knn.train()
    return knn.apply(fea)
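A short usage sketch on synthetic data; Shogun expects one column per point, and the shapes below are arbitrary assumptions:

    import numpy as np
    data = np.random.rand(2, 100)      # 100 two-dimensional points
    centroids = np.random.rand(2, 3)   # 3 centroids
    assigned = assign_labels(data, centroids, 3)
    print(assigned.get_labels())       # nearest-centroid index per point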
Example #4
def knn(train_features, train_labels, test_features, test_labels, k=1):
    from modshogun import KNN, MulticlassAccuracy, EuclideanDistance

    distance = EuclideanDistance(train_features, train_features)
    knn = KNN(k, distance, train_labels)
    knn.train()
    train_output = knn.apply()
    test_output = knn.apply(test_features)
    evaluator = MulticlassAccuracy()
    print('KNN training error is %.4f' % (
        (1 - evaluator.evaluate(train_output, train_labels)) * 100))
    print('KNN test error is %.4f' % (
        (1 - evaluator.evaluate(test_output, test_labels)) * 100))
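A hedged sketch of calling the function above, wrapping NumPy arrays into the Shogun containers it expects (feature dimension and class count are arbitrary):

    import numpy as np
    from modshogun import RealFeatures, MulticlassLabels

    X_train = RealFeatures(np.random.rand(5, 100))   # 5 features x 100 vectors
    y_train = MulticlassLabels(np.random.randint(0, 3, 100).astype(np.float64))
    X_test = RealFeatures(np.random.rand(5, 20))
    y_test = MulticlassLabels(np.random.randint(0, 3, 20).astype(np.float64))
    knn(X_train, y_train, X_test, y_test, k=3)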
Example #5
    def BuildModel(self, data, labels, options):
        # Get all the parameters.
        n = re.search("-n (\d+)", options)

        self.n_neighbors = 5 if not n else int(n.group(1))

        distance = EuclideanDistance(data, data)
        from modshogun import KNN_KDTREE
        knc = KNN(self.n_neighbors, distance, labels, KNN_KDTREE)
        knc.set_leaf_size(30)
        knc.train()

        return knc
Example #6
    def BuildModel(self, data, labels, options):
        # Get all the parameters.
        n = re.search("-n (\d+)", options)

        self.n_neighbors = 5 if not n else int(n.group(1))

        # Create and train the classifier.
        distance = EuclideanDistance(data, data)
        knc = KNN(self.n_neighbors, distance, labels)
        knc.train()

        return knc
Example #7
def knn_classify(traindat, testdat, k=3):
    from modshogun import KNN, MulticlassAccuracy, EuclideanDistance

    train_features, train_labels = traindat.features, traindat.labels

    distance = EuclideanDistance(train_features, train_features)
    knn = KNN(k, distance, train_labels)
    knn.train()

    test_features, test_labels = testdat.features, testdat.labels

    predicted_labels = knn.apply(test_features)
    evaluator = MulticlassAccuracy()
    acc = evaluator.evaluate(predicted_labels, test_labels)
    err = 1 - acc

    return err
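knn_classify only assumes its arguments expose .features and .labels; a minimal sketch with a hypothetical namedtuple (train_feats and friends are prepared Shogun objects):

    from collections import namedtuple

    Dataset = namedtuple('Dataset', ['features', 'labels'])
    err = knn_classify(Dataset(train_feats, train_labels),
                       Dataset(test_feats, test_labels), k=3)
    print('KNN test error: %.4f' % err)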
Example #8
    def BuildModel(self, data, labels, options):
        # Get all the parameters.
        if "k" in options:
            n_neighbors = int(options.pop("k"))
        else:
            Log.Fatal("Required parameter 'k' not specified!")
            raise Exception("missing parameter")

        if len(options) > 0:
            Log.Fatal("Unknown parameters: " + str(options))
            raise Exception("unknown parameters")

        distance = EuclideanDistance(data, data)
        knc = KNN(self.n_neighbors, distance, labels, KNN_KDTREE)
        knc.train()

        return knc
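A hedged sketch of calling this method; the enclosing benchmark instance and the Shogun data containers are assumptions:

    model = benchmark.BuildModel(feats_train, labels_train, {"k": "3"})
    predictions = model.apply(feats_test)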
Example #9
def classifier_knn_modular(train_fname=traindat,
                           test_fname=testdat,
                           label_train_fname=label_traindat,
                           k=3):
    from modshogun import RealFeatures, MulticlassLabels, KNN, EuclideanDistance, CSVFile

    feats_train = RealFeatures(CSVFile(train_fname))
    feats_test = RealFeatures(CSVFile(test_fname))
    distance = EuclideanDistance(feats_train, feats_train)

    labels = MulticlassLabels(CSVFile(label_train_fname))

    knn = KNN(k, distance, labels)
    knn_train = knn.train()
    output = knn.apply(feats_test).get_labels()
    multiple_k = knn.classify_for_multiple_k()

    return knn, knn_train, output, multiple_k
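classify_for_multiple_k computes predictions for every neighborhood size from 1 up to k in a single call. An invocation sketch with placeholder CSV paths:

    knn, trained, preds, multiple_k = classifier_knn_modular(
        'train.csv', 'test.csv', 'train_labels.csv', k=3)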
Example #10
def lmnn(train_features, train_labels, test_features, test_labels, k=1):
    from modshogun import LMNN, KNN, MSG_DEBUG, MulticlassAccuracy

    # uncomment for verbose LMNN training output:
    # dummy = LMNN()
    # dummy.io.set_loglevel(MSG_DEBUG)

    lmnn = LMNN(train_features, train_labels, k)
    lmnn.train()
    distance = lmnn.get_distance()

    knn = KNN(k, distance, train_labels)
    knn.train()

    train_output = knn.apply()
    test_output = knn.apply(test_features)
    evaluator = MulticlassAccuracy()
    print('LMNN training error is %.4f' % (
        (1 - evaluator.evaluate(train_output, train_labels)) * 100))
    print('LMNN test error is %.4f' % (
        (1 - evaluator.evaluate(test_output, test_labels)) * 100))
Example #11
def lmnn_classify(traindat, testdat, k=3):
    from modshogun import LMNN, KNN, MulticlassAccuracy, MSG_DEBUG

    train_features, train_labels = traindat.features, traindat.labels

    lmnn = LMNN(train_features, train_labels, k)
    lmnn.set_maxiter(1200)
    lmnn.io.set_loglevel(MSG_DEBUG)
    lmnn.train()

    distance = lmnn.get_distance()
    knn = KNN(k, distance, train_labels)
    knn.train()

    test_features, test_labels = testdat.features, testdat.labels

    predicted_labels = knn.apply(test_features)
    evaluator = MulticlassAccuracy()
    acc = evaluator.evaluate(predicted_labels, test_labels)
    err = 1 - acc

    return err
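As with knn_classify in Example #7, any objects exposing .features and .labels work; a sketch reusing the hypothetical Dataset namedtuple from there:

    err = lmnn_classify(Dataset(train_feats, train_labels),
                        Dataset(test_feats, test_labels), k=3)
    print('LMNN test error: %.4f' % err)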
Example #12
    def KNNAccuracy(distance, data, k, flag):
        transformedData = np.dot(data[0], distance.T)
        feat = RealFeatures(transformedData.T)
        labels = MulticlassLabels(data[1].astype(np.float64))
        dist = EuclideanDistance(feat, feat)
        knn = KNN(k + 1, dist, labels)
        knn.train(feat)
        # Get nearest neighbors and drop the first row: each point is its own
        # nearest neighbor.
        nn = knn.nearest_neighbors()
        nn = np.delete(nn, 0, 0)
        # Compute unique labels.
        uniqueLabels = np.unique(labels)
        # Keep count of correct predictions.
        count = 0
        # Normalize labels
        for i in range(data[0].shape[0]):
            for j in range(len(uniqueLabels)):
                if (labels[i] == uniqueLabels[j]):
                    labels[i] = j
                    break

        for i in range(nn.shape[1]):
            mapLabels = [0 for x in range(len(uniqueLabels))]
            for j in range(nn.shape[0]):
                if (flag):
                    distPoints = np.linalg.norm(data[0][nn[j][i], :] -
                                                data[0][i, :])
                    # Add a constant factor of 1 in case two points overlap.
                    mapLabels[int(labels[nn[j, i]])] += 1 / (distPoints + 1)**2
                else:
                    # Subtract a small variable factor to break ties without
                    # affecting the actual result.
                    mapLabels[int(labels[nn[j, i]])] += 1 - j * 1e-8
            maxInd = np.argmax(mapLabels)
            if (maxInd == labels[i]):
                count += 1
        accuracy = (count / nn.shape[1]) * 100
        return accuracy
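A hypothetical call; the surrounding class and the np/Shogun imports are assumed to be in scope:

    X = np.random.rand(50, 4)          # 50 points, 4 features
    y = np.random.randint(0, 3, 50)    # 3 classes
    acc = KNNAccuracy(np.eye(4), (X, y), 3, False)   # identity transform, majority vote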
Example #13
def lmnn_diagonal(train_features,
                  train_labels,
                  test_features,
                  test_labels,
                  k=1):
    from modshogun import LMNN, KNN, MSG_DEBUG, MulticlassAccuracy

    lmnn = LMNN(train_features, train_labels, k)
    lmnn.set_diagonal(True)
    lmnn.train()
    distance = lmnn.get_distance()

    knn = KNN(k, distance, train_labels)
    knn.train()

    train_output = knn.apply()
    test_output = knn.apply(test_features)
    evaluator = MulticlassAccuracy()
    print('LMNN-diagonal training error is %.4f' % (
        (1 - evaluator.evaluate(train_output, train_labels)) * 100))
    print('LMNN-diagonal test error is %.4f' % (
        (1 - evaluator.evaluate(test_output, test_labels)) * 100))
Example #14
        # (fragment: tail of a plotting helper from the LMNN neighborhood demo)
        axis.plot(xs, ys, COLS[int(y[i])])


figure, axarr = pyplot.subplots(3, 1)
x, y = sandwich_data()

features = RealFeatures(x.T)
labels = MulticlassLabels(y)

print('%d vectors with %d features' %
      (features.get_num_vectors(), features.get_num_features()))
assert (features.get_num_vectors() == labels.get_num_labels())

distance = EuclideanDistance(features, features)
k = 2
knn = KNN(k, distance, labels)

plot_data(x, y, axarr[0])
plot_neighborhood_graph(x, knn.nearest_neighbors(), axarr[0])
axarr[0].set_aspect('equal')
axarr[0].set_xlim(-6, 4)
axarr[0].set_ylim(-3, 2)

lmnn = LMNN(features, labels, k)
lmnn.set_maxiter(10000)
lmnn.train()
L = lmnn.get_linear_transform()
knn.set_distance(lmnn.get_distance())

plot_data(x, y, axarr[1])
plot_neighborhood_graph(x, knn.nearest_neighbors(), axarr[1])
Example #15
def evaluate(labels,
             feats,
             params={
                 'n_neighbors': 2,
                 'use_cover_tree': 'True',
                 'dist': 'Manhattan'
             },
             Nsplit=2):
    """
        Run Cross-validation to evaluate the KNN.

        Parameters
        ----------
        labels: 2d array
            Data set labels.
        feats: array
            Data set feats.
        params: dictionary
            Search scope parameters.
        Nsplit: int, default = 2
            The n for n-fold cross validation.
        all_ks: range of int, default = range(1, 21)
            Numbers of neighbors.
    """
    k = params.get('n_neighbors')
    use_cover_tree = params.get('use_cover_tree') == 'True'
    if params.get('dist') == 'Euclidean':
        func_dist = EuclideanDistance
    else:
        func_dist = ManhattanMetric

    split = CrossValidationSplitting(labels, Nsplit)
    split.build_subsets()

    accuracy = np.zeros(Nsplit)
    acc_train = np.zeros(accuracy.shape)
    time_test = np.zeros(accuracy.shape)
    for i in range(Nsplit):
        idx_train = split.generate_subset_inverse(i)
        idx_test = split.generate_subset_indices(i)

        feats.add_subset(idx_train)
        labels.add_subset(idx_train)

        dist = func_dist(feats, feats)
        knn = KNN(k, dist, labels)
        knn.set_store_model_features(True)
        if use_cover_tree:
            knn.set_knn_solver_type(KNN_COVER_TREE)
        else:
            knn.set_knn_solver_type(KNN_BRUTE)
        knn.train()

        evaluator = MulticlassAccuracy()
        pred = knn.apply_multiclass()
        acc_train[i] = evaluator.evaluate(pred, labels)

        feats.remove_subset()
        labels.remove_subset()
        feats.add_subset(idx_test)
        labels.add_subset(idx_test)

        t_start = time.time()
        pred = knn.apply_multiclass(feats)
        time_test[i] = (time.time() - t_start) / labels.get_num_labels()

        accuracy[i] = evaluator.evaluate(pred, labels)

        feats.remove_subset()
        labels.remove_subset()
    print(accuracy.mean())
    return accuracy
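A sketch of driving evaluate() on synthetic data; the modshogun imports (RealFeatures, MulticlassLabels, CrossValidationSplitting, KNN, and the distance classes) are assumed to be in scope:

    import numpy as np
    from modshogun import RealFeatures, MulticlassLabels

    feats = RealFeatures(np.random.rand(4, 60))
    labels = MulticlassLabels(np.random.randint(0, 3, 60).astype(np.float64))
    acc = evaluate(labels, feats,
                   params={'n_neighbors': 3, 'use_cover_tree': 'False',
                           'dist': 'Euclidean'},
                   Nsplit=2)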