Example #1
def main():
    """Train a TFDeep model on MNIST and report binary metrics on the
    training and test splits."""
    np.random.seed(100)
    tf.set_random_seed(100)

    # Load MNIST twice: one-hot targets for training, plain class indices
    # for the metric computation.
    mnist = input_data.read_data_sets(tf.app.flags.FLAGS.data_dir,
                                      one_hot=True)
    mnist_no_oh = input_data.read_data_sets(tf.app.flags.FLAGS.data_dir,
                                            one_hot=False)

    n_examples = mnist.train.images.shape[0]
    n_features = mnist.train.images.shape[1]
    n_classes = mnist.train.labels.shape[1]

    train_x = mnist.train.images
    train_y_oh = mnist.train.labels
    train_y = mnist_no_oh.train.labels

    test_x = mnist.test.images
    test_y = mnist_no_oh.test.labels

    print(n_examples, n_features, n_classes)

    # Construct the computing graph: a single-layer model (input -> classes),
    # Adam optimiser with decay, no L2 regularisation.
    deep_model = td.TFDeep(nn_configuration=[n_features, n_classes],
                           param_delta=0.2,
                           param_lambda=0,
                           no_linearity_function=tf.nn.relu,
                           adam=True,
                           decay=True)

    #deep_model.train(X, Yoh_, param_niter=10)
    deep_model.train_mb(mnist, epoch_number=100, batch_size=100)
    deep_model.eval(train_x, train_y_oh)

    accuracy, recall, precision = data.eval_perf_binary(
        deep_model.predict(train_x), train_y)
    print('TRAINING\nAcc: {0}\nRecall: {1}\nPrecision: {2}\n'.format(
        accuracy, recall, precision))

    accuracy, recall, precision = data.eval_perf_binary(
        deep_model.predict(test_x), test_y)
    print('\nTEST\nAcc: {0}\nRecall: {1}\nPrecision: {2}\n'.format(
        accuracy, recall, precision))

    print_all_numbers(deep_model)
Example #2
def main():
    """Train binary logistic regression on two sampled Gaussian classes and
    report accuracy, recall, precision and average precision."""
    np.random.seed(100)

    X, Y_ = data.sample_gauss_2d(2, 100)
    w, b = binlogreg_train(X, Y_)

    # Posterior P(c1|x); threshold at 0.5 for the hard decision.
    probabilities = binlogreg_classify(X, w, b)
    Y = (probabilities >= .5).astype(int)

    accuracy, recall, precision = data.eval_perf_binary(Y, Y_)
    # AP is computed over the true labels ranked by predicted probability.
    AP = data.eval_AP(Y_[probabilities.argsort()])
    print('Acc: {0}\nRecall: {1}\nPrecision: {2}\nAP: {3}\n'.format(accuracy, recall, precision, AP))
Example #3
def __test():
    """Smoke-test KSVMWrap on a GMM sample and plot its decision surface."""
    np.random.seed(100)
    X, Y_ = data.sample_gmm(6, 2, 10)
    svm = KSVMWrap(X, Y_)
    Y = svm.predict(X)

    accuracy, recall, precision = data.eval_perf_binary(Y, Y_)
    for label, value in (("Accuracy", accuracy),
                         ("Precision", precision),
                         ("Recall", recall)):
        print(label + " : ", value)

    # Surface over the data's bounding box; support vectors highlighted.
    bbox = (np.min(X, axis=0), np.max(X, axis=0))
    data.graph_surface(svm.get_scores, bbox, offset=0)
    data.graph_data(X, Y_, Y, special=svm.support())
    plt.show()
Example #4
def main():
    """Train a two-layer fully-connected net (fcann2) on GMM data, report
    binary metrics, and plot the decision surface."""
    # create the dataset
    X, Y_, Yoh_ = data.sample_gmm_2d(K=6, C=2, N=10)
    model = fcann2_train(X, Y_)

    # classify and take the arg-max class per sample
    probabilities = fcann2_classify(X, model)
    Y = np.argmax(probabilities, axis=1)

    # evaluate the model
    accuracy, recall, precision = data.eval_perf_binary(Y, Y_)
    print('Acc: {0}\nRecall: {1}\nPrecision: {2}\n'.format(
        accuracy, recall, precision))

    # graph the decision surface over the data's bounding box, then the points
    bbox = (np.min(X, axis=0), np.max(X, axis=0))
    data.graph_surface(fcann2_decfun(X, model), bbox, offset=0)
    data.graph_data(X, Y_, Y)

    # show the results
    #plt.savefig('fcann2_classification.png')
    plt.show()
def main():
    """Train an RBF-kernel SVM on a 3-class GMM sample, visualise the
    decision surface, and print binary metrics."""
    np.random.seed(100)

    # Init the dataset
    n_classes = 3
    X, Y_, Yoh_ = data.sample_gmm_2d(K=6, C=n_classes, N=40)

    # Train the model
    svm = KSVMWrap(C=1.0, X=X, Y_=Y_, kernel='rbf')

    # Plot the decision surface and the data points
    bbox = (np.min(X, axis=0), np.max(X, axis=0))
    data.graph_surface(svm_classify(X, svm), bbox, offset=0)
    data.graph_data(X, Y_, svm.predict(X), svm.get_scores())

    # show the results
    #plt.savefig('svm.png')
    plt.show()

    accuracy, recall, precision = data.eval_perf_binary(svm.predict(X), Y_)
    print('Acc: {0}\nRecall: {1}\nPrecision: {2}\n'.format(accuracy, recall, precision))

    # NOTE(review): result discarded; call kept to mirror original behaviour.
    svm.get_scores()
Example #6
    return w,b


'''
Arguments
    X:    data, np.array NxD
    w, b: logistic regression parameters

Return values
    probs: a posteriori probabilities for c1, dimensions Nx1
'''
def binlogreg_classify(X, w, b):
    """Return the a-posteriori probability of class c1 for each row of X
    under logistic-regression parameters w, b."""
    scores = np.dot(X, w) + b
    return data.sigmoid(scores)


np.random.seed(100)
X, Y_ = data.sample_gauss_2d(2, 100)
# param_niter=0: parameters come straight from initialisation, no updates.
w, b = binlogreg_train(X, Y_, param_niter=0)
probs = binlogreg_classify(X, w, b)

# Threshold the posteriors at 0.5 into boolean predictions.
Y = [bool(p >= 0.5) for p in probs]

accuracy, recall, precision = data.eval_perf_binary(Y, Y_)
# AP over the true labels ranked by predicted probability.
AP = data.eval_AP(Y_[probs.argsort()])
#print (accuracy, recall, precision, AP)
Example #7
        self.session.run(tf.initialize_all_variables())

    def train(self, X, Y_, param_niter):
        for i in range(param_niter):
            _, val_loss = self.session.run([self.train_step, self.loss], feed_dict={self.X: X, self.Y_: Y_})
            if i % 1000 == 0:
                print i, val_loss

    def eval(self, X):
        """Evaluate class probabilities for X; returns a one-element list
        containing the probability array (matches session.run's list form)."""
        return self.session.run([self.probs], feed_dict={self.X: X})


if __name__ == '__main__':
    # Sample a 2-D Gaussian-mixture dataset: 6 components, 3 classes, 50 each.
    X, y = sample_gmm_2d(6, 3, 50)
    # Number of distinct classes actually present in the sample.
    C = len(np.lib.arraysetops.unique(y))

    # One-hot encode the labels for the softmax loss.
    y_ = OneHotEncoder().fit_transform(y).toarray()

    # Train logistic regression (2 input features, C classes) for 10k steps.
    logreg = TFLogreg(2, C)
    logreg.train(X, y_, 10000)
    probs = logreg.eval(X)
    # eval() returns a one-element list; argmax over classes gives predictions.
    Y = np.argmax(probs[0], axis=1)
    
    y = y.flatten()
    # NOTE(review): Python 2 print statement; also, a binary metric is used
    # although C may be 3 — confirm eval_perf_binary handles this.
    print eval_perf_binary(Y, y)

    # Decision surface over the data's bounding box, then the data points.
    bbox = (np.min(X, axis=0), np.max(X, axis=0))
    graph_surface(lambda x: np.argmax(logreg.eval(x)[0], axis=1), bbox, offset=0.5)
    graph_data(X, Y, y)
Example #8
                print(i, loss)

    def eval(self, X):
        """Run the probability op for X and return the unwrapped array."""
        (probabilities,) = self.session.run([self.probs],
                                            feed_dict={self.X: X})
        return probabilities

    def count_params(self):
        """Print each trainable variable's name, then the total parameter
        count implied by the layer sizes in self.h (weights plus biases)."""
        for var in tf.trainable_variables():
            print(var.name)
        # Weight matrices are h[i-1] x h[i]; each non-input layer adds h[i]
        # bias terms.
        total_count = sum(prev * cur
                          for prev, cur in zip(self.h[:-1], self.h[1:]))
        total_count += sum(self.h[1:])
        print("Total parameter count: " + str(total_count))


if __name__ == '__main__':
    # Two-class 2-D GMM sample: 6 components, 10 points per component.
    X, Y_ = data.sample_gmm_2d(6, 2, 10)
    N, D = X.shape
    C = 2
    # One-hot encode the integer labels.
    Yoh_ = np.zeros((N, C))
    Yoh_[range(N), Y_.astype(int)] = 1

    model = TFDeep([2, 3, 2])
    model.train(X, Yoh_, 1000)
    probs = model.eval(X)
    model.count_params()
    Y = np.argmax(probs, axis=1)
    print(data.eval_perf_binary(Y, np.argmax(Yoh_, axis=1)))

    # Decision surface over the bounding box, then the data points.
    bbox = (np.min(X, axis=0), np.max(X, axis=0))
    data.graph_surface(model.eval, bbox, offset=0.5)
    data.graph_data(X, Y_, Y)

    # Example of dimensions
    ptdeep1, result = test(X,
                           Y,
                           dims=[2, 5, C],
                           func=torch.relu,
                           param_niter=1,
                           param_delta=0.001,
                           plot=False)
    ptdeep1.count_params()

    # Extra tasks: compare three architectures on two different datasets.
    X1, Y1 = data.sample_gmm_2d(4, 2, 40)
    X2, Y2 = data.sample_gmm_2d(6, 2, 10)

    architectures = ([2, 2], [2, 10, 2], [2, 10, 10, 2])
    runs = []
    for Xd, Yd in ((X1, Y1), (X2, Y2)):
        for dims in architectures:
            _, outputs = test(Xd, Yd, dims=dims, func=torch.relu)
            runs.append((outputs, Yd))

    # Report metrics in the same order the models were trained.
    for outputs, Yd in runs:
        apr_print(data.eval_perf_binary(np.argmax(outputs, axis=1), Yd))
Example #10
from data import graph_data, graph_surface, sample_gmm_2d, eval_perf_binary


class KSVMWrapper(object):
    """Convenience wrapper around sklearn's SVC: fits at construction time,
    then exposes predictions, decision scores and support-vector indices."""

    def __init__(self, X, Y_, c=1, g='auto'):
        """Fit an SVC with penalty C=c and kernel coefficient gamma=g."""
        self.clf = SVC(C=c, gamma=g)
        self.clf.fit(X, Y_)

    def predict(self, X):
        """Hard class label for each row of X."""
        return self.clf.predict(X)

    def get_scores(self, X):
        """Signed distance of each sample to the decision boundary."""
        return self.clf.decision_function(X)

    @property
    def support(self):
        """Indices (into the training set) of the support vectors."""
        return self.clf.support_


if __name__ == '__main__':

    # Two-class GMM sample: 6 components, 10 points per component.
    X, y = sample_gmm_2d(6, 2, 10)

    # Fit the SVM wrapper and predict on the training points themselves.
    ksvmw = KSVMWrapper(X, y)
    y_ = ksvmw.predict(X)

    y = y.flatten()
    # NOTE(review): Python 2 print statement.
    print eval_perf_binary(y_, y)
    bbox = (np.min(X, axis=0), np.max(X, axis=0))
    # Decision-function surface at offset 0; support vectors marked special.
    graph_surface(lambda x: ksvmw.get_scores(x), bbox, offset=0)
    graph_data(X, y_, y, special=ksvmw.support)
Example #11
from sklearn import svm
import numpy as np
import data


class KSVMWrap:
    """Thin wrapper over sklearn's SVC that fits on construction and exposes
    predictions, decision scores and support-vector indices."""

    def __init__(self, X, Y_, param_svm_c=1, param_svm_gamma='auto'):
        """Fit an SVC with penalty C=param_svm_c and gamma=param_svm_gamma."""
        self.clf = svm.SVC(C=param_svm_c, gamma=param_svm_gamma)
        self.clf.fit(X, Y_)

    def predict(self, X):
        """Hard class label for every row of X."""
        return self.clf.predict(X)

    def get_scores(self, X):
        """Signed distance of each sample to the decision boundary."""
        return self.clf.decision_function(X)

    def support(self):
        """Indices (into the training set) of the support vectors."""
        return self.clf.support_


if __name__ == "__main__":
    np.random.seed(100)
    # Two-class GMM sample: 6 components, 10 points per component.
    X, Y_ = data.sample_gmm_2d(6, 2, 10)

    # Fit on the full sample and evaluate on the same points.
    ksvm = KSVMWrap(X, Y_)
    predictions = ksvm.predict(X)

    acc, prec, rec, avg_prec = data.eval_perf_binary(predictions, Y_)
    print("Accuracy: {}\nPrecision: {}\nRecall: {}\nAverage Precision: {}".
          format(acc, prec, rec, avg_prec))

    # Decision surface over the bounding box; support vectors marked special.
    data.graph_surface(ksvm.get_scores, (np.min(X, axis=0), np.max(X, axis=0)))
    data.graph_data(X, Y_, predictions, special=ksvm.support())