예제 #1
0
    # Fix TF's RNG so repeated runs produce the same initial weights.
    tf.set_random_seed(100)

    # Sample a 3-class 2D Gaussian dataset: X points, Y_ integer labels.
    X, Y_ = data.sample_gauss(3, 100)
    Yoh_ = class_to_onehot(Y_)

    #building graph
    # TFLogreg(input dim, class count, third scalar hyper-parameter) —
    # NOTE(review): meaning of the 0.1 argument is not visible here; confirm.
    tlfr = TFLogreg(X.shape[1], Yoh_.shape[1], 0.1)

    # learning parameters
    tlfr.train(X, Yoh_, 1000)

    # fetch probabilities on train set
    # eval() appears to return a list of fetched tensors, hence probs[0] below.
    probs = tlfr.eval(X)
    Y = np.argmax(probs[0], axis=1)

    accuracy, pr, M = data.eval_perf_multi(Y, Y_)
    print("Accuracy: ", accuracy)
    print("Precision / Recall: ", pr)
    print("Confussion Matrix: ", M)

    #graph the decision surface
    # NOTE(review): W and b are not defined in this scope — this raises
    # NameError unless they exist at module level; the model's learned
    # parameters were probably intended. Confirm against the full file.
    decfun = logreg.logreg_decfun(X, W, b)
    bbox = (np.min(X, axis=0), np.max(X, axis=0))
    data.graph_surface(decfun, bbox, offset=0.5)

    # graph the data points
    data.graph_data(X, Y_, Y, special=[])

    # show the plot
    plt.show()
예제 #2
0
  print(probs)
  #predicted labels
  # Threshold posteriors at 0.5; np.vstack yields an (N, 1) column vector.
  Y = np.vstack([ 0 if (probs[i]<0.5) else 1 for i in range(Y_.shape[0])])

  #evaluation metrics
  accuracy, recall, precision = data.eval_perf_binary(Y, Y_)
  print("Accuracy: ",accuracy)
  print("Precision: ",precision)
  print("Recall: ",recall)  
  # Average precision over labels ranked by ascending score.
  # NOTE(review): reshape(1,200) hard-codes the sample count to 200;
  # confirm this matches the dataset size used above.
  AP = data.eval_AP(np.vstack(Y_[probs.reshape(1,200).argsort()]))
  print("Average precision: ",AP)
  
  #graph the decision surface
  decfun = binlogreg_decfun(X,w,b)
  bbox=(np.min(X, axis=0), np.max(X, axis=0))
  data.graph_surface(decfun, bbox, offset=0.5)

  # graph the data points
  data.graph_data(X, np.hstack(Y_), np.hstack(Y), special=[])

  # show the plot
  plt.show()



	  

  
  
  
예제 #3
0
            if i % 10 == 0:
                print i, val_loss

    def eval(self, X):
        """Fetch the probability node for inputs X via the TF session.

        Returns the raw session.run result: a one-element list holding
        the probability array.
        """
        feed = {self.X: X}
        return self.session.run([self.probs], feed_dict=feed)


if __name__ == '__main__':

    # 2D mixture of 6 Gaussian components over 3 classes, 10 samples each.
    X, y = sample_gmm_2d(6, 3, 10)

    # One-hot encode labels; assumes y is a 2D column — TODO confirm the
    # shape returned by sample_gmm_2d.
    y_ = OneHotEncoder().fit_transform(y).toarray()

    # 2 inputs, 3 output classes, no hidden layers.
    deep = TFDeep([2, 3])
    deep.train(X, y_, 10000)

    y = y.flatten()

    # eval() appears to return a list of fetched tensors, hence probs[0].
    probs = deep.eval(X)
    Y = np.argmax(probs[0], axis=1)

    # NOTE(review): data has 3 classes but a *binary* metric is used here;
    # eval_perf_multi may have been intended — confirm.
    print eval_perf_binary(Y, y)

    bbox = (np.min(X, axis=0), np.max(X, axis=0))
    graph_surface(lambda x: np.argmax(deep.eval(x)[0], axis=1),
                  bbox,
                  offset=0.5)
    graph_data(X, Y, y)
예제 #4
0
        # Gradient-descent step on the bias vectors (tail of the training
        # loop; the matching weight updates are presumably above this view).
        b_1 += -delta * grad_b1
        b_2 += -delta * grad_b2

    return w_1, w_2, b_1, b_2


if __name__ == "__main__":
    # 2D mixture of 6 Gaussian components over 4 classes, 30 samples each.
    X, y = sample_gmm_2d(6, 4, 30)

    # Number of distinct classes. Use the public np.unique API: the
    # np.lib.arraysetops submodule path is private and was removed in
    # NumPy 2.0, so the old spelling breaks on modern NumPy.
    C = len(np.unique(y))

    # One-hot encode the labels (assumes y is a 2D column — TODO confirm).
    y_ = OneHotEncoder().fit_transform(y).toarray()

    # Train the two-layer fully connected network.
    w_1, w_2, b_1, b_2 = fcann_train(X, y_, C)

    # Class posteriors on the training set; second return value unused here.
    probs, _ = forward(X, w_1, w_2, b_1, b_2)

    Y = np.argmax(probs, axis=1)
    y = y.flatten()
    # NOTE(review): the data has 4 classes but a binary metric is used;
    # confirm whether eval_perf_multi was intended.
    # print(x) with a single argument behaves identically under Python 2
    # and Python 3, unlike the old statement form used elsewhere.
    print(eval_perf_binary(Y, y))

    # Decision surface over the data's bounding box, then the points.
    bbox = (np.min(X, axis=0), np.max(X, axis=0))
    graph_surface(
        lambda x: np.argmax(forward(x, w_1, w_2, b_1, b_2)[0], axis=1),
        bbox,
        offset=0.5)
    graph_data(X, y, Y)
예제 #5
0
        return self.clf.support_


if __name__ == "__main__":
    # Seed NumPy's RNG so the sampled dataset is reproducible.
    np.random.seed(100)

    # Two-class 2D Gaussian-mixture dataset: 6 components, 10 samples each.
    samples, labels = sample_gmm_2d(6, 2, 10)

    # Fit the kernel-SVM wrapper on the data.
    wrapper = KSVMWrap(samples, labels)

    # Classifier scores and hard predictions on the training set.
    train_scores = wrapper.scores(samples)
    predictions = wrapper.predict(samples)

    # Multi-class performance report.
    accuracy, pr, M = data.eval_perf_multi(predictions, labels)
    print("Accuracy: ", accuracy)
    print("Precision / Recall: ", pr)
    print("Confussion Matrix: ", M)

    # Decision surface over the data's bounding box, then the points,
    # with the support vectors highlighted.
    bbox = (np.min(samples, axis=0), np.max(samples, axis=0))
    data.graph_surface(wrapper.scores, bbox, offset=0.5)
    data.graph_data(samples, labels, predictions, special=[wrapper.support()])

    # show the plot
    plt.show()
예제 #6
0
                print(i, loss)

    def eval(self, X):
        """Return the class-probability array for inputs X."""
        fetched = self.session.run([self.probs], feed_dict={self.X: X})
        return fetched[0]

    def count_params(self):
        """Print every trainable variable name and the analytic parameter count.

        The count is derived from the layer sizes in self.h: layer i
        contributes h[i-1] * h[i] weights plus h[i] biases.
        """
        for var in tf.trainable_variables():
            print(var.name)
        sizes = self.h
        weight_count = sum(sizes[i - 1] * sizes[i] for i in range(1, len(sizes)))
        bias_count = sum(sizes[1:])
        print("Total parameter count: " + str(weight_count + bias_count))


if __name__ == '__main__':
    # Two-class 2D Gaussian-mixture dataset: 6 components, 10 samples each.
    (X, Y_) = data.sample_gmm_2d(6, 2, 10)
    N, D = X.shape
    C = 2
    # Build a dense one-hot label matrix by fancy-indexing the class column.
    Yoh_ = np.zeros((N, C))
    Yoh_[range(N), Y_.astype(int)] = 1
    # 2-input network with one 3-unit hidden layer and 2 output classes.
    model = TFDeep([2, 3, 2])
    model.train(X, Yoh_, 1000)
    probs = model.eval(X)
    model.count_params()
    Y = np.argmax(probs, axis=1)
    print(data.eval_perf_binary(Y, np.argmax(Yoh_, axis=1)))
    bbox = (np.min(X, axis=0), np.max(X, axis=0))
    # NOTE(review): model.eval returns an (N, C) probability matrix, while
    # graph_surface is given a scalar-per-point function (argmax or a single
    # column) everywhere else in this file — confirm this call is intended.
    data.graph_surface(model.eval, bbox, offset=0.5)
    data.graph_data(X, Y_, Y)
예제 #7
0

def fcann2_decfun(w, b):
    """Build a classifier closure over trained parameters (w, b).

    The returned callable maps a batch X to hard class predictions by
    taking the argmax over the class-probability columns.
    """
    return lambda X: np.argmax(fcann2_classify(X, w, b), axis=1)


if __name__ == "__main__":
    # Two-class 2D Gaussian-mixture dataset.
    X, Y_ = sample_gmm_2d(6, 2, 10)
    # Standardize with a single global mean/std (scalars over all entries).
    # NOTE(review): per-feature normalization would use mean(axis=0) /
    # std(axis=0); confirm the scalar version is intended.
    meanX = X.mean()
    stdX = X.std()
    X = (X - meanX) / stdX
    # Train the two-layer network; iteration count left at its default.
    w, b = fcann2_train(X, Y_, param_lambda=1e-5,
                        param_delta=1e-5)  #param_niter

    # graph the decision surface
    rect = (np.min(X, axis=0), np.max(X, axis=0))
    graph_surface(fcann2_decfun(w, b), rect, offset=0.5)

    #print(fcann2_classify(X, w, b))
    # graph the data points
    graph_data(X, Y_, np.argmax(fcann2_classify(X, w, b), axis=1), special=[])

    # graph_data(X, Y_, list(map(lambda x: np.argmax(x), fcann2_classify(X, w, b))), special=[])

    plt.show()

    # Finish
예제 #8
0
    def predict(self, X):
        # Hard class labels for X from the wrapped sklearn model.
        return self.model.predict(X)

    def get_scores(self, X):
        # Per-class probability estimates from the wrapped model
        # (presumably the SVC was built with probability=True — confirm).
        return self.model.predict_proba(X)

    def support(self):
        # Indices of the training samples chosen as support vectors.
        return self.model.support_


if __name__ == '__main__':
    import numpy as np
    import data
    import matplotlib.pyplot as plt

    # Reproducible sampling.
    np.random.seed(100)

    C = 2
    n = 10
    # Two-class mixture of 6 Gaussians, 20 samples per component,
    # with one-hot labels requested as well.
    X, Y_, Yoh_ = data.sample_gmm_2d(6, 2, 20, one_hot=True)

    # Fit the SVM wrapper; predictions come from the per-class scores.
    model = SVMWrapper(X, Y_, c=1, g='auto')
    scores = model.get_scores(X)
    Y = scores.argmax(axis=1)

    # Decision surface from the class-1 score column, then the labelled
    # points with the support vectors highlighted.
    def decfun(x):
        return model.get_scores(x)[:, 1]

    bbox = (np.min(X, axis=0), np.max(X, axis=0))
    data.graph_surface(decfun, bbox, offset=0.5)
    data.graph_data(X, Y_, Y, model.support())
    plt.show()
예제 #9
0
        # improved parameters: plain gradient-descent step on w and b
        w += -param_delta * grad_w
        b += -param_delta * grad_b

    return w, b


if __name__ == "__main__":
    np.random.seed(100)
    # get the training dataset
    X, Y_ = data.sample_gauss(2, 100)
    # train the model
    # NOTE(review): a *binary* logreg trainer is handed one-hot (N, 2)
    # labels; confirm binlogreg_train expects this rather than a 0/1 vector.
    w, b = binlogreg_train(X, data.class_to_onehot(Y_))
    # evaluate the model on the training dataset
    probs = binlogreg_classify(X, w, b)
    Y = probs > 0.5

    # report performance
    accuracy, recall, precision = data.eval_perf_binary(Y[:, -1], Y_)
    # NOTE(review): eval_AP is given the raw labels without sorting them by
    # score (another example in this file sorts by probs first) — confirm.
    AP = data.eval_AP(Y_)
    print(accuracy, recall, precision, AP)

    # graph the decision surface
    rect = (np.min(X, axis=0), np.max(X, axis=0))
    data.graph_surface(lambda x: binlogreg_classify(x, w, b), rect, offset=0.5)

    # graph the data points
    data.graph_data(X, Y_, Y[:, -1], special=[])

    plt.show()
예제 #10
0

class KSVMWrapper(object):
    """Thin convenience wrapper around sklearn's SVC.

    Fits the classifier at construction time and exposes predictions,
    decision-function scores, and the support-vector indices.
    """

    def __init__(self, X, Y_, c=1, g='auto'):
        classifier = SVC(C=c, gamma=g)
        classifier.fit(X, Y_)
        self.clf = classifier

    def predict(self, X):
        """Hard class labels for X."""
        return self.clf.predict(X)

    def get_scores(self, X):
        """Signed distances to the separating hyperplane."""
        return self.clf.decision_function(X)

    @property
    def support(self):
        """Indices of the training samples used as support vectors."""
        return self.clf.support_


if __name__ == '__main__':

    # Two-class 2D Gaussian-mixture dataset.
    X, y = sample_gmm_2d(6, 2, 10)

    # Fit the SVM wrapper and predict on the training set.
    ksvmw = KSVMWrapper(X, y)
    y_ = ksvmw.predict(X)

    y = y.flatten()
    # NOTE(review): argument order here is (predicted, true) — confirm it
    # matches eval_perf_binary's signature.
    print eval_perf_binary(y_, y)
    bbox = (np.min(X, axis=0), np.max(X, axis=0))
    # Decision-function scores define the surface; offset 0 is the boundary.
    graph_surface(lambda x: ksvmw.get_scores(x), bbox, offset=0)
    # `support` is a property on the wrapper, hence no call parentheses.
    graph_data(X, y_, y, special=ksvmw.support)
예제 #11
0
        self.clf = self.clf.fit(X, Y_)

        self.support = self.clf.support_  # indices of the samples chosen as support vectors

        # Training labels are kept so get_scores can build its report later.
        self.Y_ = Y_

    def predict(self, X):
        # Hard class labels from the fitted sklearn classifier.
        return self.clf.predict(X)

    def get_scores(self, X):
        # Returns sklearn's *textual* classification report comparing the
        # stored training labels against predictions for X.
        # NOTE(review): despite the name this is not a score array, and it
        # is only meaningful when X is the training set used in __init__.
        return classification_report(self.Y_, self.predict(X))


if __name__ == "__main__":
    # Seed the random number generators for reproducibility.
    np.random.seed(100)

    # Two-class Gaussian-mixture dataset plus its one-hot labels.
    X, Y_ = data.sample_gmm(6, 2, 10)
    Yoh_ = data.class_to_onehot(Y_)

    # Fit the kernel-SVM wrapper and print its (report-style) scores.
    model = KSVMWrap(X, Y_)
    print(model.get_scores(X))

    # Decision surface on a 256x256 grid over the data's bounding box,
    # then the points with the support vectors highlighted.
    lo, hi = np.min(X, axis=0), np.max(X, axis=0)
    data.graph_surface(model.predict, (lo, hi), offset=0.5, width=256, height=256)
    data.graph_data(X, Y_, model.predict(X), special=model.support)

    plt.show()
예제 #12
0

def svm_decfun(model):
    """Return a callable that yields the model's class-1 score per row of X."""
    def decide(X):
        scores = model.get_scores(X)
        return scores[np.arange(len(X)), 1]

    return decide


if __name__ == "__main__":
    # initialize the random number generators
    np.random.seed(100)

    # instantiate the data X and the labels
    X, Y_ = data.sample_gmm_2d(6, 2, 10)

    # define the model: SVM wrapper with parameters (10, 'auto')
    svm = KSVMWrap(X, Y_, 10, 'auto')

    # predictions on the training set
    Y = svm.predict(X)

    # report performance
    # NOTE(review): pr from eval_perf_multi is per-class (precision, recall)
    # pairs elsewhere in this file, so pr[0]/pr[1] are class entries rather
    # than a single precision/recall value — confirm the printout is intended.
    accuracy, pr, _ = data.eval_perf_multi(Y, Y_)
    print(f'accuracy: {accuracy}, precision: {pr[0]}, recall: {pr[1]}')

    # plot the decision surface and the data points
    decfun = svm_decfun(svm)
    bbox = (np.min(X, axis=0), np.max(X, axis=0))
    data.graph_surface(decfun, bbox, offset=0.5)
    data.graph_data(X, Y_, Y, special=svm.support())

    # show the plot
    plt.show()
예제 #13
0

if __name__ == "__main__":
    # initialize the random number generators
    np.random.seed(100)

    # instantiate the data X and the labels
    X, Y = sample_gauss_2d(2, 10)

    #Yoh_ = class_to_onehot(Y)

    #X = torch.tensor(X)
    #Yoh_ = torch.tensor(Yoh_)

    # define the model: input dimension and number of classes
    ptlr = PTLogreg(X.shape[1], max(Y) + 1)

    # learn the parameters (X and Yoh_ must be of type torch.Tensor)
    # NOTE(review): param_niter=1e5 is a float; confirm train() converts it
    # to int before using it as an iteration count.
    train(ptlr, X, Y, param_niter=1e5, param_delta=0.001)

    # probabilities on the training set (computed but unused below)
    probs = evaluation(ptlr, X)

    # plot the decision surface and the data points
    rect = (np.min(X, axis=0), np.max(X, axis=0))
    graph_surface(logreg_decfun(ptlr), rect, offset=0.5)
    graph_data(X, Y, np.argmax(evaluation(ptlr, X), axis=1), special=[])

    plt.show()

    # plot the results, the decision surface
예제 #14
0
    def support(self):
        # Indices of the training samples selected as support vectors.
        return self.svm.support_


if __name__ == "__main__":
    # Seed the random number generators so the dataset is reproducible.
    np.random.seed(69)

    # Two-class 2D Gaussian-mixture dataset.
    X, Y_ = data.sample_gmm_2d(6, 2, 10)

    # Fit the SVM wrapper on the training data.
    clf = KSVMwrap(X, Y_)

    # Scores and hard predictions on the training set.
    scores = clf.get_scores(X)
    predicted = clf.predict(X)

    # Multi-class performance summary.
    accuracy, recall, precision = data.eval_perf_multi(predicted, Y_)
    print(accuracy, recall, precision)

    # Decision surface from the class-1 score column, then the points
    # with the support vectors highlighted.
    def decfun(pts):
        return clf.get_scores(pts)[:, 1]

    bbox = (np.min(X, axis=0), np.max(X, axis=0))
    data.graph_surface(decfun, bbox, offset=0.5)

    data.graph_data(X, Y_, predicted, special=[clf.support()])

    plt.show()
예제 #15
0
    def __init__(self, X, Y_, param_svm_c=1, param_svm_gamma='auto'):
        # Fit an SVC immediately; probability=True enables predict_proba
        # (used by get_scores) at the cost of extra internal fitting.
        self.clf = SVC(C=param_svm_c, gamma=param_svm_gamma, probability=True)
        self.clf.fit(X, Y_)

    def predict(self, X):
        # Hard class labels for X from the fitted classifier.
        return self.clf.predict(X)

    def get_scores(self, X):
        # Per-class probability estimates (available because the SVC was
        # constructed with probability=True in __init__).
        return self.clf.predict_proba(X)

    def support(self):
        # Indices of the training samples chosen as support vectors.
        return self.clf.support_


if __name__ == '__main__':
    np.random.seed(100)

    # Two-class 2D Gaussian-mixture dataset.
    X, Y_ = data.sample_gmm_2d(6, 2, 10)

    # Fit the SVM wrapper.
    clf = KSVMWrap(X, Y_)

    # Class-1 probability as the decision function; predictions via argmax.
    dec_fun = lambda X: clf.get_scores(X)[:, 1]
    probs = clf.get_scores(X)
    Y = probs.argmax(axis=1)

    # NOTE(review): unlike the other examples, graph_surface is called
    # without offset=0.5 here; confirm the default offset is acceptable.
    rect = (np.min(X, axis=0), np.max(X, axis=0))
    data.graph_surface(dec_fun, rect)
    data.graph_data(X, Y_, Y, special=clf.support())

    plt.show()