Example No. 1
def main():
    np.random.seed(100)

    # get the training dataset
    X, Y_ = data.sample_gauss_2d(2, 100)

    # train the model
    w, b = binlogreg_train(X, Y_)

    # evaluate the model on the training dataset
    probabilities = binlogreg_classify(X, w, b)
    Y = np.where(probabilities >= 0.5, 1, 0)

    # report performance
    accuracy, recall, precision = data.eval_perf_binary(Y, Y_)
    AP = data.eval_AP(Y_[probabilities.argsort()])
    print('Acc: {0}\nRecall: {1}\nPrecision: {2}\nAP: {3}\n'.format(accuracy, recall, precision, AP))
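
This example calls binlogreg_train, but the function itself is not part of the excerpt. A minimal sketch of such a trainer, assuming plain gradient descent on the binary logistic-regression loss; the hyperparameter names param_niter and param_delta mirror the other examples, while the initialization and the default values are assumptions:

def binlogreg_train(X, Y_, param_niter=1000, param_delta=0.1):
    # hypothetical sketch: gradient descent for binary logistic regression
    N, D = X.shape
    w = np.random.randn(D)
    b = 0.0
    for i in range(int(param_niter)):
        scores = np.dot(X, w) + b                # classification scores
        probs = 1.0 / (1.0 + np.exp(-scores))    # P(c1 | x)
        dL_dscores = probs - Y_                  # gradient of the loss w.r.t. scores
        grad_w = np.dot(X.T, dL_dscores) / N     # gradient w.r.t. weights
        grad_b = np.mean(dL_dscores)             # gradient w.r.t. bias
        w -= param_delta * grad_w
        b -= param_delta * grad_b
    return w, b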
Example No. 2
    return w, b


'''
Arguments
    X:    data, np.array NxD
    w, b: logistic regression parameters

Return values
    probs: a posteriori probabilities for c1, dimensions Nx1
'''
def binlogreg_classify(X, w, b):
    return data.sigmoid(np.dot(X, w) + b)


np.random.seed(100)
X, Y_ = data.sample_gauss_2d(2, 100)
w, b = binlogreg_train(X, Y_, param_niter=0)
probs = binlogreg_classify(X, w, b)

# predicted labels: class c1 whenever P(c1 | x) >= 0.5
Y = (probs >= 0.5).astype(int)

accuracy, recall, precision = data.eval_perf_binary(Y, Y_)
AP = data.eval_AP(Y_[probs.argsort()])
# print(accuracy, recall, precision, AP)
Example No. 3
        probs: probabilities of class c1
    '''

    scores = np.dot(X, w) + b  # N x 1
    return (1 / (1 + np.exp(-scores))).flatten()


def binlogreg_decfun(w, b):
    return lambda X: binlogreg_classify(X, w, b)


if __name__ == "__main__":
    np.random.seed(99)

    # get the training dataset
    X, Y_ = data.sample_gauss_2d(2, 42)

    # train the model
    w, b = binlogreg_train(X, Y_)

    # evaluate the model on the training dataset
    probs = binlogreg_classify(X, w, b)
    Y = (probs > 0.5).astype(int)

    # report performance
    accuracy, recall, precision = data.eval_perf_binary(Y, Y_)
    AP = data.eval_AP(Y_[probs.argsort()])
    print(accuracy, recall, precision, AP)

    # graph the decision surface
    decfun = binlogreg_decfun(w, b)
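    # The excerpt ends right after building decfun. A plausible continuation for
    # the decision-surface plot, following the pattern in Example No. 7; the
    # data.graph_data call, its argument order, and the matplotlib import are
    # assumptions, so treat this as a sketch rather than the original code.
    rect = (np.min(X, axis=0), np.max(X, axis=0))
    data.graph_surface(decfun, rect)
    data.graph_data(X, Y_, Y)   # assumed signature
    plt.show()                  # assumes: import matplotlib.pyplot as plt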
Example No. 4
    return ptdeep, probs


def apr_print(values):
    print("Accuracy:", values[0], "Recall:", values[1], "Precision:",
          values[2])


if __name__ == "__main__":
    # initialize the random number generators
    np.random.seed(100)

    # instantiate the data X and the labels Yoh_

    C = 2
    X, Y = data.sample_gauss_2d(C, 10)

    # Same as task 4
    test(X,
         Y,
         dims=[2, C],
         func=torch.relu,
         param_niter=1e5,
         param_delta=0.001)

    # Example of dimensions
    ptdeep1, result = test(X,
                           Y,
                           dims=[2, 5, C],
                           func=torch.relu,
                           param_niter=1,
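
The call above is cut off by the excerpt. Judging by the return variable names (ptdeep, ptdeep1), the test helper builds and trains a PTDeep-style model from the dims list and the activation func. A minimal sketch of what such a configurable model might look like; the class name, parameter layout, and softmax output are all assumptions:

import torch
import torch.nn as nn

class PTDeep(nn.Module):
    """Sketch of a deep model configured by a dims list, e.g. dims=[2, 5, C]:
    2 input features, one hidden layer of 5 units, C output classes."""

    def __init__(self, dims, activation=torch.relu):
        super().__init__()
        # one weight matrix and bias vector per consecutive pair in dims
        self.weights = nn.ParameterList(
            [nn.Parameter(torch.randn(d_in, d_out))
             for d_in, d_out in zip(dims[:-1], dims[1:])])
        self.biases = nn.ParameterList(
            [nn.Parameter(torch.zeros(d_out)) for d_out in dims[1:]])
        self.activation = activation

    def forward(self, X):
        h = X
        for i, (W, b) in enumerate(zip(self.weights, self.biases)):
            h = h @ W + b
            if i < len(self.weights) - 1:   # activation on hidden layers only
                h = self.activation(h)
        return torch.softmax(h, dim=1)      # class probabilities, N x C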
Example No. 5
        """
        probs = self.session.run(self.probs, {self.X: X})
        return probs

if __name__ == "__main__":
    import numpy as np
    import data
    import matplotlib.pyplot as plt

    tf.reset_default_graph()
    np.random.seed(100)
    tf.set_random_seed(100)

    C = 3
    n = 100
    X, Y_, Yoh_ = data.sample_gauss_2d(C, n, one_hot=True)


    tflr = TFLogreg(X.shape[1], Yoh_.shape[1], 0.1, 0.25)
    tflr.train(X, Yoh_, 1000)

    probs = tflr.eval(X)
    Y = probs.argmax(axis=1)
    decfun = lambda x: tflr.eval(x).argmax(axis=1)


    # eval
    mat, classes = data.confusion_mat(y_pred=Y, y_true=Y_)
    APs = data.eval_AP_multi(Y_=Y_, probs=probs)
    print(mat)
    print(APs)
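
The TFLogreg class itself is not shown in this excerpt (only its eval method appears here and in Example No. 7). A minimal TensorFlow 1.x sketch consistent with the constructor call TFLogreg(D, C, param_delta, param_lambda) and the train/eval calls above; the graph layout and the loss details are assumptions:

class TFLogreg:
    # hypothetical sketch of a TF1-style logistic regression graph
    def __init__(self, D, C, param_delta, param_lambda=0.0):
        self.X = tf.placeholder(tf.float32, [None, D])
        self.Yoh_ = tf.placeholder(tf.float32, [None, C])

        self.W = tf.Variable(tf.zeros([D, C]))
        self.b = tf.Variable(tf.zeros([C]))

        logits = tf.matmul(self.X, self.W) + self.b
        self.probs = tf.nn.softmax(logits)

        # cross-entropy plus L2 regularization of the weights
        log_loss = -tf.reduce_sum(self.Yoh_ * tf.log(self.probs + 1e-13), axis=1)
        self.loss = tf.reduce_mean(log_loss) + param_lambda * tf.nn.l2_loss(self.W)

        self.train_step = tf.train.GradientDescentOptimizer(param_delta).minimize(self.loss)
        self.session = tf.Session()

    def train(self, X, Yoh_, param_niter):
        self.session.run(tf.global_variables_initializer())
        for i in range(int(param_niter)):
            self.session.run(self.train_step, {self.X: X, self.Yoh_: Yoh_})

    def eval(self, X):
        return self.session.run(self.probs, {self.X: X})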
Example No. 6
from data import sample_gauss_2d, class_to_onehot, graph_data, graph_surface


def logreg_decfun(ptlr):
    def classify(X):
        return np.argmax(evaluation(ptlr, X), axis=1)

    return classify


if __name__ == "__main__":
    # initialize the random number generators
    np.random.seed(100)

    # instantiate the data X and the labels Yoh_
    X, Y = sample_gauss_2d(2, 10)

    #Yoh_ = class_to_onehot(Y)

    #X = torch.tensor(X)
    #Yoh_ = torch.tensor(Yoh_)

    # define the model:
    ptlr = PTLogreg(X.shape[1], max(Y) + 1)

    # learn the parameters (X and Yoh_ must be of type torch.Tensor):
    train(ptlr, X, Y, param_niter=1e5, param_delta=0.001)

    # get the probabilities on the training set
    probs = evaluation(ptlr, X)
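
Neither train nor evaluation appears in this excerpt. A minimal sketch of the two helpers, assuming the model outputs class probabilities and that both helpers accept NumPy arrays, as the call sites above suggest (the conversions are commented out at the call site); the loss formulation and the optimizer choice are assumptions:

def train(model, X, Y, param_niter, param_delta):
    # sketch: gradient descent on the negative log-likelihood
    X_t = torch.as_tensor(X, dtype=torch.float32)
    Y_t = torch.as_tensor(Y, dtype=torch.long)
    optimizer = torch.optim.SGD(model.parameters(), lr=param_delta)
    for i in range(int(param_niter)):
        probs = model(X_t)                                               # N x C
        loss = -torch.log(probs[torch.arange(len(Y_t)), Y_t] + 1e-13).mean()
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()


def evaluation(model, X):
    # sketch: forward pass without gradient tracking, returned as NumPy
    with torch.no_grad():
        return model(torch.as_tensor(X, dtype=torch.float32)).numpy()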
Example No. 7
    def eval(self, X):
        """Arguments:
			- X: actual datapoints [NxD]
			Returns: predicted class probabilites [NxC]
		"""
        #   koristiti: tf.Session.run
        probs = self.session.run(self.probs, {self.X: X})
        return probs


if __name__ == '__main__':
    np.random.seed(100)
    tf.set_random_seed(100)

    X, Y_, Yoh_ = data.sample_gauss_2d(3, 100, one_hot=True)

    _, D = X.shape
    _, C = Yoh_.shape

    tflr = TFLogreg(D, C, 0.1, 0.25)
    tflr.train(X, Yoh_, 1000)

    probs = tflr.eval(X)

    Y = probs.argmax(axis=1)
    dec_fun = lambda X: tflr.eval(X).argmax(axis=1)

    rect = (np.min(X, axis=0), np.max(X, axis=0))

    data.graph_surface(dec_fun, rect)
Example No. 8
    return accuracy, precision, recall


def pt_logreg_decfun(model):
    return lambda X: eval(model, X)[np.arange(len(X)), 1]


if __name__ == "__main__":
    # initialize the random number generators
    np.random.seed(42)

    C = 3
    N = 42

    # instantiate the data X and the labels Yoh_
    X, Y_ = data.sample_gauss_2d(C, N)
    Yoh_ = F.one_hot(torch.from_numpy(Y_), C)

    # define the model:
    ptlr = PTLogreg(X.shape[1], Yoh_.shape[1])

    # learn the parameters (X and Yoh_ must be of type torch.Tensor):
    train(ptlr, torch.from_numpy(X), Yoh_, 1000, 0.05)

    # get the probabilities on the training set
    probs = eval(ptlr, X)
    Y = np.argmax(probs, axis=1)

    # print the performance (precision and recall per class)
    accuracy, precision, recall = eval_perf_multi(Y, Y_)
    print(f'accuracy: {accuracy}, precision: {precision}, recall: {recall}')
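
The PTLogreg class used here (and in Example No. 6) is not part of either excerpt. A minimal sketch of a torch module matching the constructor signature PTLogreg(D, C) seen at the call sites; the parameter initialization and the softmax output are assumptions:

import torch
import torch.nn as nn

class PTLogreg(nn.Module):
    # hypothetical sketch: multiclass logistic regression as a torch module
    def __init__(self, D, C):
        super().__init__()
        self.W = nn.Parameter(torch.randn(D, C))   # weights, D x C
        self.b = nn.Parameter(torch.zeros(C))      # biases, C

    def forward(self, X):
        scores = X @ self.W + self.b               # linear scores, N x C
        return torch.softmax(scores, dim=1)        # class probabilities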