Example #1
def ex_2_1(X_train, y_train, X_test, y_test):
    """
    Solution for exercise 2.1
    :param X_train: Train set
    :param y_train: Targets for the train set
    :param X_test: Test set
    :param y_test: Targets for the test set
    :return:
    """

    # >>> from sklearn.neural_network import MLPClassifier
    # >>> from sklearn.datasets import make_classification
    # >>> from sklearn.model_selection import train_test_split
    # >>> X, y = make_classification(n_samples=100, random_state=1)
    # >>> X_train, X_test, y_train, y_test = train_test_split(X, y, stratify=y,
    # ...                                                     random_state=1)
    # >>> clf = MLPClassifier(random_state=1, max_iter=300).fit(X_train, y_train)
    # >>> clf.predict_proba(X_test[:1])
    # array([[0.038..., 0.961...]])
    # >>> clf.predict(X_test[:5, :])
    # array([1, 0, 1, 0, 1])
    # >>> clf.score(X_test, y_test)
    # 0.8...

    ## TODO
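    # Train five MLPs with different random seeds and collect train/test accuracy for each.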
    train_accuracy = [0, 0, 0, 0, 0]
    test_accuracy = [0, 0, 0, 0, 0]
    clf = [0, 0, 0, 0, 0]
    for seed in range(1, 6):
        clf[seed - 1] = MLPClassifier(hidden_layer_sizes=(10, ),
                                      activation='tanh',
                                      max_iter=50,
                                      random_state=(seed * 3))
        clf[seed - 1].fit(X_train, y_train)
        train_accuracy[seed - 1] = clf[seed - 1].score(X_train, y_train)
        test_accuracy[seed - 1] = clf[seed - 1].score(X_test, y_test)

        print(train_accuracy[seed - 1])
        print(test_accuracy[seed - 1])
    plot_boxplot(train_accuracy, test_accuracy)

    y_test_pred = clf[4].predict(X_test)
    confusion = confusion_matrix(y_test, y_test_pred)

    # plot the first three misclassified test images
    misclass_count = 0
    for ix, y in enumerate(y_test):
        if y != y_test_pred[ix]:
            plot_image(X_test[ix])
            misclass_count += 1
        if misclass_count == 3:
            break

    # TODO: plot misclassified images based on confusion matrix
    print(confusion)

    # TODO: plot weights between input and hidden
    plot_hidden_layer_weights(clf[4].coefs_[0])

    pass
Example #2
def ex_2_1(input2, target2):
    """
    Solution for exercise 2.1
    :param input2: The input from dataset2
    :param target2: The target from dataset2
    :return:
    """

    # hyperparameters for the MLPClassifier: one hidden layer with 6 units,
    # 'adam' solver, tanh activation, and 200 training iterations
    hidden_layers = 6
    solver_mode = 'adam'
    activation_mode = 'tanh'
    max_iter = 200

    cf = MLPClassifier(hidden_layer_sizes=(hidden_layers, ),
                       solver=solver_mode,
                       activation=activation_mode,
                       max_iter=max_iter)

    # train the classifier on the pose labels (second column of target2)
    cf.fit(input2, target2[:, 1])

    # predict on the training data and print the confusion matrix
    y_pred = cf.predict(input2)
    print(confusion_matrix(target2[:, 1], y_pred))

    # plot the weights between the input and hidden layer
    plot_hidden_layer_weights(cf.coefs_[0])

    pass
Example #3
def ex_2_1(input2, target2):
    '''
    • Write code to train a feed-forward neural network with 1 hidden layer containing 6 hidden units
      for pose recognition. Use dataset2 for training after normalization, ‘adam’ as the training solver and
      train for 200 iterations.
    • Calculate the confusion matrix.
    • Plot the weights between each input neuron and the hidden neurons to visualize what the network
      has learnt in the first layer.
      Note: Use scikit-learn’s confusion_matrix function to calculate the confusion matrix. Documentation
      for this can be found here.
      Note: You can use the coefs_ attribute of the model to read the weights. It is a list of length
      n_layers - 1 where the ith element in the list represents the weight matrix corresponding to layer i.
      Note: Use the plot_hidden_layer_weights function in nn_classification_plot.py to plot the hidden weights.
      (A short sketch of the coefs_ layout and the confusion_matrix convention follows this example.)
    '''

    # dataset2 = normalize(input2) already done by main
    x_train = input2
    y_train = target2[:, 1]
    # print(y_train)
    nn = MLPClassifier(solver='adam',
                       activation='tanh',
                       max_iter=200,
                       hidden_layer_sizes=(6, ))
    nn.fit(x_train, y_train)
    cm = confusion_matrix(y_train, nn.predict(x_train))
    plot_hidden_layer_weights(nn.coefs_[0])
    print(cm)
    pass
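The notes in the docstring above refer to scikit-learn's confusion_matrix function and the coefs_ attribute. The following minimal sketch is not part of any submitted solution; it uses a small synthetic dataset (the dimensions and labels are arbitrary stand-ins for dataset2) to show the layout of both: coefs_[0] is the input-to-hidden weight matrix, and confusion_matrix puts true classes in rows and predicted classes in columns.

import numpy as np
from sklearn.neural_network import MLPClassifier
from sklearn.metrics import confusion_matrix

# Small synthetic stand-in for dataset2: 60 samples, 4 features, 3 classes.
rng = np.random.RandomState(0)
X = rng.rand(60, 4)
y = rng.randint(0, 3, 60)

clf = MLPClassifier(hidden_layer_sizes=(6,), activation='tanh',
                    solver='adam', max_iter=200).fit(X, y)

# coefs_ has length n_layers_ - 1; coefs_[0] maps the inputs to the hidden
# layer, so its shape is (n_features, n_hidden_units) = (4, 6) here.
print(clf.coefs_[0].shape)

# confusion_matrix(y_true, y_pred): rows are true classes, columns are predictions.
print(confusion_matrix(y, clf.predict(X)))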
Example #4
def ex_2_1(input2, target2):
    """
    Solution for exercise 2.1
    :param input2: The input from dataset2
    :param target2: The target from dataset2
    :return:
    """
    # parse target2 2nd column
    pose2 = []
    for target in target2:
        pose2.append(target[1])

    mlp = MLPClassifier(activation='tanh', hidden_layer_sizes=6)
    print("===========fit started===========")
    mlp.fit(input2, pose2)
    print("===========fit finished===========")
    print("classes_: ", mlp.classes_)
    print("n_layers_: ", mlp.n_layers_)
    plot_hidden_layer_weights(mlp.coefs_[0])

    print("===========predict started===========")
    prediction = mlp.predict(input2)
    print("===========predict finished===========")
    cnf_matrix = confusion_matrix(pose2, prediction)
    print(cnf_matrix)
    return
Example #5
def ex_2_1(input2, target2):
    ## TODO
    classifier = MLPClassifier(hidden_layer_sizes=(6, ),
                               solver="adam",
                               max_iter=200,
                               activation="tanh")

    classifier.fit(input2, target2[:, 1])
    con_mat = confusion_matrix(target2[:, 1], classifier.predict(input2))
    plot_hidden_layer_weights(classifier.coefs_[0])
Example #6
def ex_2_1(input2, target2):
    """
    Solution for exercise 2.1
    :param input2: The input from dataset2
    :param target2: The target from dataset2
    :return:
    """
    ## TODO
    pose = target2[:, 1]
    nn = MLPClassifier(hidden_layer_sizes=(6,), activation='tanh', max_iter=200)
    nn.fit(input2, pose)
    y_pred = nn.predict(input2)
    C = confusion_matrix(pose, y_pred)
    plot_hidden_layer_weights(nn.coefs_[0])
    return C
Example #7
def ex_2_1(input2, target2):
    """
    Solution for exercise 2.1
    :param input2: The input from dataset2
    :param target2: The target from dataset2
    :return:
    """
    ## TODO
    n_hidden_neurons = 6
    nn = MLPClassifier(activation='tanh', solver='adam', max_iter=200, hidden_layer_sizes=(n_hidden_neurons,))
    target = target2[:, 2]
    ## Train the network
    nn.fit(input2, target)
    predictions = nn.predict(input2)
    C = confusion_matrix(target, predictions)
    hidden_layer_weights = nn.coefs_[0]
    plot_hidden_layer_weights(hidden_layer_weights)
    print(C)
Example #8
def ex_2_1(input2, target2):

    target2 = np.transpose(target2)
    target2 = target2[1]
    nn = MLPClassifier(hidden_layer_sizes=(8, ),
                       activation='tanh',
                       solver='adam',
                       max_iter=200)

    model = nn.fit(input2, target2)

    y_predict = model.predict(input2)

    # confusion_matrix expects (y_true, y_pred); keep the true labels first
    C = confusion_matrix(target2, y_predict)
    print(C)
    hidden_layer_weights = model.coefs_

    plot_hidden_layer_weights(hidden_layer_weights[0])

    pass
Example #9
def ex_2_1(input2, target2):
    """
    Solution for exercise 2.1
    :param input2: The input from dataset2
    :param target2: The target from dataset2
    :return:
    """

    classifier = MLPClassifier(hidden_layer_sizes=(6, ),
                               activation='tanh',
                               solver='adam',
                               max_iter=200)
    classifier.fit(input2, target2[:, 1])
    pred2 = classifier.predict(input2)
    confmat = confusion_matrix(target2[:, 1], pred2)
    coefs = classifier.coefs_
    print(confmat)
    plot_hidden_layer_weights(coefs[0])
    ## TODO
    pass
Example #10
def ex_2_2(input1, target1, input2, target2):

    target1 = np.transpose(target1)
    target1 = target1[0]
    target2 = np.transpose(target2)
    target2 = target2[0]

    acc_train = np.zeros((10, ))
    acc_test = np.zeros((10, ))
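    # Train ten networks with different random seeds; keep the confusion matrix of the best test run.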
    best_acc = -1
    for i in range(10):
        nn = MLPClassifier(random_state=i,
                           hidden_layer_sizes=(20, ),
                           activation='tanh',
                           solver='adam',
                           max_iter=1000)

        model = nn.fit(input1, target1)
        acc_train[i] = model.score(input1, target1)
        acc_test[i] = model.score(input2, target2)
        if acc_test[i] > best_acc:
            best_acc = acc_test[i]
            y_predict = model.predict(input2)
            C = confusion_matrix(target2, y_predict)
    k = 0
    for i, a in enumerate(target2):
        if a != y_predict[i] and k < 20:
            plot_image(input2[i])
            k = k + 1

    hidden_layer_weights = model.coefs_
    plot_hidden_layer_weights(hidden_layer_weights[0])

    plot_histogram_of_acc(acc_train, acc_test)
    print(C)

    pass
Example #11
def ex_2_1(X_train, y_train, X_test, y_test):
    """
    Solution for exercise 2.1
    :param X_train: Train set
    :param y_train: Targets for the train set
    :param X_test: Test set
    :param y_test: Targets for the test set
    :return:
    """

    randomSeed = np.random.randint(1, 100, 1)

    n_hidd = [100]

    score_train, score_test = [], []

    best_score = 0

    bestNetwork = MLPClassifier()

    classes = [
        "T-shirt/top", "trousers/pants", "pullover shirt", "dress", "coat",
        "sandal", "shirt", "sneaker", "bag", "ankle boot"
    ]
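    # Train one network per hidden-layer size and random seed; keep the network with the best test accuracy.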

    for n, n_h in enumerate(n_hidd):
        for s, seed in enumerate(randomSeed):
            nn = MLPClassifier(hidden_layer_sizes=(n_h, ),
                               activation='tanh',
                               max_iter=50,
                               random_state=seed)

            nn.fit(X_train, y_train)

            scoretrain = nn.score(X_train, y_train)
            scoretest = nn.score(X_test, y_test)

            score_train.append(scoretrain)
            score_test.append(scoretest)

            if scoretest > best_score:
                bestNetwork = nn
                best_score = scoretest

            print(
                100 / (len(n_hidd) * len(randomSeed)) *
                ((n * len(randomSeed)) + (s + 1)), "%")

    plot_boxplot(score_train, score_test)

    prediction = bestNetwork.predict(X_test)
    confusionMatrix = confusion_matrix(y_test, prediction)

    #confusion matrix
    print("Confusion matrix:")
    print(classes)

    print(confusionMatrix)

    #Weight
    print(len(bestNetwork.coefs_))

    plot_hidden_layer_weights(bestNetwork.coefs_[0])

    print("Misclassified Pictures")

    # boolean mask: True where the prediction matches the true label
    correct_mask = prediction == y_test

    indexPosList = []

    for i, is_correct in enumerate(correct_mask):
        if not is_correct:
            indexPosList.append(i)

    print(indexPosList)

    for i in range(5):
        print("MLPClassifer think it is", prediction[indexPosList[i]] + 1,
              "but it is", y_test[indexPosList[i]] + 1)
        plot_image(X_test[indexPosList[i]])
    ## TODO
    pass