Example no. 1
import neural_network as nn
from data_loader import load_wine


def chart_job(s, i, j):
    # Train a network with hidden-layer layout `s` on a fresh wine split
    # and report the test cost for grid cell (i, j).
    learning_data, testing_data = load_wine(test_count=20)
    net = nn.NeuralNetwork(0.01, 100, [*s], 1.04, 1.05, 0.7,
                           0.020)  # alternative: int((s[0] * s[1]) * 5)
    net.feed_training_data(*learning_data)
    net.feed_test_data(*testing_data)
    net.start_learning()
    prediction, cost = net.test(*testing_data)

    return (i, j, cost, s)
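
The `(s, i, j)` signature and the `(i, j, cost, s)` return value suggest `chart_job` is a worker for a grid search over layer sizes, like the meshgrid loop in Example no. 4. A minimal dispatch sketch, assuming the function and imports above (the pool-based driver itself is an illustration, not part of the original source):

from multiprocessing import Pool

import numpy as np

if __name__ == "__main__":
    a, b = range(2, 10), range(2, 10)
    S1, S2 = np.meshgrid(a, b)
    # One job per grid cell; each worker returns (i, j, cost, s).
    jobs = [((s1, s2), i, j)
            for i, (r1, r2) in enumerate(zip(S1, S2))
            for j, (s1, s2) in enumerate(zip(r1, r2))]
    with Pool() as pool:
        results = pool.starmap(chart_job, jobs)
    costs = np.empty(S1.shape)
    for i, j, cost, s in results:
        costs[i, j] = cost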
Example no. 2
from sklearn.metrics import accuracy_score

# `dl` (data loaders) and `pa` (learning algorithms) are assumed to be
# project-local modules imported elsewhere in the original file.


def main():
    args = parse_arguments()

    # 1. Load the data; dl also provides load_diab(), load_susy(), and
    #    load_iris() as drop-in alternatives.
    train_set, test_set = dl.load_wine()
    Xtrain, ytrain = train_set
    Xtest, ytest = test_set

    # Older functional-style alternatives:
    # weights = pa.fit(Xtrain, ytrain, float(sys.argv[1]))
    # yhat = pa.predict(Xtest, weights)
    # weights = sgd.fit(Xtrain, ytrain, int(sys.argv[1]))
    # yhat = sgd.predict(Xtest, weights)

    # Learners to evaluate; gradient-descent and plain logistic learners
    # from the same module can be swapped in:
    # algs = {'batch': pa.BatchGradientDescent(), 'stochastic': pa.StochasticGradientDescent()}
    # algs = {'logist': pa.LogisticRegression()}
    algs = {'softmax': pa.SoftMaxRegression()}
    params = {
        'tolerance': args.tolerance,
        'epoch': args.epoch,
    }  # some learners also accept 'steps': args.steps

    for learner_name, learner in algs.items():
        print(learner_name, learner)
        learner.reset(params)
        learner.fit(Xtrain, ytrain)
        yhat = learner.predict(Xtest)
        print('yhat: {}'.format(yhat))
        print('y: {}'.format(ytest))
        print(accuracy_score(ytest, yhat))
        # plt.plot(learner.err); plt.show()  # optionally plot the error curve

        # Baseline: scikit-learn's multinomial logistic regression.
        from sklearn.linear_model import LogisticRegression
        lg = LogisticRegression(multi_class='multinomial',
                                solver='newton-cg',
                                random_state=42,
                                verbose=1,
                                max_iter=1000,
                                penalty='l2')
        lg.fit(Xtrain, ytrain)
        yhat = lg.predict(Xtest)
        print('----' * 50)
        print('Testing scikit-learn logistic regression')
        print('yhat: {}'.format(yhat))
        print('y: {}'.format(ytest))
        print(accuracy_score(ytest, yhat))
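
Example no. 2 calls a `parse_arguments()` helper that is not shown; the `params` dict implies it exposes at least `tolerance` and `epoch`. A minimal sketch of what it might look like, with assumed types and defaults:

import argparse


def parse_arguments():
    # Hypothetical reconstruction: only --tolerance and --epoch are
    # implied by the params dict above; the defaults are guesses.
    parser = argparse.ArgumentParser(
        description='Train a classifier on the wine data')
    parser.add_argument('--tolerance', type=float, default=1e-4,
                        help='convergence tolerance for the learner')
    parser.add_argument('--epoch', type=int, default=100,
                        help='number of training epochs')
    return parser.parse_args()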
Example no. 3
import matplotlib.pyplot as plt
import numpy as np
import neural_network as nn
from data_loader import load_wine

if __name__ == "__main__":
    # Constructor arguments are irrelevant here; the hyperparameters are
    # presumably restored by load_model.
    net = nn.NeuralNetwork(None, None, None, None, None, None, None)
    net.load_model('models/000100_10_6.mdl')

    plt.figure()
    for i in range(12):
        # Take the test split of a fresh random draw of the wine data.
        P, T = load_wine(test_count=20)[1]
        prediction, cost = net.test(P, T)

        # Plot prediction against target in a 3x4 grid of panels.
        plt.subplot(3, 4, i + 1)
        plt.grid(linestyle='--')
        plt.yticks(np.arange(min(T), max(T) + 0.01, 0.25))
        plt.plot(prediction)
        plt.plot(T)
        plt.title(f'Cost: {cost:.8f}')
    plt.show()
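
Examples 1, 3, and 4 all import `load_wine` from a project-local `data_loader`; the call sites imply it returns a `(learning_data, testing_data)` pair of `[inputs, targets]` containers with `test_count` samples held out. A stand-in that matches this interface, useful for running the snippets without the original module (the UCI file layout and the [0, 1] scaling are assumptions):

import numpy as np


def load_wine(test_count=20, path='wine.data'):
    # Hypothetical stand-in for data_loader.load_wine; assumes the UCI
    # wine file: class label in column 0, 13 features after it.
    data = np.loadtxt(path, delimiter=',')
    np.random.shuffle(data)
    targets, inputs = data[:, 0], data[:, 1:]
    # Scale each feature column to [0, 1].
    inputs = (inputs - inputs.min(axis=0)) / np.ptp(inputs, axis=0)
    split = len(data) - test_count
    # Lists rather than tuples, since Example no. 4 calls .copy() on them.
    return [inputs[:split], targets[:split]], [inputs[split:], targets[split:]]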
Example no. 4
import matplotlib.pyplot as plt
import numpy as np
from time import time

import neural_network as nn
from data_loader import load_wine

if __name__ == "__main__":
    # Grid of hidden-layer sizes to evaluate.
    # a, b = range(10, 101, 20), range(10, 101, 20)
    a, b = range(2, 10), range(2, 10)
    S1, S2 = np.meshgrid(a, b)
    c, t = 1, len(a) * len(b)  # presumably: progress counter and total cells
    PK = np.empty(S1.shape, dtype=int)  # % correct classification per cell

    plt.figure()
    ax = plt.axes(projection='3d')
    ax.set_xlabel('S1')
    ax.set_ylabel('S2')
    ax.set_zlabel('%PK')
    ax.set_zlim(0, 100)

    learning_data, testing_data = load_wine(test_count=20)

    start = time()

    for i, row in enumerate(zip(S1, S2)):
        for j, s in enumerate(zip(*row)):
            while True:
                # Work on fresh copies of the same split for each retry.
                learning_data, testing_data = (learning_data.copy(),
                                               testing_data.copy())
                net = nn.NeuralNetwork(0.01, int(s[0] * s[1] / 4), [*s],
                                       1.04, 1.05, 0.7, 0.020)
                net.feed_training_data(*learning_data)
                net.feed_test_data(*testing_data)
                net.start_learning(live_text=True)
                prediction = net.test(*testing_data)[0]
                pk = [

Example no. 5

# ########################## Experiment 1 - Iris, MDC, Leave-one-out

# The start of this snippet is missing; the loop head below is an assumed
# leave-one-out pass over the N samples, mirroring Experiment 2.
for i in range(N):
    ...

    # Calculate and accumulate accuracy
    acc_mdc = evaluation.calculate_accuracy(Yts, Yh)
    mean_acc_mdc = mean_acc_mdc + acc_mdc

# Mean Accuracy
mean_acc_mdc = mean_acc_mdc / N

print(
    'Mean Accuracy of MD Classifier, with Iris Dataset and Leave-one-out Method:'
)
print(mean_acc_mdc)
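
Both experiments lean on `evaluation.calculate_accuracy(Yts, Yh)`. A one-line stand-in consistent with how it is used here (the real module may differ):

import numpy as np


def calculate_accuracy(y_true, y_pred):
    # Fraction of predictions that match the true labels.
    return float(np.mean(np.asarray(y_true) == np.asarray(y_pred)))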

# ########################## Experiment 2 - Wine, KNN, Random Subsampling

# Load dataset
X, Y = data_loader.load_wine()

# Number of realizations for random subsampling
Nrealizations = 10

# Initialize the accuracy accumulator
mean_acc_knn = 0

for i in range(Nrealizations):

    # Split data between train and test
    Xtr, Ytr, Xts, Yts = hold_out.random_subsampling(X,
                                                     Y,
                                                     train_size=0.8,
                                                     random_state=None)
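
The loop is cut off after the split; by symmetry with Experiment 1 it presumably fits the KNN classifier on each subsample, accumulates its accuracy, and averages at the end. A plausible continuation (the `knn.predict` call is a guessed interface, not the original source):

    # Hypothetical continuation, mirroring Experiment 1's accumulation.
    Yh = knn.predict(Xtr, Ytr, Xts, k=3)  # assumed KNN interface
    acc_knn = evaluation.calculate_accuracy(Yts, Yh)
    mean_acc_knn = mean_acc_knn + acc_knn

# Mean accuracy over all realizations
mean_acc_knn = mean_acc_knn / Nrealizations

print(
    'Mean Accuracy of KNN Classifier, with Wine Dataset and Random Subsampling Method:'
)
print(mean_acc_knn)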