Example #1
        temp.append(float(input("How much % alcohol did your drink have? \n")))
        temp.append(float(input("How much ml of you drink did you have? \n")))
        user_info.append(temp)
        a.array(temp[3], temp[2], temp[0], hunger)
        a.plot()
        e = Eliminator(user_info[0][3])
        e.elimination()
        e.plot()
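        # look up the current blood alcohol level from the precomputed elimination curve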
        elimination_array = np.load("eliminated_array.npy")
        elim_time = np.array(elimination_array[minute], ndmin=2)
        bac_array.append(elimination_array[minute])

        # trains the neural network every 5 inputs and lets it predict the drunkenness
        user_input_array.append(how_drunk)
        if len(bac_array) == 1 or len(bac_array) % 5 == 0:
            neural_network.learn(bac_array, user_input_array)
        prediction = neural_network.predict(elim_time)
        print(prediction)
        np.save("drinking_session.npy", user_info)
    # tells the elimination rate adjuster by how much the elimination rate has to be adjusted
    elif decision == 2:
        minutes_ago = int(input("How many minutes ago did you sober up? \n"))
        sober_time = minute - minutes_ago
        e.adjustment(sober_time)
    elif decision == 3:
        time = input("What time is it? (enter in hh:mm format) \n")
        hour = int(time.split(":")[0])
Example #2
import neural_network as network

if __name__ == "__main__":

    batch, output = 10, 3
    train_percent = 80
    episodes = int(1e3)
    learning_rate = 1e-2
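    # integer encoding for the three iris species labels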
    encoding = [("setosa", 0), ("versicolor", 1), ("virginica", 2)]
    X, y = network.extract("iris.csv",
                           encoding=encoding,
                           output=output,
                           label="species")
    dimension = len(X[0])
    neurons = [dimension, 10, 20, 20, 5, output]
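    # split the data into training, test and validation sets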
    trainer, tester, validater = network.partition(X, y, output, batch,
                                                   train_percent)
    activity = ["relu", "relu", "relu", "relu", ""]
    cost = "crossentropy"
    optimizer = "adam"
    model, error, accuracy = network.learn(trainer, neurons, activity,
                                           learning_rate, episodes, cost,
                                           optimizer)
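    # plot the learning curves and evaluate the model on the test set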
    network.plot(
        error, "forestgreen",
        "accumulated_errors_over_each_epoch_multilabel_classification",
        "Episode", "Error")
    network.plot(accuracy, "mediumvioletred",
                 "accuracy_over_each_epoch_multilabel_classification",
                 "Episode", "Learning accuracy")
    network.test(model, tester, output)
Example #3
import neural_network as network

if __name__ == "__main__":

    X, y = network.extract("mnist", "MNIST")
    Y = [element.item() for element in y]
    output, window = len(list(set(Y))), 3
    batch, train_percent = 32, 80
    episodes, learning_rate = 10, 5e-3
    trainer, tester, validater = network.partition(X, y, output, batch,
                                                   train_percent)
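    # read the number of input channels and the image size from the first sample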
    height, channel = X[0].shape[-1], X[0].shape[0]
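    # convolutional architecture: kernel/stride/padding, channel counts per conv layer,
    # then the fully connected layer sizes and the activation for each layer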
    kernel, stride, padding = [5, 5], [1, 1], [1, 1]
    convolutions = [channel, 10, 20]
    neurons = [50, output]
    activity = ["relu", "relu"]
    activity += ["relu", ""]
    cost = "crossentropy"
    optimizer = "adam"
    model, error, accuracy = network.learn(trainer, neurons, activity,
                                           learning_rate, episodes, cost,
                                           optimizer, kernel, stride, padding,
                                           height, window, convolutions)
    network.plot(error, "forestgreen",
                 "accumulated_error_over_each_epoch_mnist_classification_cnn",
                 "Episode", "Error")
    network.plot(accuracy, "mediumvioletred",
                 "accuracy_over_each_epoch_mnist_classification_cnn",
                 "Episode", "Learning accuracy")
    network.test(model, tester, output, True)