Example 1
from sklearn import preprocessing
from sklearn.neural_network import MLPClassifier


def mlp(x, y, x_test, hidden_units=128):
    # Three hidden layers of equal width, trained with the L-BFGS solver.
    model = MLPClassifier(hidden_layer_sizes=(hidden_units, hidden_units, hidden_units),
                          max_iter=1000,
                          solver='lbfgs')

    # Standardise the features; fit the scaler on the training data only.
    scaler = preprocessing.StandardScaler()
    x = scaler.fit_transform(x)
    x_test = scaler.transform(x_test)

    model.fit(x, y)

    # predict_proba replaces the private _predict call used in the original snippet;
    # for a binary problem, column 1 is the probability of the positive class,
    # which matches the old _predict(...)[:, 0] output.
    # get_best_threshold is a project helper defined elsewhere.
    return (model.predict_proba(x_test)[:, 1],
            get_best_threshold(y, model.predict_proba(x)[:, 1]))
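A minimal usage sketch for the function above, assuming a synthetic binary dataset from make_classification and a naive stand-in for the project's get_best_threshold helper (the real helper lives elsewhere in the original codebase and is not shown here):

import numpy as np
from sklearn.datasets import make_classification
from sklearn.model_selection import train_test_split


# Hypothetical stand-in: choose the score threshold that maximises training accuracy.
def get_best_threshold(y_true, scores):
    thresholds = np.unique(scores)
    return max(thresholds,
               key=lambda t: np.mean((scores >= t).astype(int) == y_true))


X, y = make_classification(n_samples=200, n_features=10, random_state=0)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=0)

test_scores, threshold = mlp(X_train, y_train, X_test, hidden_units=32)
test_labels = (test_scores >= threshold).astype(int)
print("test accuracy:", np.mean(test_labels == y_test))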
Example 2
import numpy as np
from sklearn import datasets
from sklearn.model_selection import train_test_split
from sklearn.neural_network import MLPClassifier

# load and train
iris = datasets.load_iris()
X = iris.data
y = iris.target
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3)
num_inputs = X.shape[1]
hidden_layers = (2, 2)  # (10, 10, 10) would be three layers of 10 units
model = MLPClassifier(activation='identity',
                      hidden_layer_sizes=hidden_layers,
                      max_iter=4000,
                      random_state=1)
model.fit(X_train, y_train)
# fit() sets out_activation_ itself, so override it only after training to get
# raw linear outputs from the forward pass.
model.out_activation_ = "identity"
predictions = model.predict(X_test)
# _predict is a private scikit-learn method; in older releases it returned the
# raw output-layer activations (newer releases return class labels here).
pred = model._predict(X_test)
error = (y_test != predictions).sum()
print("Trained Neural Network with accuracy of " +
      str(100 * (1 - error / len(predictions))) + "%.\n")
# coefs_ and intercepts_ are ragged lists of per-layer arrays, so keep them as
# object arrays rather than trying to stack them.
weights = np.array(model.coefs_, dtype=object)
bias = np.array(model.intercepts_, dtype=object)
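# With hidden_layers = (2, 2) on the 4-feature, 3-class iris data, weights holds
# per-layer matrices of shapes (4, 2), (2, 2) and (2, 3), and bias holds vectors
# of shapes (2,), (2,) and (3,); the loop below walks these layer by layer.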

temp_left_nodes = []
for m in range(len(X_test)):
    # Start the manual forward pass from the raw input features of this sample.
    left_nodes = list(X_test[m])

    # For each layer of weights
    for i in range(weights.shape[0]):
        for k in range(weights[i].shape[1]):
            sum = None
            # for each ROW of the matrix