# Example #1
# Train a logistic-regression model, plot its learning curve, and report
# accuracy / cross-entropy loss on the train and test splits.
# Hyperparameters are named once so the plot title cannot drift from the
# values actually passed to fit() (the original hard-coded 0.4 twice).
NUM_ITERATIONS = 10000
LEARNING_RATE = 0.4

# Labels arrive as 1-D vectors; the model expects row vectors of shape (1, m).
Y_test = Y_test.reshape(1, Y_test.shape[0])
Y_train = Y_train.reshape(1, Y_train.shape[0])


model = LogisticRegression()
costs = model.fit(X_train, Y_train, NUM_ITERATIONS, LEARNING_RATE)
# Plot learning curve (cost per recorded iteration)
costs = np.squeeze(costs)
plt.plot(costs)
plt.ylabel('cross entropy loss')
plt.xlabel('iterations ')
plt.title("Learning rate =" + str(LEARNING_RATE))
plt.show()


# evaluate() returns (accuracy, cost, predictions); predictions are unused here.
accuracy_train, cost_train, _ = model.evaluate(X_train, Y_train)
accuracy_test, cost_test, _ = model.evaluate(X_test, Y_test)
print("accuracy on train set: " + str(accuracy_train))
print("cross entropy loss on train set: " + str(cost_train))
print("accuracy on test set: " + str(accuracy_test))
print("cross entropy loss on test set: " + str(cost_test))
#
# Reference results from a previous run:
# accuracy on train set: 99.75397862689321
# cross entropy loss on train set: 0.007362215273304598
# accuracy on test set: 99.44521497919555
# cross entropy loss on test set: 0.014537294410167503
# Example #2
from model import LogisticRegression
from Titanic.dataPrepration import create_data

# Load the Titanic split and transpose every array so that examples lie
# along columns, the layout the LogisticRegression model expects.
train_x, train_y, test_x, test_y = (arr.T for arr in create_data())

# Train for 20000 iterations with a learning rate of 0.2, then report
# accuracy and cross-entropy loss on both splits (predictions are unused).
clf = LogisticRegression()
clf.fit(train_x, train_y, 20000, 0.2)
train_acc, train_cost, _ = clf.evaluate(train_x, train_y)
test_acc, test_cost, _ = clf.evaluate(test_x, test_y)
print(f"accuracy on train set: {train_acc}")
print(f"cross entropy loss on train set: {train_cost}")
print(f"accuracy on test set: {test_acc}")
print(f"cross entropy loss on test set: {test_cost}")