Example #1
def train_lr():
    params = {
        "offline_model_dir": "weights/lr",
    }
    params.update(params_common)

    X_train, X_valid = load_data("train"), load_data("vali")

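    # fit the "ranking" LR model on the training split, validating on X_valid, then save the session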
    model = LogisticRegression("ranking", params, logger)
    model.fit(X_train, validation_data=X_valid)
    model.save_session()
Example #2
def train_lr():
    params = {
        "offline_model_dir": "../weights",
    }
    params.update(params_common)

    X_train, X_valid = load_data("train"), load_data("vali")
    X_test = load_data("test")
    # print(X_test['label'])

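    # same flow as Example #1, plus writing test-set predictions to pred.txt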
    model = LogisticRegression("ranking", params, logger)
    model.fit(X_train, validation_data=X_valid)
    model.save_session()
    model.predict(X_test, 'pred.txt')
Example #3
import numpy as np
from model import LogisticRegression

# load data
x_train = np.load('./data/LR/train_data.npy')[:, 1:]
y_train = np.load('./data/LR/train_target.npy')
x_test = np.load('./data/LR/test_data.npy')[:, 1:]
y_test = np.load('./data/LR/test_target.npy')

# create an LR model and fit it
lr = LogisticRegression(learning_rate=1,
                        max_iter=10,
                        fit_bias=True,
                        optimizer='Newton',
                        seed=0)
lr.fit(x_train, y_train, val_data=(x_test, y_test))

# predict and calculate acc
train_acc = lr.score(x_train, y_train, metric='acc')
test_acc = lr.score(x_test, y_test, metric='acc')
print("train acc = {0}".format(train_acc))
print("test acc = {0}".format(test_acc))

# plot learning curve and decision boundary
lr.plot_learning_curve()
lr.plot_boundary(x_train, y_train)
lr.plot_boundary(x_test, y_test)
Example #4
from mnist.data_prepration import create_data
from model import LogisticRegression
import numpy as np
import matplotlib.pyplot as plt

# load the MNIST subset, scale pixel values to [0, 1] and reshape the
# labels into row vectors (samples along the columns)
X_train, Y_train, X_test, Y_test = create_data(1, 7)
X_train = X_train.T / 255
X_test = X_test.T / 255
Y_test = Y_test.reshape(1, Y_test.shape[0])
Y_train = Y_train.reshape(1, Y_train.shape[0])


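# fit for 10000 iterations at learning rate 0.4; fit returns the recorded cross-entropy costs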
model = LogisticRegression()
costs = model.fit(X_train, Y_train, 10000, 0.4)
# Plot learning curve (with costs)
costs = np.squeeze(costs)
plt.plot(costs)
plt.ylabel('cross entropy loss')
plt.xlabel('iterations')
plt.title("Learning rate = " + str(0.4))
plt.show()


accuracy_train, cost_train, prediction = model.evaluate(X_train, Y_train)
accuracy_test, cost_test, prediction = model.evaluate(X_test, Y_test)
print("accuracy on train set: " + str(accuracy_train))
print("cross entropy loss on train set: " + str(cost_train))
print("accuracy on test set: " + str(accuracy_test))
print("cross entropy loss on test set: " + str(cost_test))
print('Done')

Example #5
# imports reconstructed for this snippet; worm_images, noworm_images, worm_label,
# noworm_label, shuffling_files, data_split and the LogisticRegression class are
# assumed to be defined or imported earlier in the original script
import time

import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics import accuracy_score

temp_X_train = np.concatenate((worm_images, noworm_images))
y_train = np.concatenate((worm_label, noworm_label))

print('Shuffling images and labels ...')
X_data, y_data = shuffling_files(temp_X_train, y_train)

print('splitting data ...')
X_train, X_test = data_split(X_data)
y_train, y_test = data_split(y_data)
print('Done')

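# scale pixel values to [0, 1]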
X_train = X_train / 255
X_test = X_test / 255

model = LogisticRegression(lr=0.02, epochs=500, lamb=8)
tic1 = time.time()
model.fit(X_train, y_train)
toc1 = time.time()

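# time inference separately and convert the per-class outputs to label indices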
tic2 = time.time()
y_pred = model.predict(X_test)
toc2 = time.time()
y_pred = np.argmax(y_pred, axis=1)

print('Training Time: {}'.format(toc1 - tic1))
print('Testing Time: {}'.format(toc2 - tic2))
print('acc_test: {}'.format(accuracy_score(y_test, y_pred)))
plt.show()
Example #6
from model import LogisticRegression
from Titanic.dataPrepration import create_data

train_x, train_y, test_x, test_y = create_data()
# transpose features and labels to the layout the model expects
train_x = train_x.T
train_y = train_y.T
test_x = test_x.T
test_y = test_y.T

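# train for 20000 iterations at learning rate 0.2 (same positional arguments as the MNIST example above)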
model = LogisticRegression()
model.fit(train_x, train_y, 20000, 0.2)
accuracy_train, cost_train, prediction = model.evaluate(train_x, train_y)
accuracy_test, cost_test, prediction = model.evaluate(test_x, test_y)
print("accuracy on train set: " + str(accuracy_train))
print("cross entropy loss on train set: " + str(cost_train))
print("accuracy on test set: " + str(accuracy_test))
print("cross entropy loss on test set: " + str(cost_test))