Example #1
from utils import _initialize, optimizer

# DATA (the dataset choice is cut off in this excerpt; 'Titanic' is a
# placeholder that satisfies the assert below)
DATA_NAME = 'Titanic'

# HYPERPARAMETERS
batch_size = 779
num_epochs = 10000
learning_rate = 0.0001
epsilon = 0.001
gamma = 0.1

# OPTIMIZER
OPTIMIZER = 'SGD'
# ============================================================

assert DATA_NAME in ['Titanic', 'Digit']
assert OPTIMIZER in ['SGD', 'Momentum', 'RMSProp']

# Load dataset, model and evaluation metric
train_data, test_data, logistic_regression, metric = _initialize(DATA_NAME)
train_x, train_y = train_data

num_data, num_features = train_x.shape
print('# of Training data : ', num_data)

# Make model & optimizer
model = logistic_regression(num_features)
optim = optimizer(OPTIMIZER, gamma=gamma, epsilon=epsilon)

# TRAIN
loss = model.train(train_x, train_y, num_epochs, batch_size, learning_rate,
                   optim)
print('Training Loss at last epoch: %.2f' % loss)

# EVALUATION
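# (The evaluation step is cut off in this excerpt. A minimal sketch, assuming
# the model exposes an eval()-style prediction method and that the `metric`
# returned by _initialize takes (predictions, labels) -- both are guesses.)
test_x, test_y = test_data
pred = model.eval(test_x)  # prediction API not shown in the excerpt
print('Test metric: %.4f' % metric(pred, test_y))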
Example #2
from utils import _initialize, optimizer

# DATA ('Digit' is assumed here, since the excerpt handles the Digit-only
# mean-image unpacking and show_plot refers to the DIGIT dataset)
DATA_NAME = 'Digit'

# HYPERPARAMETERS
batch_size = 100
num_epochs = 30
learning_rate = 0.01
epsilon = 0.001
gamma = 0.05
show_plot = True  # show prediction sample images for DIGIT dataset

# OPTIMIZER
OPTIMIZER = 'SGD'
# =============================================================
assert DATA_NAME in ['Digit', 'Iris']
assert OPTIMIZER in ['SGD', 'Momentum', 'RMSProp']

# Load dataset, model and evaluation metric
train_data, test_data, softmax_classifier, accuracy = _initialize(DATA_NAME)
train_x, train_y = train_data
if DATA_NAME == 'Digit':
    train_x, mean_img = train_x

num_data, num_features = train_x.shape
num_label = int(train_y.max()) + 1
print('# of Training data : %d \n' % num_data)

# Make model & optimizer
model = softmax_classifier(num_features, num_label)
optim = optimizer(OPTIMIZER, gamma=gamma, epsilon=epsilon)

# TRAIN
loss = model.train(train_x, train_y, num_epochs, batch_size, learning_rate,
                   optim)
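# (The excerpt stops after training. A minimal continuation sketch; the
# prediction method name is a guess, `accuracy` is the metric returned by
# _initialize above, and the Digit test split may need the same
# (images, mean_img) unpacking as the training split.)
print('Training Loss at last epoch: %.2f' % loss)

test_x, test_y = test_data
pred = model.eval(test_x)  # prediction API not shown in the excerpt
print('Test Accuracy: %.4f' % accuracy(pred, test_y))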
Example #3
from utils import _initialize, optimizer

# DATA (left as None to match the unfilled template below; set to 'Concrete'
# or 'Graduate' before running)
DATA_NAME = None

# HYPERPARAMETERS
batch_size = None
num_epochs = None
learning_rate = None
epsilon = None
gamma = None

# OPTIMIZER
OPTIMIZER = None
# =============================================================

assert DATA_NAME in ['Concrete', 'Graduate']
assert OPTIMIZER in ['SGD', 'Momentum', 'RMSProp']

# Load dataset, model and evaluation metric
train_data, test_data, linear_regression, metric = _initialize(DATA_NAME)
train_x, train_y = train_data

num_data, num_features = train_x.shape
print('# of Training data : ', num_data)

# Make model & optimizer
model = linear_regression(num_features)
optim = optimizer(OPTIMIZER, gamma=gamma, epsilon=epsilon)

# TRAIN
loss = model.train(train_x, train_y, num_epochs, batch_size, learning_rate,
                   optim)
print('Training Loss at last epoch: %.2f' % loss)

# EVALUATION
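# (Truncated in the excerpt: the test-set metric returned by _initialize --
# an error measure such as RMSE for these regression datasets -- would be
# applied to the model's predictions here, following the same pattern as the
# sketch under Example #1.)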
Example #4
from utils import _initialize, optimizer

# DATA (the choice is cut off in this excerpt; 'Basic_coordinates' is a
# placeholder that satisfies the assert below)
DATA_NAME = 'Basic_coordinates'

# HYPERPARAMETERS
batch_size = 32  # not defined in the excerpt; placeholder so the train call below runs
num_epochs = 300
learning_rate = 0.005

# ============================================================

epsilon = 0.01  # not for SGD
gamma = 0.9  # not for SGD

# OPTIMIZER
OPTIMIZER = 'SGD'

assert DATA_NAME in ['Titanic', 'Digit', 'Basic_coordinates']
assert OPTIMIZER in ['SGD']

# Load dataset, model and evaluation metric
train_data, test_data, Perceptron, metric = _initialize(DATA_NAME)
train_x, train_y = train_data

num_data, num_features = train_x.shape
print('# of Training data : ', num_data)

# Make model & optimizer
model = Perceptron(num_features)
optim = optimizer(OPTIMIZER, gamma=gamma, epsilon=epsilon)

# TRAIN
loss = model.train(train_x, train_y, num_epochs, batch_size, learning_rate,
                   optim)
print('Training Loss at the last epoch: %.2f' % loss)

# EVALUATION
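# (Truncated in the excerpt; as in Example #1, the metric returned by
# _initialize would be applied to the Perceptron's predictions on test_data here.)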
Example #5
from utils import _initialize, optimizer
from sklearn.metrics import mean_squared_error
from sklearn.linear_model import LinearRegression

# 1. Choose DATA : Concrete / Graduate
# ========================= EDIT HERE ========================
# DATA
DATA_NAME = 'Graduate'
# ============================================================

assert DATA_NAME in ['Concrete', 'Graduate']

# Load dataset, model and evaluation metric
train_data, test_data, _, metric = _initialize(DATA_NAME)
train_x, train_y = train_data

num_data, num_features = train_x.shape
print('# of Training data : ', num_data)
MSE = 0.0
# ========================= EDIT HERE ========================
# Make model & optimizer
x = train_x
y = train_y.reshape(x.shape[0], 1)
test_x, test_y = test_data
test_y = test_y.reshape(test_x.shape[0], 1)
# TRAIN
model = LinearRegression().fit(x, y)
# EVALUATION
MSE = mean_squared_error(test_y, model.predict(test_x))
# ============================================================
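# (The excerpt ends at the closing marker; presumably the surrounding template
# reports the result, e.g.:)
print('MSE on test data : %f' % MSE)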