Example #1
    def test_on_batch(self, test_batch_data, test_batch_label, train_mode=False):
        # forward pass only; no parameter updates during evaluation
        predictions = self.foward_pass(test_batch_data, train_mode=train_mode)

        # resolve the configured objective once, then reuse it for loss and accuracy
        loss_fn = objective(self.loss)
        loss    = np.mean(loss_fn.forward(predictions, test_batch_label))
        acc     = loss_fn.accuracy(predictions, test_batch_label)

        return loss, acc
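
A minimal evaluation sketch built around the method above. The model instance, the test arrays, and the fixed-size slicing are assumptions for illustration; only model.test_on_batch from Example #1 is taken from the source.

import numpy as np

def evaluate(model, test_data, test_label, batch_size=64):
    # accumulate per-batch loss/accuracy from test_on_batch and average them
    losses, accs = [], []
    for start in range(0, len(test_data), batch_size):
        end = start + batch_size
        loss, acc = model.test_on_batch(test_data[start:end], test_label[start:end])
        losses.append(loss)
        accs.append(acc)
    return np.mean(losses), np.mean(accs)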
Example #2
    def train_on_batch(self, train_batch_data, train_batch_label):
        # forward pass in training mode
        predictions = self.foward_pass(train_batch_data, train_mode=True)

        # resolve the configured objective once, then reuse it
        loss_fn = objective(self.loss)
        loss    = np.mean(loss_fn.forward(predictions, train_batch_label))
        acc     = loss_fn.accuracy(predictions, train_batch_label)

        # propagate the loss gradient back through the model to update parameters
        self.backward_pass(loss_fn.backward(predictions, train_batch_label))

        return loss, acc
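
A matching epoch loop is sketched below; everything except model.train_on_batch from Example #2 is an assumption for illustration, including the fixed batch size and the per-epoch shuffle.

import numpy as np

def train(model, train_data, train_label, epochs=10, batch_size=64):
    for epoch in range(epochs):
        # shuffle once per epoch so batches differ between epochs
        perm = np.random.permutation(len(train_data))
        data, labels = train_data[perm], train_label[perm]

        batch_loss, batch_acc = [], []
        for start in range(0, len(data), batch_size):
            end = start + batch_size
            loss, acc = model.train_on_batch(data[start:end], labels[start:end])
            batch_loss.append(loss)
            batch_acc.append(acc)

        print('epoch {:3d}  loss {:.4f}  acc {:.4f}'.format(
            epoch + 1, np.mean(batch_loss), np.mean(batch_acc)))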
Example #3
    def __init__(self,
                 epochs,
                 loss           = 'binary_crossentropy',
                 init_method    = 'he_normal',
                 optimizer      = {},
                 penalty        = 'lasso',
                 penalty_weight = 0,
                 l1_ratio       = 0.5):

        self.epochs         = epochs
        self.loss           = objective(loss)            # objective/loss function
        self.init_method    = init(init_method)          # weight initialization scheme
        self.optimizer      = optimize(optimizer)        # optimizer built from the given settings
        self.activate       = activation('sigmoid')      # sigmoid output for binary classification
        self.regularization = regularize(penalty, penalty_weight, l1_ratio = l1_ratio)
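
The sigmoid output and binary cross-entropy default above point to a binary classifier. A hedged instantiation sketch follows: 'BinaryClassifier' is a stand-in name for whichever class owns this __init__, the register_opt call mirrors the one in the script at the end of this section, and the hyperparameter values are illustrative.

# 'BinaryClassifier' is a placeholder for the class that defines the __init__ above
opt = register_opt(optimizer_name='sgd', momentum=0.01, learning_rate=0.001)

model = BinaryClassifier(epochs=100,
                         optimizer=opt,
                         penalty='lasso',
                         penalty_weight=0.01)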
Example #4
    def __init__(self,
                 epochs,
                 loss='mean_squared_error',
                 init_method='he_uniform',
                 optimizer={},
                 penalty='ridge',
                 penalty_weight=0.5,
                 l1_ratio=0.5):

        self.epochs = epochs
        self.loss = objective(loss)
        self.init_method = init(init_method)
        self.optimizer = optimize(optimizer)
        self.regularization = regularize(penalty,
                                         penalty_weight,
                                         l1_ratio=l1_ratio)
Example #5

# the top of this snippet (imports and data loading) was cut off; the split call is
# completed here assuming a train_test_split helper that returns the tuple
# (train_data, test_data, train_label, test_label)
train_data, test_data, train_label, test_label = train_test_split(
    input_data, input_label, test_size=0.3)

opt = register_opt(optimizer_name='sgd', momentum=0.01, learning_rate=0.001)

# model definition
model = LinearRegression(epochs=100,
                         optimizer=opt,
                         penalty='l1',
                         penalty_weight=0.8)
fit_stats = model.fit(train_data, train_label)

# fit_stats = model.fit_OLS(train_data, train_label) # ---- Ordinary Least Squares Method

targets = np.expand_dims(test_label, axis=1)
predictions = np.expand_dims(model.predict(test_data), axis=1)
mse = np.mean(objective('mean_squared_error').forward(predictions, targets))

print('Mean Squared Error: {:.2f}'.format(mse))

model_name = 'boston_linear_regression'
plot_metric('accuracy_loss',
            len(fit_stats['train_loss']),
            fit_stats['train_acc'],
            fit_stats['train_loss'],
            model_name=model_name,
            legend=['acc', 'loss'])

plot_regression_results(train_data,
                        train_label,
                        test_data,
                        test_label,