def __init__(self, epochs, loss='mean_squared_error', init_method='he_uniform',
             optimizer=None, penalty='ridge', penalty_weight=0.5, l1_ratio=0.5):
    """Configure the model's training setup.

    Args:
        epochs: Number of training iterations.
        loss: Name of the objective function (resolved via ``objective``).
        init_method: Weight-initialization scheme (resolved via ``init``).
        optimizer: Optimizer configuration dict; defaults to ``{}`` when
            omitted (resolved via ``optimize``).
        penalty: Regularization type passed to ``regularize``.
        penalty_weight: Strength of the regularization penalty.
        l1_ratio: Elastic-net mixing ratio forwarded to ``regularize``.
    """
    # Use a None sentinel instead of a mutable `{}` default: a dict default
    # would be a single shared object reused across every call.
    optimizer = {} if optimizer is None else optimizer
    self.epochs = epochs
    self.loss = objective(loss)
    self.init_method = init(init_method)
    self.optimizer = optimize(optimizer)
    self.regularization = regularize(penalty, penalty_weight, l1_ratio=l1_ratio)
def __init__(self, epochs, loss='binary_crossentropy', init_method='he_normal',
             optimizer=None, penalty='lasso', penalty_weight=0, l1_ratio=0.5):
    """Configure the classifier's training setup.

    Args:
        epochs: Number of training iterations.
        loss: Name of the objective function (resolved via ``objective``).
        init_method: Weight-initialization scheme (resolved via ``init``).
        optimizer: Optimizer configuration dict; defaults to ``{}`` when
            omitted (resolved via ``optimize``).
        penalty: Regularization type passed to ``regularize``.
        penalty_weight: Strength of the regularization penalty
            (0 disables the penalty by default).
        l1_ratio: Elastic-net mixing ratio forwarded to ``regularize``.
    """
    # Use a None sentinel instead of a mutable `{}` default: a dict default
    # would be a single shared object reused across every call.
    optimizer = {} if optimizer is None else optimizer
    self.epochs = epochs
    self.loss = objective(loss)
    self.init_method = init(init_method)
    self.optimizer = optimize(optimizer)
    # Binary classifier: outputs are squashed through a fixed sigmoid.
    self.activate = activation('sigmoid')
    self.regularization = regularize(penalty, penalty_weight, l1_ratio=l1_ratio)
test_size = 0.3) opt = register_opt(optimizer_name = 'sgd', momentum = 0.01, learning_rate = 0.001) model = PolynomialRegression(degree = 5, epochs = 100, optimizer = opt, penalty = 'elastic', penalty_weight = 0.5, l1_ratio = 0.3) fit_stats = model.fit(train_data, train_label) targets = np.expand_dims(test_label, axis = 1) predictions = np.expand_dims(model.predict(test_data), axis = 1) mse = objective('mean_squared_error').forward(predictions, targets) print('Mean Squared Error: {:.2f}'.format(mse)) plot_metric('Accuracy vs Loss', len(fit_stats['train_loss']), fit_stats['train_acc'], fit_stats['train_loss'], legend = ['acc', 'loss']) plot_regression_results(train_data, train_label, test_data, test_label, input_data, model.predict(input_data), mse, 'Polynomial Regression',