from MLlib.models import PolynomialRegression
from MLlib.optimizers import Adam
from MLlib.loss_func import MeanSquaredError
from MLlib.utils.misc_utils import read_data, printmat

X, Y = read_data('datasets/Polynomial_reg.txt')

polynomial_model = PolynomialRegression(3)  # polynomial degree is the user's choice (3 here)

optimizer = Adam(0.01, MeanSquaredError)

polynomial_model.fit(X, Y, optimizer=optimizer, epochs=200, zeros=True)

Z = polynomial_model.predict(X)

printmat('predictions', Z)

polynomial_model.save('test')

polynomial_model.plot(X, Y, Z, optimizer=optimizer, epochs=200, zeros=True)
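
The degree argument is entirely up to the user; as a minimal sketch (reusing only the calls already shown above, with nothing new assumed), the same pipeline works unchanged for a higher-degree fit:

# Sketch: same workflow with a degree-5 polynomial; only the degree changes.
quintic_model = PolynomialRegression(5)
quintic_optimizer = Adam(0.01, MeanSquaredError)
quintic_model.fit(X, Y, optimizer=quintic_optimizer, epochs=200, zeros=True)
printmat('degree-5 predictions', quintic_model.predict(X))
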
from MLlib.models import LinearRegression
from MLlib.optimizers import Adam
from MLlib.loss_func import MeanSquaredError
from MLlib.utils.misc_utils import read_data, printmat

X, Y = read_data('datasets/linear_reg_00.txt')

linear_model = LinearRegression()

optimizer = Adam(0.01, MeanSquaredError)

linear_model.fit(X, Y, optimizer=optimizer, epochs=200, zeros=False)

printmat('predictions', linear_model.predict(X))

linear_model.save('test')
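
The only change from the polynomial example is zeros=False; presumably the zeros flag toggles zero-initialised weights, though that reading is an assumption and is not stated above. A sketch fitting a second model with zeros=True, reusing only the calls already shown, so the two initialisations can be compared:

# Sketch: refit with zeros=True for comparison. The zeros flag presumably selects
# zero-initialised weights (an assumption; the example above does not say).
zero_init_model = LinearRegression()
zero_init_model.fit(X, Y, optimizer=Adam(0.01, MeanSquaredError), epochs=200, zeros=True)
printmat('predictions (zeros=True)', zero_init_model.predict(X))
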
Example #3
from MLlib import Tensor
from MLlib.regularizer import LinearRegWith_Regularization
from MLlib.regularizer import L1_Regularizer
from MLlib.optim import SGDWithMomentum
from MLlib.utils.misc_utils import printmat
import numpy as np

np.random.seed(5322)

x = Tensor.randn(10, 8)  # (batch_size, features)

y = Tensor.randn(10, 1)

reg = LinearRegWith_Regularization(8,
                                   L1_Regularizer,
                                   optimizer=SGDWithMomentum,
                                   Lambda=7)

# Regularizer, optimizer and Lambda as per the user's choice

printmat("Total Loss", reg.fit(x, y, 800))