"""Fit a linear-regression model on a sample dataset and save it."""
from MLlib.models import LinearRegression
from MLlib.optimizers import Adam
from MLlib.loss_func import MeanSquaredError
from MLlib.utils.misc_utils import read_data, printmat

# Load the features/labels from the bundled sample dataset.
X, Y = read_data('datasets/linear_reg_00.txt')

model = LinearRegression()
adam = Adam(0.01, MeanSquaredError)

# Train with randomly-initialised weights (zeros=False).
model.fit(X, Y, optimizer=adam, epochs=200, zeros=False)

# Show the fitted predictions, then persist the model to disk.
printmat('predictions', model.predict(X))
model.save('test')
"""Fit a degree-3 polynomial-regression model, save it, and plot the fit.

Fix: the original called ``polynomial_model.predict(X)`` twice — once for
``printmat`` and once to build ``Z`` for plotting.  The prediction is now
computed a single time and reused.
"""
from MLlib.models import PolynomialRegression
from MLlib.optimizers import Adam
from MLlib.loss_func import MeanSquaredError
from MLlib.utils.misc_utils import read_data, printmat

X, Y = read_data('datasets/Polynomial_reg.txt')

polynomial_model = PolynomialRegression(3)  # degree as user's choice
optimizer = Adam(0.01, MeanSquaredError)

# Train with zero-initialised weights (zeros=True).
polynomial_model.fit(X, Y, optimizer=optimizer, epochs=200, zeros=True)

# Predict once; reuse the result for both display and plotting.
Z = polynomial_model.predict(X)
printmat('predictions', Z)

polynomial_model.save('test')
polynomial_model.plot(X, Y, Z, optimizer=optimizer, epochs=200, zeros=True)
"""Detect numerical outliers in a sample dataset column."""
from MLlib.utils.misc_utils import read_data
from MLlib.models import Numerical_outliers

# Only the label column is needed here; the feature column is unused.
_, labels = read_data("datasets/numerical_outliers.txt")

Numerical_outliers.get_outliers(labels[0])
"""Fit a logistic-regression classifier, plot its decision output, save it."""
from MLlib.models import LogisticRegression
from MLlib.optimizers import Adam
from MLlib.loss_func import LogarithmicError
from MLlib.utils.misc_utils import read_data, printmat

X, Y = read_data('datasets/logistic_reg_00.txt')

logistic_model = LogisticRegression()
adam = Adam(0.03, LogarithmicError)

# Train with randomly-initialised weights (zeros=False).
logistic_model.fit(X, Y, optimizer=adam, epochs=200, zeros=False)

printmat('predictions', logistic_model.predict(X))

# Plot the data against the hard class assignments.
logistic_model.Plot(
    X, Y, logistic_model.classify(X),
    optimizer=adam, epochs=200, zeros=False,
)

logistic_model.save('test')
"""Fit a logistic-regression classifier on a sample dataset and save it."""
from MLlib.models import LogisticRegression
from MLlib.optimizers import Adam
from MLlib.loss_func import LogarithmicError
from MLlib.utils.misc_utils import read_data, printmat

X, Y = read_data('MLlib/datasets/logistic_reg_00.txt')

logistic_model = LogisticRegression()
adam = Adam(0.03, LogarithmicError)

# Train with randomly-initialised weights (zeros=False).
logistic_model.fit(X, Y, optimizer=adam, epochs=200, zeros=False)

printmat('predictions', logistic_model.predict(X))
logistic_model.save('test')
"""Fit a linear-regression model (lr=0.001) on a sample dataset and save it."""
from MLlib.models import LinearRegression
from MLlib.optimizers import Adam
from MLlib.loss_func import MeanSquaredError
from MLlib.utils.misc_utils import read_data, printmat

X, Y = read_data('MLlib/datasets/linear_reg_00.txt')

model = LinearRegression()
adam = Adam(0.001, MeanSquaredError)

# Train with randomly-initialised weights (zeros=False).
model.fit(X, Y, optimizer=adam, epochs=200, zeros=False)

printmat('predictions', model.predict(X))
model.save('test')
"""Build a confusion matrix and score metrics from a predictions dataset."""
import numpy as np
from MLlib.metrics import matrix_evolution
from MLlib.utils.misc_utils import read_data

predictions, actual = read_data("datasets/metrics_dataset.txt")

# First row of the transposed prediction array is the predicted-label vector.
predicted_labels = np.transpose(predictions)[0]

confusion = matrix_evolution.confusion_matrix(predicted_labels, actual[0])
print(confusion)

matrix_evolution.score_metrics(predicted_labels, actual[0])
"""Flag outliers in a sample dataset column using the z-score method."""
from MLlib.utils.misc_utils import read_data
from MLlib.models import z_score

# Only the label column is needed; the feature column is unused.
_, labels = read_data("datasets/z_score_dataset.txt")

# threshold_value as per user's choice
z_score.get_outlier(labels[0], threshold_value=3)