# --- Esempio n. 1 (Example 1) ---
    def __init__(self, name):
        """Construct the model matching *name*.

        *name* is compared against the identifiers declared on
        ``Constants``; ``self.model`` stays ``None`` when no
        identifier matches.
        """
        # (identifier, factory) pairs for every supported algorithm.
        factories = (
            (Constants.logistic, LogisticRegression),
            (Constants.linear, LinearRegression),
            (Constants.GBM, GradientBoosting),
            (Constants.naiveBayes, NaiveBayes),
            (Constants.decisionTree, DecisionTree),
            (Constants.randomForest, RandomForest),
            (Constants.SVM, SVM),
            (Constants.xgboost, XGBoost),
        )
        self.model = None
        for identifier, factory in factories:
            if name == identifier:
                self.model = factory()
                break
# --- Esempio n. 2 (Example 2) ---
def trainClassifiers(data, algorithm):
    """Train one binary classifier per class (one-vs-rest scheme).

    For each class label found in *data*, builds a per-class dataset and
    trains a classifier selected by *algorithm*: ``'perceptron'``,
    ``'logistic'`` (5-fold cross-validated), or anything else falls
    through to the passive-aggressive algorithm (C tuned, then 5-fold
    cross-validated).

    Returns ``[listW, listB]`` where each entry i holds the weight row
    vector (shape ``(1, d)``) and bias (shape ``(1, 1)``) of class i's
    classifier.
    """
    # Only the targets are needed here, to discover the set of classes.
    _, t = Initializing.processData(data)
    labels = np.unique(t)
    listW = []
    listB = []
    for i in range(len(labels)):
        # Relabel the full dataset as "class i vs. the rest".
        tmpData = processDataSetPerClass(data, i, algorithm)
        tmpX, tmpT = Initializing.processData(tmpData)
        w, b = Initializing.initialParam(tmpX)
        if algorithm == 'perceptron':
            w, b = Perceptron.train(tmpX, tmpT, w, b)
        elif algorithm == 'logistic':
            # Reset the best-so-far parameters before cross-training.
            LogisticRegression.bestW, LogisticRegression.bestB = Initializing.initialParam(tmpX)
            trainingSet, _ = CrossValidation.makeSets(tmpData)
            # 5-fold cross-validation over the training set.
            kTrainingSets, kValidSets = CrossValidation.kCrossValidationMakeSets(trainingSet, 5)
            # Keep the w and b that achieved the lowest validation error.
            w, b = LogisticRegression.crossTrain(kTrainingSets, kValidSets)
        else:
            trainingSet, _ = CrossValidation.makeSets(tmpData)
            # 5-fold cross-validation over the training set.
            kTrainingSets, kValidSets = CrossValidation.kCrossValidationMakeSets(trainingSet, 5)
            # Tune the aggressiveness parameter C, then cross-train.
            c = PassiveAggressiveAlgorithm.optC(kTrainingSets, kValidSets)
            w, b = PassiveAggressiveAlgorithm.crossTrain(kTrainingSets, kValidSets, c)
        listW.append(np.array(w[0]).reshape(1, len(w[0])))
        listB.append(np.array(b[0]).reshape(1, 1))
    return [listW, listB]
 def plotLogisticRegression(self):
     """Render the logistic-regression plot for the currently selected file."""
     PlotInWindow.fig = Figure()
     self.labelTitle["text"] = "Logistic Regression Algorithm"
     # Validate/normalize the file named in the file label, then draw.
     checkedFile = ReadingFromFile.checkFile(self.labelForFile["text"], "logistic")
     self.drawFigure(LogisticRegression.logisticRegressionPlotInWindow(checkedFile))
# --- Esempio n. 4 (Example 4) ---
def plotAlgorithmInWindow(file, algorithm):
    """Train *algorithm* on the data in *file* and return a figure drawn
    over the held-out test set.

    *algorithm* is ``"perceptron"``, ``"logistic"``, or anything else
    (treated as the passive-aggressive algorithm).
    """
    # Load the samples from disk and split them into train/test sets.
    dataset = ReadingFromFile.readDataFromFile(file, ',')
    trainingSet, testSet = CrossValidation.makeSets(dataset)
    features, targets = Initializing.processData(trainingSet)
    targets = Initializing.checkLabels(targets, algorithm)
    # Build 5 cross-validation folds from the training set.
    kTrainingSets, kValidSets = CrossValidation.kCrossValidationMakeSets(trainingSet, 5)

    if algorithm == "perceptron":
        w, b = Initializing.initialParam(features)
        w, b = Perceptron.train(features, targets, w, b)
    elif algorithm == "logistic":
        # Keep the w and b with the lowest cross-validation error.
        w, b = LogisticRegression.crossTrain(kTrainingSets, kValidSets)
    else:
        # Passive-aggressive: tune the C parameter first, then cross-train.
        c = PassiveAggressiveAlgorithm.optC(kTrainingSets, kValidSets)
        w, b = PassiveAggressiveAlgorithm.crossTrain(kTrainingSets, kValidSets, c)

    xTest, tTest = Initializing.processData(testSet)
    return PlotInWindow.plotInWindow(xTest, w, b, tTest)
# --- Esempio n. 5 (Example 5) ---
def logisticRegressionExample(file):
    """Train logistic regression on the data in *file* and show the result.

    The model is selected by 5-fold cross-validation over the training
    split; the decision line is then plotted against the test split.
    """
    file = ReadingFromFile.checkFile(file, "logistic")
    # Load the samples and split them into training and test sets.
    data = ReadingFromFile.readDataFromFile(file, ',')
    trainingSet, testSet = CrossValidation.makeSets(data)
    # Build 5 cross-validation folds from the training set.
    kTrainingSets, kValidSets = CrossValidation.kCrossValidationMakeSets(trainingSet, 5)
    # Keep the w and b that achieved the lowest validation error.
    w, b = LogisticRegression.crossTrain(kTrainingSets, kValidSets)
    # Draw and evaluate the result over the held-out test set.
    x, t = Initializing.processData(testSet)
    t = Initializing.checkLabels(t, "logistic")
    Plot.plotData(x, t)
    Plot.plotLine(x, w, b)
    plt.show()
# --- Esempio n. 6 (Example 6) ---
#  __Date__     : 1/5/2020.
#  __Author__   :  CodePerfectPlus
#  __Package__  :  Python 3
#  __GitHub__   : https://www.github.com/codeperfectplus
#
from Algorithms import LogisticRegression

# Build a logistic-regression model from the project's Algorithms package.
lr = LogisticRegression()

# Toy training data: three samples with their corresponding target values.
X = [2, 3, 5]
y = [3, 5, 3]
lr.fit(X, y)