Example #1
0
def main():
    """Rank features of the german1 selection and prune them one step at a time.

    Prints the Lagrangian weight vector, then the compactness statistics for
    the full feature set and for every pruned subset down to 2 columns.
    """
    # X, types, y = ToFormNumpy("D:\\german.txt")
    X, types, y = ToFormNumpy("D:\\german1.txt")
    # X, types, y = ToFormNumpy("D:\\tanlanmalar\\german.txt")

    # minmax_scale(X, copy=False)
    # Normalizing_Estmation(X, y)

    # Class labels in the file are 1-based; shift them to 0-based.
    y -= 1

    _, ln = np.unique(y, return_counts=True)

    # Per-feature Lagrangian weights.
    w = Lagranj1(X, y)
    print(w)

    print(compactness(X, y, types=types, metric=1))

    # Repeatedly discard the minimum-weight feature(s) until 2 columns remain.
    # NOTE(review): `types` is not filtered alongside X here — confirm the
    # misalignment is intended (other snippets in this file do filter it).
    while X.shape[1] > 2:
        keep = w != w.min()
        X, w = X[:, keep], w[keep]
        print(compactness(X, y, types=types, metric=1))
Example #2
0
def main():
    """Grow a weight-thresholded feature subset of the Amazon selection.

    Starting from the minimum-weight features, the threshold is raised until
    at least 2000 features are selected; at each step noisy samples are
    counted and compactness is evaluated on the noise-free subset.
    """
    #X, types, y = ToFormNumpy("D:\\tanlanmalar\\gasterlogy1394.txt")
    #X, types, y = ToFormNumpy("D:\\tanlanmalar\\spame.txt")
    #X, types, y = ToFormNumpy("D:\\tanlanmalar\\MATBIO_MY.txt")
    X, types, y = ToFormNumpy(
        r"D:\Nuu\AI\Selections\Amazon_initial_50_30_10000\data.txt")

    metric = 1

    minmax_scale(X, copy=False)

    w = Lagranj_nd(X, y)

    # Seed the subset with the features carrying the smallest weight.
    threshold = w.min()
    X_Test = np.array(X[:, w == threshold])
    types_Test = np.array(types[w == threshold])

    step = 0
    while X_Test.shape[1] < 2000:
        # Advance the threshold to the next larger weight value.
        threshold = np.min(w[w > threshold])

        selected = w <= threshold
        X_Test = X[:, selected]
        types_Test = types[selected]

        noisy = find_noisy(X_Test, y, types=types_Test, metric=metric)
        clean = np.logical_not(noisy)

        print("\nnoisy = ", len(noisy[noisy == True]))

        compactness(X_Test[clean], y[clean], types=types_Test, metric=metric)

        step += 1
Example #3
0
def main():
    """Evaluate compactness on growing feature subsets of gene-expression data.

    Everything after the first ``return 0`` is unreachable and appears to be
    kept for manual experimentation with the classifiers.
    """
    path_train = r"D:\Nuu\AI\Selections\gene-expression\data_train.csv"
    path_test = r"D:\Nuu\AI\Selections\gene-expression\data_test.csv"

    X_train, types_train, y_train = ReadFromCSVWithHeaderClass(path_train)
    X_test, types_test, y_test = ReadFromCSVWithHeaderClass(path_test)

    #minmax_scale(X_train, copy=False)
    #minmax_scale(X_test, copy=False)
    Normalizing_Estmation(X_train, y_train, types_train)

    print(X_train)

    _, ln = np.unique(y_train, return_counts=True)

    # Per-feature Lagrangian weights; high-weight features enter the subset first.
    w = Lagranj1(X_train, y_train)

    # Lower the weight threshold one distinct value at a time, evaluating
    # compactness on the growing subset.
    # NOTE(review): once `value` reaches w.min(), `w[w < value]` is empty and
    # np.max raises ValueError — the loop seems to terminate via that
    # exception rather than the guard. Confirm this is intended.
    value = w.max()
    cond = w == value
    while len(cond[cond == True]) <= X_train.shape[1]:
        value = np.max(w[w < value])
        cond = w >= value

        compactness(X_train[:, cond], y_train)

    return 0

    # --- Unreachable below this point (dead code after `return 0`). ---
    #cond = [356, 2266, 2358, 2641, 4049, 6280]
    #cond = [356, 2266, 2358, 2641, 2724, 4049]
    #cond = [356, 2266, 2641, 3772, 4049, 4261]
    #cond = [4847]

    #X_train = X_train[:, cond]
    #X_test = X_test[:, cond]

    nnc1 = NearestNeighborClassifier_(noisy=True)
    nnc2 = NearestNeighborClassifier()
    nnc3 = TemplateClassifier(noisy=True)
    nn = MLPClassifier()
    svm = SVC()

    nnc1.fit(X_train, y_train)
    nnc2.fit(X_train, y_train)
    nnc3.fit(X_train, y_train)
    svm.fit(X_train, y_train)
    nn.fit(X_train, y_train)

    mean1 = nnc1.score(X_test, y_test)
    mean2 = nnc2.score(X_test, y_test)
    mean3 = nnc3.score(X_test, y_test)

    mean4 = svm.score(X_test, y_test)
    mean5 = nn.score(X_test, y_test)

    print("NearestNeighborClassifier_", mean1)
    print("NearestNeighborClassifier", mean2)
    print("TemplateClassifier", mean3)
    print("SVC", mean4)
    print("MLPClassifier", mean5)
Example #4
0
def main():
    """Search weight-thresholded subsets of the Amazon selection for compactness.

    Everything after the second ``return 0`` is unreachable and appears to be
    kept for manual cross-validation experiments.
    """
    #X, types, y = ToFormNumpy("D:\\tanlanmalar\\gasterlogy1394.txt")
    X, types, y = ToFormNumpy(r"D:\Nuu\AI\Selections\Amazon_initial_50_30_10000\data.txt")

    #y[y == 2] = 1

    minmax_scale(X, copy=False)
    #minmax_scale(X, copy=False)

    # Per-feature Lagrangian weights.
    w = Lagranj_nd(X, y)

    #return 0

    # Lower the weight threshold one distinct value at a time until 5000
    # features are selected; report compactness once more than 154 are in.
    value = w.max()
    cond = w == value
    while len(cond[cond == True]) < 5000:
        value = np.max(w[w < value])
        cond = w >= value
        if len(cond[cond == True]) > 154:
            compactness(X[:, cond], y, types)

    return 0
    # --- Unreachable below this point (dead code after `return 0`). ---
    print(len(cond[cond == True]))

    X = X[:, cond]
    types = types[cond]

    metric = 1

    #nnc = NearestNeighborClassifier_(noisy=True)
    nnc = NearestNeighborClassifier()
    # nnc = TemplateClassifier(noisy=True)
    nn = MLPClassifier()
    svm = SVC()

    # k random 50/50 splits; scores are averaged over the repetitions.
    k = 10
    mean1 = 0
    mean2 = 0
    mean3 = 0
    for i in range(k):
        X_train, X_test, y_train, y_test = train_test_split(
            X, y, test_size=0.5, random_state=None, shuffle=True)

        nnc.fit(X_train, y_train)
        svm.fit(X_train, y_train)
        nn.fit(X_train, y_train)

        mean1 += nnc.score(X_test, y_test)
        mean2 += svm.score(X_test, y_test)
        mean3 += nn.score(X_test, y_test)

    mean1 /= k
    mean2 /= k
    mean3 /= k

    print(mean1, mean2, mean3)
Example #5
0
def main():
    """Report compactness of the IT_BORI_42_6 selection after min-max scaling."""
    data, feature_types, labels = ToFormNumpy("D:\\tanlanmalar\\IT_BORI_42_6.txt")
    # data, feature_types, labels = ToFormNumpy("D:\\tanlanmalar\\giper_my.txt")

    minmax_scale(data, copy=False)
    # Normalizing_Estmation(data, labels)

    # Class labels in the file are 1-based; shift them to 0-based.
    labels -= 1

    compactness(data, labels, types=feature_types, metric=1)
Example #6
0
    def calculate(self):
        """Run one shells/noisy/etalons/compactness pass and append a table row.

        Reads ``self.X`` / ``self.y``; on every run after the first
        (``self.cal > 0``) the previously detected noisy samples are dropped
        before the shell/etalon/compactness statistics are computed.  Results
        are written into row ``self.cal`` of the ``self.result`` table widget,
        and ``self.cal`` is advanced.  Any failure is reported to the user in
        a message box instead of propagating.
        """
        try:
            noisy_indx = find_noisy(self.X, self.y)
            noisies = self.count(noisy_indx, self.y)

            # From the second run onward, work on the noise-free subset only.
            if self.cal > 0:
                X = self.X[noisy_indx == False]
                y = self.y[noisy_indx == False]
            else:
                X = self.X
                y = self.y

            shells = self.count(find_shell(X, y), y)
            etalons = self.count(find_standard(X, y), y)
            comp = compactness(X, y)

            # Grow the table by one row; columns: shells (0-1), noisies (2-3),
            # etalons (4-5), compactness statistics (6-8).
            self.result.setRowCount(self.cal + 1)

            self.result.setItem(self.cal, 0, QTableWidgetItem(str(shells[0])))
            self.result.setItem(self.cal, 1, QTableWidgetItem(str(shells[1])))

            self.result.setItem(self.cal, 2, QTableWidgetItem(str(noisies[0])))
            self.result.setItem(self.cal, 3, QTableWidgetItem(str(noisies[1])))

            self.result.setItem(self.cal, 4, QTableWidgetItem(str(etalons[0])))
            self.result.setItem(self.cal, 5, QTableWidgetItem(str(etalons[1])))

            self.result.setItem(self.cal, 6, QTableWidgetItem(str(comp[0])))
            self.result.setItem(self.cal, 7, QTableWidgetItem(str(comp[1])))
            self.result.setItem(self.cal, 8, QTableWidgetItem(str(comp[2])))
            self.cal += 1

        except Exception as exc:
            # Broad catch is a deliberate UI boundary: show the error instead
            # of crashing.  Title is Uzbek: "There is an error in the computation".
            QMessageBox.about(self, "Hisoblashda xatolik bor: ", str(exc))
def main():
    """Report compactness of the spame selection after min-max scaling.

    Prints the three compactness statistics returned for metric 1.
    """
    # FIX: the gasterlogy load was dead code — its result was immediately
    # overwritten by the spame load below — so it is now a comment toggle,
    # matching the convention used by the other snippets in this file.
    # X, types, y = ToFormNumpy("D:\\tanlanmalar\\gasterlogy1394.txt")
    X, types, y = ToFormNumpy("D:\\tanlanmalar\\spame.txt")
    # X, types, y = ToFormNumpy("D:\\tanlanmalar\\MATBIO_MY.txt")
    minmax_scale(X, copy=False)

    res = compactness(X, y, types=types, metric=1)

    print(res[0], res[1], res[2])
def main():
    """Prune Amazon features by minimum Lagrangian weight, tracking compactness."""
    path = r"D:\Nuu\AI\Selections\Amazon_initial_50_30_10000\data.txt"

    X, types, y = ToFormNumpy(path)

    minmax_scale(X, copy=False)

    _, ln = np.unique(y, return_counts=True)

    # Per-feature Lagrangian weights.
    weights = Lagranj1(X, y)

    compactness(X, y, types=types, metric=1)

    # Drop the minimum-weight feature(s) until only 2 columns remain.
    # NOTE(review): `types` is not filtered alongside X — confirm intended.
    while X.shape[1] > 2:
        keep = weights != weights.min()
        X, weights = X[:, keep], weights[keep]
        compactness(X, y, types=types, metric=1)
def main():
    """Prune LSVT features by minimum Lagrangian weight, tracking compactness."""
    path = r"D:\Nuu\AI\Selections\LSVT_voice_rehabilitation\data.txt"

    X, types, y = ToFormNumpy(path)

    #minmax_scale(X, copy=False)
    Normalizing_Estmation(X, y)

    _, ln = np.unique(y, return_counts=True)

    # Per-feature Lagrangian weights.
    weights = Lagranj1(X, y)

    compactness(X, y, types=types, metric=1)

    # Drop the minimum-weight feature(s) until only 2 columns remain.
    # NOTE(review): `types` is not filtered alongside X — confirm intended.
    while X.shape[1] > 2:
        keep = weights != weights.min()
        X, weights = X[:, keep], weights[keep]
        compactness(X, y, types=types, metric=1)
def main():
    """Prune gene-expression features by minimum weight, tracking compactness."""
    path = r"D:\Nuu\AI\Selections\gene-expression\data1.csv"

    X, types, y = ReadFromCSVWithHeaderClass(path)

    #minmax_scale(X, copy=False)
    Normalizing_Estmation(X, y)

    _, ln = np.unique(y, return_counts=True)

    # Per-feature Lagrangian weights.
    weights = Lagranj1(X, y)

    compactness(X, y, types=types, metric=1)

    # Drop the minimum-weight feature(s) until only 2 columns remain.
    # NOTE(review): `types` is not filtered alongside X — confirm intended.
    while X.shape[1] > 2:
        keep = weights != weights.min()
        X, weights = X[:, keep], weights[keep]
        compactness(X, y, types=types, metric=1)
Example #11
0
def main():
    """List noisy class-1 samples of the GIPER_MY selection.

    Everything after ``return 0`` is unreachable; it would also raise
    NameError (``nnc`` is only defined in commented-out lines).
    """
    path = r"D:\tanlanmalar\GIPER_MY.txt"

    X, types, y = ToFormNumpy(path)

    # Class labels in the file are 1-based; shift them to 0-based.
    y -= 1

    minmax_scale(X, copy=False)
    # Normalizing_Estmation(X, y)

    print(compactness(X, y, types))
    # NOTE(review): the find_standard result is immediately overwritten by
    # find_noisy — confirm the first call is intentional (side effects?).
    res = find_standard(X, y, types)
    res = find_noisy(X, y, types)

    # Print 1-based indices of noisy samples belonging to class 1, then the count.
    s = 0
    for i in range(res.shape[0]):
        if res[i] == True and y[i] == 1:
            print(i + 1)
            s += 1

    print(s)

    return 0

    # --- Unreachable below this point (dead code after `return 0`). ---
    #nnc = NearestNeighborClassifier_(noisy=True)
    #nnc = NearestNeighborClassifier()
    #nnc = TemplateClassifier(noisy=True)
    nn = MLPClassifier()
    svm = SVC()

    k = 10
    mean1 = 0
    mean2 = 0
    mean3 = 0
    for i in range(k):
        X_train, X_test, y_train, y_test = train_test_split(X,
                                                            y,
                                                            test_size=0.2,
                                                            random_state=None,
                                                            shuffle=True)

        nnc.fit(X_train, y_train)
        svm.fit(X_train, y_train)
        nn.fit(X_train, y_train)

        mean1 += nnc.score(X_test, y_test)
        mean2 += svm.score(X_test, y_test)
        mean3 += nn.score(X_test, y_test)

    mean1 /= k
    mean2 /= k
    mean3 /= k

    print(mean1, mean2, mean3)
Example #12
0
def main():
    path = r"D:\Nuu\AI\Selections\leukemia\leukemia_big.csv"

    X, types, y = ReadFromCSVWithHeaderClass(path)

    #minmax_scale(X, copy=False)
    Normalizing_Estmation(X, y)

    _, ln = np.unique(y, return_counts=True)

    w = Lagranj1(X, y)

    while X.shape[1] > 13:
        cond = w != w.min()
        X = X[:, cond]
        w = w[cond]

        #print(X.shape)
        compactness(X, y, types=types, metric=1)
    """nnc1 = NearestNeighborClassifier_(noisy=True)
Example #13
0
def main():
    """Evaluate compactness of the minimum-weight feature subset of arcene_train."""
    #X, types, y = ToFormNumpy("D:\\tanlanmalar\\gasterlogy1394.txt")
    #X, types, y = ToFormNumpy("D:\\tanlanmalar\\spame.txt")
    #X, types, y = ToFormNumpy("D:\\tanlanmalar\\MATBIO_MY.txt")
    X, types, y = ToFormNumpy("D:\\tanlanmalar\\arcene_train.txt")

    minmax_scale(X, copy=False)

    # Per-feature Lagrangian weights.
    weights = Lagranj_nd(X, y)
    print(weights.shape)

    # Keep only the features carrying the minimum weight.
    lowest = weights == weights.min()
    X_Test = np.array(X[:, lowest])
    types_Test = np.array(types[lowest])

    print(X_Test)

    res = compactness(X_Test, y, types=types_Test, metric=1)
def main():
    """Greedily grow a feature subset of the Amazon selection by compactness.

    Starting from the maximum-weight features, each lower weight level is
    tentatively added and kept only if it improves the compactness statistic
    at index ``number_class``.  Everything after ``return 0`` is unreachable;
    it would also raise NameError (``cond`` is never defined).
    """
    #X, types, y = ToFormNumpy("D:\\tanlanmalar\\gasterlogy1394.txt")
    X, types, y = ToFormNumpy(
        r"D:\Nuu\AI\Selections\Amazon_initial_50_30_10000\data.txt")

    #y[y == 2] = 1

    minmax_scale(X, copy=False)
    #minmax_scale(X, copy=False)

    # Per-feature Lagrangian weights.
    w = Lagranj_nd(X, y)

    unique, ln = np.unique(y, return_counts=True)

    number_class = len(unique)

    #return 0

    # Best-so-far subset: the maximum-weight features.
    value = w.max()
    cond_opt = w == value

    comp_opt = compactness(X[:, cond_opt], y, types)
    print(len(cond_opt[cond_opt == True]), comp_opt, sep="\t")

    # Descend through distinct weight values; accept a candidate subset only
    # when it strictly improves comp[number_class].
    # NOTE(review): once `value` reaches w.min(), `w[w < value]` is empty and
    # np.max raises ValueError unless the guard exits first — confirm.
    while len(cond_opt[cond_opt == True]) < X.shape[1]:
        value = np.max(w[w < value])

        cond_current = np.logical_or(w == value, cond_opt)

        comp_current = compactness(X[:, cond_current], y, types)

        if comp_opt[number_class] < comp_current[number_class]:
            cond_opt = cond_current
            comp_opt = comp_current
            print(len(cond_opt[cond_opt == True]), comp_opt, sep="\t")

    return 0

    # --- Unreachable below this point (dead code after `return 0`). ---
    X = X[:, cond]
    types = types[cond]

    metric = 1

    #nnc = NearestNeighborClassifier_(noisy=True)
    nnc = NearestNeighborClassifier()
    # nnc = TemplateClassifier(noisy=True)
    nn = MLPClassifier()
    svm = SVC()

    k = 10
    mean1 = 0
    mean2 = 0
    mean3 = 0
    for i in range(k):
        X_train, X_test, y_train, y_test = train_test_split(X,
                                                            y,
                                                            test_size=0.5,
                                                            random_state=None,
                                                            shuffle=True)

        nnc.fit(X_train, y_train)
        svm.fit(X_train, y_train)
        nn.fit(X_train, y_train)

        mean1 += nnc.score(X_test, y_test)
        mean2 += svm.score(X_test, y_test)
        mean3 += nn.score(X_test, y_test)

    mean1 /= k
    mean2 /= k
    mean3 /= k

    print(mean1, mean2, mean3)