Example #1
    def calculate(self):
        try:
            noisy_indx = find_noisy(self.X, self.y)
            noisies = self.count(noisy_indx, self.y)

            if self.cal > 0:
                X = self.X[~noisy_indx]
                y = self.y[~noisy_indx]
            else:
                X = self.X
                y = self.y

            shells = self.count(find_shell(X, y), y)
            etalons = self.count(find_standard(X, y), y)
            comp = compactness(X, y)

            self.result.setRowCount(self.cal + 1)

            self.result.setItem(self.cal, 0, QTableWidgetItem(str(shells[0])))
            self.result.setItem(self.cal, 1, QTableWidgetItem(str(shells[1])))

            self.result.setItem(self.cal, 2, QTableWidgetItem(str(noisies[0])))
            self.result.setItem(self.cal, 3, QTableWidgetItem(str(noisies[1])))

            self.result.setItem(self.cal, 4, QTableWidgetItem(str(etalons[0])))
            self.result.setItem(self.cal, 5, QTableWidgetItem(str(etalons[1])))

            self.result.setItem(self.cal, 6, QTableWidgetItem(str(comp[0])))
            self.result.setItem(self.cal, 7, QTableWidgetItem(str(comp[1])))
            self.result.setItem(self.cal, 8, QTableWidgetItem(str(comp[2])))
            self.cal += 1

        except Exception as exc:
            # "Hisoblashda xatolik bor" is Uzbek for "there is an error in the calculation".
            QMessageBox.about(self, "Hisoblashda xatolik bor: ", str(exc))
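The method above fills one row of a QTableWidget with shell, noisy, etalon, and compactness counts, removing noisy objects with a boolean mask once at least one calculation has already been stored. A minimal sketch of that boolean-mask filtering, using NumPy only (the project helper find_noisy is assumed to return such a mask and is replaced here by a hypothetical threshold rule):

import numpy as np

# Hypothetical stand-in for find_noisy: flag objects whose feature exceeds a threshold.
X = np.array([[0.1], [0.9], [0.2], [0.8]])
y = np.array([0, 1, 0, 1])
noisy_mask = X[:, 0] > 0.85            # one boolean flag per object

# Keep only the non-noisy objects, as the method does with `noisy_indx`.
X_clean = X[~noisy_mask]
y_clean = y[~noisy_mask]
print(X_clean.shape, y_clean.shape)    # (3, 1) (3,)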
Example #2
def main():
    #X, types, y = ToFormNumpy("D:\\tanlanmalar\\gasterlogy1394.txt")
    #X, types, y = ToFormNumpy("D:\\tanlanmalar\\spame.txt")
    #X, types, y = ToFormNumpy("D:\\tanlanmalar\\MATBIO_MY.txt")
    X, types, y = ToFormNumpy(
        r"D:\Nuu\AI\Selections\Amazon_initial_50_30_10000\data.txt")

    metric = 1

    minmax_scale(X, copy=False)

    w = Lagranj_nd(X, y)

    value = w.min()
    X_Test = np.array(X[:, w == value])
    types_Test = np.array(types[w == value])

    i = 0

    while X_Test.shape[1] < 2000:

        value = np.min(w[w > value])

        X_Test = X[:, w <= value]
        types_Test = types[w <= value]

        noisy = find_noisy(X_Test, y, types=types_Test, metric=metric)

        cond = np.logical_not(noisy)

        print("\nnoisy = ", len(noisy[noisy == True]))

        compactness(X_Test[cond], y[cond], types=types_Test, metric=metric)

        i += 1
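The loop above grows the evaluated feature subset by repeatedly relaxing the threshold on the Lagranj_nd weights: each pass takes the next-smallest weight above the current value and keeps every column whose weight is at or below it. A self-contained sketch of that threshold-relaxation pattern with synthetic weights (no project helpers involved):

import numpy as np

rng = np.random.default_rng(0)
X = rng.random((5, 10))          # 5 objects, 10 features
w = rng.random(10)               # one weight per feature (stand-in for Lagranj_nd output)

value = w.min()
selected = X[:, w == value]      # start from the minimum-weight column(s)

while selected.shape[1] < 6:     # grow until at least 6 columns are kept
    value = np.min(w[w > value])         # next-smallest weight above the current threshold
    selected = X[:, w <= value]          # keep every column at or below the new threshold
    print(selected.shape[1], "columns selected")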
Example #3
def main():
    path = r"D:\tanlanmalar\GIPER_MY.txt"

    X, types, y = ToFormNumpy(path)

    y -= 1

    minmax_scale(X, copy=False)
    # Normalizing_Estmation(X, y)

    print(compactness(X, y, types))
    res = find_standard(X, y, types)   # result is discarded; overwritten on the next line
    res = find_noisy(X, y, types)

    s = 0
    for i in range(res.shape[0]):
        if res[i] and y[i] == 1:
            print(i + 1)
            s += 1

    print(s)

    return 0

    # NOTE: everything below this `return 0` is unreachable. If the return is
    # removed, one of the commented-out classifiers must be enabled so that
    # `nnc` is defined before it is fitted in the loop.
    #nnc = NearestNeighborClassifier_(noisy=True)
    #nnc = NearestNeighborClassifier()
    #nnc = TemplateClassifier(noisy=True)
    nn = MLPClassifier()
    svm = SVC()

    k = 10
    mean1 = 0
    mean2 = 0
    mean3 = 0
    for i in range(k):
        X_train, X_test, y_train, y_test = train_test_split(X,
                                                            y,
                                                            test_size=0.2,
                                                            random_state=None,
                                                            shuffle=True)

        nnc.fit(X_train, y_train)
        svm.fit(X_train, y_train)
        nn.fit(X_train, y_train)

        mean1 += nnc.score(X_test, y_test)
        mean2 += svm.score(X_test, y_test)
        mean3 += nn.score(X_test, y_test)

    mean1 /= k
    mean2 /= k
    mean3 /= k

    print(mean1, mean2, mean3)
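Example #3 estimates accuracy by averaging the scores of k independent random 80/20 splits. The same repeated-split evaluation can be expressed with scikit-learn's ShuffleSplit and cross_val_score; a hedged sketch using stock classifiers only (the project's NearestNeighborClassifier_ is omitted, and the iris data stands in for GIPER_MY.txt):

from sklearn.datasets import load_iris
from sklearn.model_selection import ShuffleSplit, cross_val_score
from sklearn.neural_network import MLPClassifier
from sklearn.svm import SVC

X, y = load_iris(return_X_y=True)

# 10 random 80/20 splits, mirroring the manual loop over train_test_split.
cv = ShuffleSplit(n_splits=10, test_size=0.2, random_state=0)

for clf in (SVC(), MLPClassifier(max_iter=1000)):
    scores = cross_val_score(clf, X, y, cv=cv, scoring='accuracy')
    print(type(clf).__name__, scores.mean())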
Example #4
    def fit(self, X, y):
        # Check that X and y have correct shape
        X, y = check_X_y(X, y)
        # Store the classes seen during fit
        self.classes_ = unique_labels(y)

        self.X_ = X
        self.y_ = y

        if self.Is_noisy:
            self.noisy = find_noisy(X, y, metric=self.metric)

        # Return the classifier
        return self
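The fit method above follows the scikit-learn estimator convention: validate the inputs with check_X_y, record the class labels with unique_labels, store the training data, and return self. A minimal hedged sketch of a complete estimator built on the same skeleton (the noisy-object detection is replaced by a trivial placeholder, since find_noisy is project-specific):

import numpy as np
from sklearn.base import BaseEstimator, ClassifierMixin
from sklearn.utils.validation import check_X_y, check_array
from sklearn.utils.multiclass import unique_labels

class MajorityClassifier(BaseEstimator, ClassifierMixin):
    """Toy classifier: always predicts the most frequent training class."""

    def fit(self, X, y):
        X, y = check_X_y(X, y)               # same validation as above
        self.classes_ = unique_labels(y)
        self.X_, self.y_ = X, y
        counts = np.bincount(np.searchsorted(self.classes_, y))
        self.majority_ = self.classes_[np.argmax(counts)]
        return self                          # fit must return the estimator

    def predict(self, X):
        X = check_array(X)
        return np.full(X.shape[0], self.majority_)

clf = MajorityClassifier().fit([[0], [1], [1]], [0, 1, 1])
print(clf.predict([[5], [6]]))               # -> [1 1]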
Example #5
def main():
    X, types, y = ToFormNumpy("D:\\tanlanmalar\\gasterlogy1394.txt")
    #X, types, y = ToFormNumpy("D:\\tanlanmalar\\spame.txt")
    # X, types, y = ToFormNumpy("D:\\tanlanmalar\\MATBIO_MY.txt")

    metric = 1

    minmax_scale(X, copy=False)

    noisy = find_noisy(X, y, types=types, metric=metric)

    #for item in noisy:
    #    print(item)

    print(len(noisy))
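find_noisy returns one boolean flag per object, so len(noisy) above is simply the number of samples. To count how many objects were actually flagged (as Example #2 does), the idiomatic NumPy form is:

import numpy as np

noisy = np.array([True, False, True, False, False])   # stand-in for find_noisy output
print(len(noisy))                 # 5 -> number of objects
print(np.count_nonzero(noisy))    # 2 -> number of objects flagged as noisy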
Example #6
def main():

    path = r"D:\Nuu\AI\Selections\leukemia\leukemia_small.csv"

    X, types, y = ReadFromCSVWithHeaderClass(path)

    minmax_scale(X, copy=False)
    # The block below is wrapped in a string literal, i.e. effectively disabled:
    # it selected a feature subset by Lagranj_nd weights and removed noisy objects.
    """w = Lagranj_nd(X)

    value = w.max()
    cond = w == value
    while len(cond[cond == True]) < 661:
        value = np.max(w[w < value])
        cond = w >= value


    print(len(cond[cond == True]))

    X_Test = X[:, w >= value]
    types_Test = types[w >= value]

    metric = 1

    noisy = find_noisy(X_Test, y, types=types_Test, metric=metric)

    cond = np.logical_not(noisy)

    X_Test = X_Test[cond]
    y_Test = y[cond]

    print(X.shape)

"""

    noisy = find_noisy(X, y, types=types)

    cond = np.logical_not(noisy)
    X = X[cond]
    y = y[cond]

    k = 10
    k_fold = KFold(n_splits=k, shuffle=True, random_state=None)
    """
    # Neighbors
    nnc = NearestNeighborClassifier()

    nnc_ = NearestNeighborClassifier_()

    knc = KNeighborsClassifier(n_neighbors=30)


    begin = time.time()
    max_mean1 = 0
    #max_mean1 = CVS(nnc, X_Test, y_Test, cv=k_fold, n_jobs=4, scoring='accuracy').mean()
    end = time.time()
    print("Time: ", (end - begin) * 1000)

    max_mean2 = 0
    max_mean2 = CVS(nnc_, X_Test, y_Test, cv=k_fold, n_jobs=4, scoring='accuracy').mean()

    begin = time.time()
    max_mean3 = 0
    max_mean3 = CVS(knc, X_Test, y_Test, cv=k_fold, n_jobs=4, scoring='accuracy').mean()
    end = time.time()
    print("Time: ", (end - begin) * 1000)

    print(max_mean1, max_mean2, max_mean3)
"""

    nnc = NearestNeighborClassifier_()
    nnc.fit(X, y)

    svm = SVC(kernel="linear")
    #svm.fit(X, y)

    nn = MLPClassifier(hidden_layer_sizes=(100, 200))
    #nn.fit(X, y)

    max_mean = CVS(nnc, X, y, cv=k_fold, n_jobs=4, scoring='accuracy').mean()

    print(max_mean)
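CVS is presumably an alias for scikit-learn's cross_val_score. A hedged sketch of the final evaluation step with a stock classifier and dataset (the project's noise filtering and NearestNeighborClassifier_ are left out):

from sklearn.datasets import load_breast_cancer
from sklearn.model_selection import KFold, cross_val_score as CVS
from sklearn.svm import SVC

X, y = load_breast_cancer(return_X_y=True)

k_fold = KFold(n_splits=10, shuffle=True, random_state=None)
max_mean = CVS(SVC(kernel="linear"), X, y, cv=k_fold, n_jobs=4, scoring='accuracy').mean()
print(max_mean)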