Example #1
    def test(self, X_test):
        # Predict labels for X_test with the dual decision function
        # f(x) = sum_j alpha_j * y_j * K(x_j, x) + b.
        self.X_test = X_test.to_numpy()
        self.num_test_samples, self.num_test_features = self.X_test.shape
        y_predict = np.zeros(self.num_test_samples)
        for i in range(self.num_test_samples):
            decision = 0
            for j in range(self.num_train_samples):
                if self.kernel == "linear":
                    decision += (
                        self.alpha[j] * self.y_train[j] *
                        kernels.linear_kernel(self.X_train[j], self.X_test[i]))
                elif self.kernel == "polynomial":
                    decision += (
                        self.alpha[j] * self.y_train[j] *
                        kernels.polynomial_kernel(self.X_train[j],
                                                  self.X_test[i]))
                elif self.kernel == "RBF":
                    decision += (
                        self.alpha[j] * self.y_train[j] *
                        kernels.RBF_kernel(self.X_train[j], self.X_test[i]))
                else:
                    raise ValueError("Invalid kernel")
            # The sign of the decision value gives the predicted class.
            y_predict[i] = 1 if (decision + self.b) >= 0 else -1
        return y_predict
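The method above assumes a kernels module whose linear_kernel, polynomial_kernel, and RBF_kernel each map two sample vectors to a scalar; that module is not shown here. A minimal sketch of such functions, with degree, coef0, and gamma as assumed defaults:

import numpy as np

def linear_kernel(x1, x2):
    # Plain inner product <x1, x2>.
    return np.dot(x1, x2)

def polynomial_kernel(x1, x2, degree=3, coef0=1.0):
    # (<x1, x2> + coef0) ** degree.
    return (np.dot(x1, x2) + coef0) ** degree

def RBF_kernel(x1, x2, gamma=0.1):
    # exp(-gamma * ||x1 - x2||^2).
    return np.exp(-gamma * np.sum((x1 - x2) ** 2))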
Example #2
    def __init__(self, kernel=kernels.linear_kernel(), C=1):
        self.kernel = kernel
        self.C = C

        self.lagr_multipliers = None
        self.support_vectors = None

        self.quad_term = None
        self.radius_sqr = None
Example #3
    def __init__(self, threshold=1e-10, C=None, kernel=linear_kernel()):
        self.threshold = threshold
        self.C = C
        self.lagr_multipliers = None
        self.intercept = 0
        self.support_vectors_feature = None
        self.support_vectors_label = None
        self.kernel = kernel
        self.X, self.y = None, None
        self.n, self.p = 0, 0
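Examples #2 and #3 both pass linear_kernel() already called as the constructor default, which suggests that in those projects the kernel helpers are factories returning a callable of two samples (unlike the kernels.linear_kernel(x_i, x_j) form of Example #1). A minimal sketch of that factory pattern, purely as an assumption:

import numpy as np

def linear_kernel():
    # Factory: returns a kernel function k(x1, x2) = <x1, x2>.
    def k(x1, x2):
        return np.dot(x1, x2)
    return k

The stored self.kernel can then be applied as self.kernel(x_i, x_j) when the Gram matrix is built.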
Example #4
    def create_gram_matrix(self):
        # Pairwise kernel evaluations over the training set: G[i, j] = K(x_i, x_j).
        gram = np.zeros((self.num_train_samples, self.num_train_samples))
        for i in range(self.num_train_samples):
            for j in range(self.num_train_samples):
                if self.kernel == 'linear':
                    gram[i, j] = kernels.linear_kernel(self.X_train[i],
                                                       self.X_train[j])
                elif self.kernel == 'polynomial':
                    gram[i, j] = kernels.polynomial_kernel(
                        self.X_train[i], self.X_train[j])
                elif self.kernel == 'RBF':
                    gram[i, j] = kernels.RBF_kernel(self.X_train[i],
                                                    self.X_train[j])
                else:
                    raise ValueError("Invalid kernel")
        return gram
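The nested loop above evaluates the kernel once per pair of training samples. For the linear case the same Gram matrix can be obtained with a single matrix product; a minimal sketch, assuming X_train is an (n_samples, n_features) NumPy array:

import numpy as np

def linear_gram_matrix(X_train):
    # G[i, j] = <x_i, x_j> for every pair of training samples.
    return X_train @ X_train.T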
Example #5
    def __init__(self, kernel=linear_kernel()):
        self.X = None
        self.W = None
        self.kernel = kernel
        self.n = 0
Example #6
if compute_linear_kernel:
    print("Compute linear kernel")

    preprocessed_train_filename = "grey_Xtr.csv"
    print("\tLoad {} ... ".format(preprocessed_train_filename),
          end="",
          flush=True)
    grey_X_train = data_loader.load_data(preprocessed_train_filename)
    print("OK")

    print("\tCompute linear Kernel on {} ... ".format(
        preprocessed_train_filename),
          end="",
          flush=True)
    K_train = kernels.linear_kernel(grey_X_train, grey_X_train)
    print("OK")

    K_train_filename = "grey_Ktr.csv"
    print("\tStore linear Kernel in {} ... ".format(K_train_filename),
          end="",
          flush=True)
    K_train.tofile(path_to_data + K_train_filename)
    print("OK", end="\n\n")

    preprocessed_test_filename = "grey_Xte.csv"
    print("\tLoad {} ... ".format(preprocessed_test_filename),
          end="",
          flush=True)
    grey_X_test = data_loader.load_data(preprocessed_test_filename)
    print("OK")
Example #7
# Standardize the data so each feature has mean 0 and variance 1
bc_x_std = StandardScaler().fit_transform(bc_x)

# Baseline logistic regression on the standardized features
reg_score = cross_val_score(LogisticRegression(),
                            bc_x_std,
                            bc_y,
                            scoring='accuracy',
                            cv=5)
reg_results = reg_score.mean()
print('Normal log reg results:')
print(reg_results)

# Linear-kernel logistic regression
bc_linear = AJ_kernels.linear_kernel(bc_x_std)
linear_reg_score = cross_val_score(LogisticRegression(),
                                   bc_linear,
                                   bc_y,
                                   scoring='accuracy',
                                   cv=5)
linear_reg_results = linear_reg_score.mean()
print('Linear kernelized log reg results:')
print(linear_reg_results)
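# AJ_kernels is not shown here; the stand-ins below are a hypothetical sketch
# of what its functions presumably compute: an (n, n) kernel matrix over the
# samples, which is then handed to LogisticRegression as a feature matrix.
import numpy as np

def linear_kernel_sketch(X):
    # K[i, j] = <x_i, x_j>.
    return X @ X.T

def rbf_kernel_sketch(X, gamma):
    # K[i, j] = exp(-gamma * ||x_i - x_j||^2) via pairwise squared distances.
    sq_norms = np.sum(X ** 2, axis=1)
    d2 = sq_norms[:, None] + sq_norms[None, :] - 2.0 * (X @ X.T)
    return np.exp(-gamma * np.maximum(d2, 0.0))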

# Gaussian RBF-kernel logistic regression
rbf_reg_results = []
for gamma in range(100):
    bc_rbf = AJ_kernels.rbf_kernel(bc_x_std, gamma)
    rbf_reg_score = cross_val_score(LogisticRegression(),
                                    bc_rbf,