Example #1
import numpy as np
from ot.dr import wda


def wda_pre(data_train_center, data_test_center, y_label, direction, reg=100):
    """Project centered train/test data onto `direction` discriminant axes with WDA."""
    # random initial projection with unit-norm columns
    P0 = np.random.randn(data_train_center.shape[1], direction)
    P0 /= np.sqrt(np.sum(P0**2, 0, keepdims=True))

    Pwda, projwda = wda(data_train_center,
                        y_label,
                        direction,
                        reg,
                        k=10,
                        maxiter=30,
                        P0=P0)
    # apply the learned projection to both splits
    data_wda_fit = projwda(data_train_center)
    data_wda_test = projwda(data_test_center)

    return data_wda_fit, data_wda_test
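
A minimal smoke test for the helper above, assuming POT (whose `ot.dr` module needs the autograd/pymanopt extras) is installed; the synthetic data, shapes, and shift are illustrative assumptions, not from the source:

import numpy as np

# hypothetical check: two noisy Gaussian classes in 10-D, reduced to 2-D
rng = np.random.RandomState(0)
y = rng.randint(0, 2, size=100)
X_train = rng.randn(100, 10) + y[:, None]  # class-dependent shift
X_test = rng.randn(40, 10)

# wda_pre expects centered inputs (its arguments are named *_center)
X_train = X_train - X_train.mean(axis=0)
X_test = X_test - X_test.mean(axis=0)

train_2d, test_2d = wda_pre(X_train, X_test, y, direction=2, reg=100)
print(train_2d.shape, test_2d.shape)  # (100, 2) (40, 2)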
Example #2
def fit(self, fd_train: FDataGrid, y_train: np.ndarray):
    """Fit the classifier on the mean of each class of the training samples.

    Arguments:
        - fd_train : training data - FDataGrid
        - y_train : training labels - ndarray of shape (n_samples,)
    """
    X_train = np.transpose(fd_train.data_matrix, axes=(0, 2, 1))
    # compute the mean curve of each class and label them
    mean_outliers = np.mean(X_train[np.where(y_train == 1)[0]], axis=0).transpose()
    mean_inliers = np.mean(X_train[np.where(y_train == 0)[0]], axis=0).transpose()
    train = np.concatenate((mean_outliers, mean_inliers))
    train_lab = np.zeros(len(train))
    train_lab[0:len(mean_outliers)] = 1
    # learn the WDA projection and project the samples
    P0 = np.random.randn(train.shape[1], self.p)
    P0 /= np.sqrt(np.sum(P0**2, 0, keepdims=True))
    self.Pwda, self.projwda = wda(train, train_lab, self.p, self.reg, k=10, maxiter=100, P0=P0)
    train_proj = self.projwda(train)
    # fit the downstream classifier on the projected class means
    self.clf.fit(train_proj, train_lab)
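
The method above references self.p, self.reg, and self.clf, but the enclosing class is not shown. A minimal skeleton it could live in, assuming a scikit-learn-style downstream classifier (the class name and the 1-NN default are assumptions):

from sklearn.neighbors import KNeighborsClassifier


class WDAOutlierClassifier:
    """Hypothetical enclosing class for the fit() method above (name assumed)."""

    def __init__(self, p=2, reg=1.0, clf=None):
        self.p = p        # number of WDA projection directions, read as self.p in fit()
        self.reg = reg    # entropic regularization passed to wda()
        # downstream classifier trained on the projected class means;
        # the 1-NN default is an assumption, any sklearn-style classifier works
        self.clf = clf if clf is not None else KNeighborsClassifier(n_neighbors=1)

    # fit(self, fd_train, y_train) from the example above is a method of this class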
Example #3
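The fragment below (and Example #4) assumes labelled samples xs/ys, test samples xt, and a pylab import that are defined earlier in the script. A plausible preamble in the style of POT's own demos; the dataset name, sizes, and noise padding are assumptions:

import numpy as np
import matplotlib.pylab as pl
import ot
from ot.dr import fda, wda

n = 1000   # samples per dataset (assumed)
nz = 0.2   # noise level (assumed)
xs, ys = ot.datasets.make_data_classif('gaussrot', n, nz=nz)
xt, yt = ot.datasets.make_data_classif('gaussrot', n, nz=nz)

# pad with noise dimensions so the 2-D projection is non-trivial
nbnoise = 8
xs = np.hstack((xs, np.random.randn(n, nbnoise)))
xt = np.hstack((xt, np.random.randn(n, nbnoise)))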
#%% plot samples

pl.figure(1)

pl.scatter(xs[:, 0], xs[:, 1], c=ys, marker='+', label='Source samples')
pl.legend(loc=0)
pl.title('Discriminant dimensions')

#%% Compute WDA
p = 2
reg = 1
k = 10
maxiter = 100

P, proj = wda(xs, ys, p, reg, k, maxiter=maxiter)

#%% plot samples

xsp = proj(xs)
xtp = proj(xt)

pl.figure(2, (10, 5))

pl.subplot(1, 2, 1)
pl.scatter(xsp[:, 0], xsp[:, 1], c=ys, marker='+', label='Projected samples')
pl.legend(loc=0)
pl.title('Projected training samples')

pl.subplot(1, 2, 2)
pl.scatter(xtp[:, 0], xtp[:, 1], c=ys, marker='+', label='Projected samples')
pl.legend(loc=0)
pl.title('Projected test samples')
Example #4
#%% Compute FDA
p = 2

Pfda, projfda = fda(xs, ys, p)

##############################################################################
# Compute Wasserstein Discriminant Analysis
# -----------------------------------------

#%% Compute WDA
p = 2
reg = 1e0
k = 10
maxiter = 100

Pwda, projwda = wda(xs, ys, p, reg, k, maxiter=maxiter)


##############################################################################
# Plot 2D projections
# -------------------

#%% plot samples

xsp = projfda(xs)
xtp = projfda(xt)

xspw = projwda(xs)
xtpw = projwda(xt)

pl.figure(2)
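
# The listing stops right after opening figure 2. A plausible continuation,
# modelled on POT's own WDA demo (the 2x2 subplot layout and titles are
# assumptions, not from the source): compare FDA and WDA projections side by side.

pl.subplot(2, 2, 1)
pl.scatter(xsp[:, 0], xsp[:, 1], c=ys, marker='+', label='Projected samples')
pl.legend(loc=0)
pl.title('Projected training samples FDA')

pl.subplot(2, 2, 2)
pl.scatter(xtp[:, 0], xtp[:, 1], c=ys, marker='+', label='Projected samples')
pl.legend(loc=0)
pl.title('Projected test samples FDA')

pl.subplot(2, 2, 3)
pl.scatter(xspw[:, 0], xspw[:, 1], c=ys, marker='+', label='Projected samples')
pl.legend(loc=0)
pl.title('Projected training samples WDA')

pl.subplot(2, 2, 4)
pl.scatter(xtpw[:, 0], xtpw[:, 1], c=ys, marker='+', label='Projected samples')
pl.legend(loc=0)
pl.title('Projected test samples WDA')

pl.tight_layout()
pl.show()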