Example 1
 def _check_params(self):
     if self.sample_support is None:
         self.sample_support = self.views[0].shape[0]
     self.sample_weights = np.ones(self.views[0].shape[0])
     self.sample_weights /= np.linalg.norm(self.sample_weights)
     self.c = _process_parameter("c", self.c, 2, len(self.views))
     self.positive = _process_parameter("positive", self.positive, False,
                                        len(self.views))
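All of these examples funnel their hyperparameters through the `_process_parameter` helper, whose implementation is not shown on this page. Judging purely from the call sites, it appears to broadcast a scalar (or the given default, when the value is None) into a list with one entry per view. A minimal sketch of that assumed behaviour:

    def _process_parameter(parameter_name, parameter, default, n_views):
        # Assumed behaviour, reconstructed from the call sites on this page:
        # fall back to the default when unset, broadcast scalars to one value
        # per view, and reject lists whose length does not match n_views.
        if parameter is None:
            parameter = [default] * n_views
        if not isinstance(parameter, (list, tuple)):
            parameter = [parameter] * n_views
        if len(parameter) != n_views:
            raise ValueError(
                f"{parameter_name} should have length {n_views}, "
                f"got {len(parameter)}")
        return parameter

    _process_parameter("c", 0.5, 0, 2)   # -> [0.5, 0.5]
    _process_parameter("c", None, 0, 2)  # -> [0, 0]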
Example 2
 def _check_params(self):
     self.kernel = _process_parameter("kernel", self.kernel, "linear",
                                      self.n_views)
     self.gamma = _process_parameter("gamma", self.gamma, None,
                                     self.n_views)
     self.coef0 = _process_parameter("coef0", self.coef0, 1, self.n_views)
     self.degree = _process_parameter("degree", self.degree, 1,
                                      self.n_views)
     self.c = _process_parameter("c", self.c, 0, self.n_views)
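The kernel, gamma, coef0 and degree lists built here line up one-to-one with the keyword arguments of scikit-learn's pairwise_kernels, which is presumably how the per-view kernel matrices are computed later in the class. A hedged sketch of that step (data and parameter values are illustrative):

    import numpy as np
    from sklearn.metrics.pairwise import pairwise_kernels

    views = [np.random.rand(10, 5), np.random.rand(10, 4)]
    kernels, gammas = ["linear", "rbf"], [None, 0.1]
    coef0s, degrees = [1, 1], [1, 1]

    # filter_params=True drops keyword arguments a given kernel does not
    # accept (e.g. gamma is meaningless for the linear kernel).
    Ks = [
        pairwise_kernels(view, metric=kernel, filter_params=True,
                         gamma=gamma, coef0=coef0, degree=degree)
        for view, kernel, gamma, coef0, degree
        in zip(views, kernels, gammas, coef0s, degrees)
    ]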
Example 3
 def _check_params(self):
     if self.c is None:
         warnings.warn(
             "c parameter not set. Setting to c=1 i.e. maximum regularisation of l1 norm"
         )
     self.c = _process_parameter("c", self.c, 1, len(self.views))
     if any(c < 0 or c > 1 for c in self.c):
         raise ValueError(
             "All regularisation parameters should be between 0 and 1. "
             f"c=[{self.c}]")
     shape_sqrts = [np.sqrt(view.shape[1]) for view in self.views]
     self.t = [max(1, x * y) for x, y in zip(self.c, shape_sqrts)]
     self.positive = _process_parameter("positive", self.positive, False,
                                        len(self.views))
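The last two lines convert the relative constraint c in [0, 1] into an absolute l1 bound t = max(1, c * sqrt(p)) per view, scaling by the square root of the view's dimensionality and clamping below at 1 (the usual convention in penalised matrix decomposition). A quick numeric check of that mapping:

    import numpy as np

    views = [np.zeros((100, 16)), np.zeros((100, 25))]  # p1 = 16, p2 = 25
    c = [0.2, 0.5]

    shape_sqrts = [np.sqrt(view.shape[1]) for view in views]  # [4.0, 5.0]
    t = [max(1, x * y) for x, y in zip(c, shape_sqrts)]
    print(t)  # [1, 2.5]: 0.2 * 4 = 0.8 is clamped up to 1; 0.5 * 5 passes through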
Example 4
 def _check_params(self):
     """check number of views=2"""
     if len(self.views) != 2:
         raise ValueError(f"SpanCCA requires only 2 views")
     cov = self.views[0].T @ self.views[1] / self.n
     # Perform SVD on the cross-covariance matrix and keep the leading components
     P, D, Q = np.linalg.svd(cov, full_matrices=True)
     self.P = P[:, :self.rank]
     self.D = D[:self.rank]
     self.Q = Q[:self.rank, :].T
     self.max_obj = 0
     if self.regularisation == "l0":
         self.update = _support_soft_thresh
         self.c = _process_parameter("c", self.c, 0, len(self.views))
     elif self.regularisation == "l1":
         self.update = _delta_search
         self.c = _process_parameter("c", self.c, 0, len(self.views))
     self.positive = _process_parameter("positive", self.positive, False,
                                        len(self.views))
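For reference, the SVD here factorises the empirical cross-covariance between the two views and keeps its leading rank singular vectors. The same computation, standalone on random data (shapes and rank are illustrative):

    import numpy as np

    rng = np.random.default_rng(0)
    n, rank = 100, 3
    X, Y = rng.standard_normal((n, 10)), rng.standard_normal((n, 8))

    cov = X.T @ Y / n                  # (10, 8) cross-covariance estimate
    P, D, Q = np.linalg.svd(cov, full_matrices=True)
    P_r = P[:, :rank]                  # leading left singular vectors, (10, 3)
    D_r = D[:rank]                     # leading singular values
    Q_r = Q[:rank, :].T                # leading right singular vectors, (8, 3)

    # P_r @ np.diag(D_r) @ Q_r.T is the best rank-3 approximation of cov.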
Example 5
 def _check_params(self):
     self.c = _process_parameter("c", self.c, 0, len(self.views))
     self.lam = _process_parameter("lam", self.lam, 1, len(self.views))
     if self.mu is None:
         self.mu = [
             lam / np.linalg.norm(view)**2
             for lam, view in zip(self.lam, self.views)
         ]
     else:
         self.mu = _process_parameter("mu", self.mu, 0, len(self.views))
     self.eta = _process_parameter("eta", self.eta, 0, len(self.views))

     if any(mu <= 0 for mu in self.mu):
         raise ValueError("At least one mu is not positive.")

     _check_Parikh2014(self.mu, self.lam, self.views)

     self.eta = [
         np.ones(view.shape[0]) * eta
         for view, eta in zip(self.views, self.eta)
     ]
     self.z = [np.zeros(view.shape[0]) for view in self.views]
     self.l1_ratio = [1] * len(self.views)
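When mu is left unset, the default lam / ||X||^2 ties each view's step size to its data matrix. Note that np.linalg.norm on a 2-D array is the Frobenius norm, which upper-bounds the spectral norm, so the result is a conservative (and, for non-zero data, always positive) step size for the proximal-style condition that _check_Parikh2014 presumably enforces. A small sketch of the default:

    import numpy as np

    rng = np.random.default_rng(0)
    views = [rng.standard_normal((50, 6)), rng.standard_normal((50, 9))]
    lam = [1, 1]

    # Default from _check_params above: mu = lam / ||X||_F ** 2 per view.
    mu = [l / np.linalg.norm(view) ** 2 for l, view in zip(lam, views)]
    assert all(m > 0 for m in mu)  # passes the positivity check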
Example 6
 def _check_params(self):
     self.c = _process_parameter("c", self.c, 0, len(self.views))
     self.l1_ratio = _process_parameter("l1_ratio", self.l1_ratio, 0,
                                        len(self.views))
     self.positive = _process_parameter("positive", self.positive, False,
                                        len(self.views))
     self.regressors = []
     for alpha, l1_ratio, positive in zip(self.c, self.l1_ratio,
                                          self.positive):
         if self.stochastic:
             self.regressors.append(
                 SGDRegressor(
                     penalty="elasticnet",
                     alpha=alpha / len(self.views),
                     l1_ratio=l1_ratio,
                     fit_intercept=False,
                     tol=self.tol,
                     warm_start=True,
                     random_state=self.random_state,
                 ))
         elif alpha == 0:
             self.regressors.append(
                 Ridge(
                     alpha=self.tol,
                     fit_intercept=False,
                     positive=positive,
                 ))
         else:
             self.regressors.append(
                 ElasticNet(
                     alpha=alpha / len(self.views),
                     l1_ratio=l1_ratio,
                     fit_intercept=False,
                     warm_start=True,
                     positive=positive,
                     random_state=self.random_state,
                 ))
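The three branches trade exactness for scalability: SGDRegressor fits the elastic net stochastically, Ridge with a near-zero alpha of self.tol stands in for alpha == 0 (where coordinate-descent ElasticNet is discouraged for numerical reasons), and ElasticNet covers the rest. All three expose the same fit interface, so the surrounding code can treat them interchangeably; a quick check with illustrative hyperparameters:

    import numpy as np
    from sklearn.linear_model import ElasticNet, Ridge, SGDRegressor

    rng = np.random.default_rng(0)
    X, y = rng.standard_normal((50, 4)), rng.standard_normal(50)

    regressors = [
        SGDRegressor(penalty="elasticnet", alpha=0.05, l1_ratio=0.5,
                     fit_intercept=False, tol=1e-4, warm_start=True,
                     random_state=0),
        Ridge(alpha=1e-4, fit_intercept=False, positive=False),
        ElasticNet(alpha=0.05, l1_ratio=0.5, fit_intercept=False,
                   warm_start=True, positive=False, random_state=0),
    ]
    for reg in regressors:
        reg.fit(X, y)  # identical scikit-learn estimator interface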
Example 7
 def _check_params(self):
     self.c = _process_parameter("c", self.c, 0.0001, len(self.views))
     if any(c <= 0 for c in self.c):
         raise ValueError("All regularisation parameters should be above 0. "
                          f"c=[{self.c}]")
Example 8
 def _check_params(self):
     self.nearest_neighbors = _process_parameter(
         "nearest_neighbors", self.nearest_neighbors, 1, self.n_views
     )
     self.gamma = _process_parameter("gamma", self.gamma, None, self.n_views)
     # Note: the user-supplied kernel is ignored here; passing None makes
     # _process_parameter fall back to the "rbf" default for every view.
     self.kernel = _process_parameter("kernel", None, "rbf", self.n_views)
Example 9
 def _check_params(self):
     self.c = _process_parameter("c", self.c, 0, self.n_views)
Example 10
 def _check_params(self):
     self.c = _process_parameter("c", self.c, 0, self.n_views)
     self.view_weights = _process_parameter(
         "view_weights", self.view_weights, 1, self.n_views
     )