Code example #1
0
File: gpnh_convex_coding.py — Project: azedarach/reor
    def _update_weights(self):
        """Perform one projected line-search update of the weights.

        Refreshes the cached matrix products, forms a simplex-projected
        gradient step on ``self.Gamma``, accepts a step via line-search,
        and refreshes the SPG step length ``self.alpha_Gamma`` from the
        Barzilai-Borwein style quantities.

        Returns
        -------
        error : cost value reported by the line-search.
        """

        # Cached products consumed by the gradient evaluation.
        self.SSt = self.S.dot(self.S.T)
        self.XSt = self.X.dot(self.S.T)

        self._update_weights_gradient()

        # Search direction: projected gradient step minus current iterate.
        self.incr_Gamma = self.Gamma - self.alpha_Gamma * self.grad_Gamma
        simplex_project_rows(self.incr_Gamma)
        self.incr_Gamma -= self.Gamma

        error, step_size = self._weights_line_search()

        # Gradient change across the accepted step.
        self.delta_grad_Gamma = self.grad_Gamma.copy()
        self._update_weights_gradient()
        self.delta_grad_Gamma = self.grad_Gamma - self.delta_grad_Gamma

        # Barzilai-Borwein quantities feeding the next SPG step length.
        direction = self.incr_Gamma
        sksk = (step_size ** 2) * (direction * direction).sum()
        beta = step_size * (direction * self.delta_grad_Gamma).sum()

        self.alpha_Gamma = get_next_spg_alpha(
            beta, sksk,
            alpha_min=self.line_search_alpha_min,
            alpha_max=self.line_search_alpha_max)

        return error
Code example #2
0
File: gpnh_convex_coding.py — Project: azedarach/reor
    def _update_dictionary(self):
        """Perform one line-search update of the dictionary.

        Refreshes the cached matrix products, forms an (unprojected)
        gradient step on ``self.S``, accepts a step via line-search, and
        refreshes the SPG step length ``self.alpha_S``.

        Returns
        -------
        error : cost value reported by the line-search.
        """

        # Cached products consumed by the gradient evaluation.
        self.SSt = self.S.dot(self.S.T)
        self.GtX = self.Gamma.T.dot(self.X)
        self.GtG = self.Gamma.T.dot(self.Gamma)

        self._update_dictionary_gradient()

        # Search direction; written as (S - alpha * grad) - S to mirror
        # the projected case used for the weights (no projection is
        # applied to the dictionary).
        self.incr_S = self.S - self.alpha_S * self.grad_S
        self.incr_S -= self.S

        error, step_size = self._dictionary_line_search()

        # Gradient change across the accepted step.
        self.delta_grad_S = self.grad_S.copy()
        self._update_dictionary_gradient()
        self.delta_grad_S = self.grad_S - self.delta_grad_S

        # Barzilai-Borwein quantities feeding the next SPG step length.
        direction = self.incr_S
        sksk = (step_size ** 2) * (direction * direction).sum()
        beta = step_size * (direction * self.delta_grad_S).sum()

        self.alpha_S = get_next_spg_alpha(
            beta, sksk,
            alpha_min=self.line_search_alpha_min,
            alpha_max=self.line_search_alpha_max)

        return error
Code example #3
0
    def _update_parameters(self):
        """Update parameters using line-search.

        Takes a projected gradient step on the order weights (projected
        onto the probability simplex as a vector) and on each lagged
        transition matrix (columns projected onto the simplex), accepts
        a step via line-search, then refreshes the shared SPG step
        length ``self.alpha_parameters`` from Barzilai-Borwein style
        quantities aggregated over both parameter sets.

        Returns
        -------
        error : cost value reported by the line-search.
        """

        self._update_predicted_weights()
        self._update_parameters_gradient()

        # Candidate gradient steps for both parameter sets, taken with
        # the shared step length.
        self.incr_order_weights = (
            self.order_weights -
            self.alpha_parameters * self.grad_order_weights)
        self.incr_transition_matrices = (
            self.transition_matrices -
            self.alpha_parameters * self.grad_transition_matrices)

        # Project the stepped order weights onto the probability simplex.
        self.incr_order_weights = simplex_project_vector(
            self.incr_order_weights)

        # Project the columns of each lagged transition matrix onto the
        # simplex (in place).
        n_lags = self.lags.size
        for i in range(n_lags):
            simplex_project_columns(self.incr_transition_matrices[i])

        # Turn the projected iterates into increments from the current
        # point; the line-search scales these directions.
        self.incr_order_weights -= self.order_weights
        self.incr_transition_matrices -= self.transition_matrices

        error, step_size = self._parameters_line_search()

        # Record the pre-step gradients so the change in gradient across
        # the accepted step can be measured.
        self.delta_grad_order_weights = self.grad_order_weights.copy()
        self.delta_grad_transition_matrices = self.grad_transition_matrices.copy(
        )

        self._update_parameters_gradient()

        self.delta_grad_order_weights = (self.grad_order_weights -
                                         self.delta_grad_order_weights)
        self.delta_grad_transition_matrices = (
            self.grad_transition_matrices -
            self.delta_grad_transition_matrices)

        # Barzilai-Borwein quantities summed over both parameter sets.
        sksk = step_size**2 * (
            (self.incr_order_weights * self.incr_order_weights).sum() +
            (self.incr_transition_matrices *
             self.incr_transition_matrices).sum())
        beta = step_size * (
            (self.incr_order_weights * self.delta_grad_order_weights).sum() +
            (self.incr_transition_matrices *
             self.delta_grad_transition_matrices).sum())

        self.alpha_parameters = get_next_spg_alpha(
            beta,
            sksk,
            alpha_min=self.line_search_alpha_min,
            alpha_max=self.line_search_alpha_max)

        return error
Code example #4
0
File: archetypal_analysis.py — Project: azedarach/reor
    def _update_weights(self):
        """Perform one projected line-search update of the weights.

        Rebuilds the cached kernel products (rescaled by the diagonal
        scale factors when ``delta > 0``), forms a simplex-projected
        gradient step on ``self.S``, accepts a step via line-search, and
        refreshes the SPG step length ``self.alpha_S``.

        Returns
        -------
        error : cost value reported by the line-search.
        """

        scaling = np.diag(self.alpha)
        rescale = self.delta > 0

        # Cached kernel products consumed by the gradient evaluation.
        self.CK = self.C.dot(self.K)
        if rescale:
            self.CK = scaling.dot(self.CK)

        self.CKCt = self.CK.dot(self.C.T)
        if rescale:
            self.CKCt = self.CKCt.dot(scaling)

        self._update_weights_gradient()

        # Search direction: projected gradient step minus current iterate.
        self.incr_S = self.S - self.alpha_S * self.grad_S
        simplex_project_rows(self.incr_S)
        self.incr_S -= self.S

        error, step_size = self._weights_line_search()

        # Gradient change across the accepted step.
        self.delta_grad_S = self.grad_S.copy()
        self._update_weights_gradient()
        self.delta_grad_S = self.grad_S - self.delta_grad_S

        # Barzilai-Borwein quantities feeding the next SPG step length.
        direction = self.incr_S
        sksk = (step_size ** 2) * (direction * direction).sum()
        beta = step_size * (direction * self.delta_grad_S).sum()

        self.alpha_S = get_next_spg_alpha(
            beta, sksk,
            alpha_min=self.line_search_alpha_min,
            alpha_max=self.line_search_alpha_max)

        return error
Code example #5
0
File: archetypal_analysis.py — Project: azedarach/reor
    def _update_dictionary(self):
        """Update the dictionary (and scale factors) using line-search.

        Rebuilds the cached kernel products, takes a simplex-projected
        gradient step on the dictionary ``self.C`` accepted via
        line-search, and refreshes the SPG step length ``self.alpha_C``.
        When ``delta > 0``, the diagonal scale factors ``self.alpha``
        are additionally updated by a gradient step clipped to the box
        ``[1 - delta, 1 + delta]`` with its own line-search.

        Returns
        -------
        error : the larger of the dictionary and scale-factor
            line-search costs.
        """

        self.StS = self.S.T.dot(self.S)

        # Cached kernel products (rescaled when scale factors are active).
        self.CK = self.C.dot(self.K)
        diag_alpha = np.diag(self.alpha)
        if self.delta > 0:
            self.CK = diag_alpha.dot(self.CK)

        self.CKCt = self.CK.dot(self.C.T)
        if self.delta > 0:
            self.CKCt = self.CKCt.dot(diag_alpha)

        self._update_dictionary_gradient()

        # Search direction: projected gradient step minus current iterate.
        self.incr_C = self.C - self.alpha_C * self.grad_C
        simplex_project_rows(self.incr_C)
        self.incr_C -= self.C

        error, step_size = self._dictionary_line_search()

        # Gradient change across the accepted step.
        self.delta_grad_C = self.grad_C.copy()
        self._update_dictionary_gradient()
        self.delta_grad_C = self.grad_C - self.delta_grad_C

        # Barzilai-Borwein quantities for the next SPG step length.
        sksk = step_size**2 * (self.incr_C * self.incr_C).sum()
        beta = step_size * (self.incr_C * self.delta_grad_C).sum()

        self.alpha_C = get_next_spg_alpha(beta,
                                          sksk,
                                          alpha_min=self.line_search_alpha_min,
                                          alpha_max=self.line_search_alpha_max)

        if self.delta > 0:
            self.CK = self.C.dot(self.K)
            self.CKS = self.CK.dot(self.S)
            self.CKCt = self.CK.dot(self.C.T)

            self._update_scale_factors_gradient()

            # Gradient step on the scale factors, clipped to the interval
            # [1 - delta, 1 + delta].  BUG FIX: the bounds were swapped
            # (fmin with 1 - delta, then fmax with 1 + delta), which
            # forced every entry to exactly 1 + delta and made the
            # increment independent of the gradient.
            self.incr_alpha = diag_alpha - self.alpha_alpha * self.grad_alpha
            self.incr_alpha = np.fmin(self.incr_alpha, 1 + self.delta)
            self.incr_alpha = np.fmax(self.incr_alpha, 1 - self.delta)
            self.incr_alpha -= diag_alpha

            scales_error, step_size = self._scale_factors_line_search()

            # Gradient change across the accepted scale-factor step.
            self.delta_grad_alpha = self.grad_alpha.copy()
            self._update_scale_factors_gradient()
            self.delta_grad_alpha = self.grad_alpha - self.delta_grad_alpha

            # Barzilai-Borwein quantities for the scale-factor step length.
            sksk = step_size**2 * (self.incr_alpha * self.incr_alpha).sum()
            beta = step_size * (self.incr_alpha * self.delta_grad_alpha).sum()

            self.alpha_alpha = get_next_spg_alpha(
                beta,
                sksk,
                alpha_min=self.line_search_alpha_min,
                alpha_max=self.line_search_alpha_max)

            # Report the worse of the two sub-problem errors.
            if scales_error > error:
                error = scales_error

        return error