Example 1
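The method below is shown without its module-level imports. Based on the names it uses (np, dot, inv, fs, fpca), a plausible preamble, assuming the snippet comes from the fdasrsf package, would be:

# Assumed imports for the snippet below (not shown in the original source):
import numpy as np
from numpy import dot
from numpy.linalg import inv
import fdasrsf as fs           # assumed alias; fs must expose smooth_data and fdawarp
import fdasrsf.fPCA as fpca    # assumed alias; fpca must expose fdajpca, fdavpca, fdahpca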
    def calc_model(self,
                   pca_method="combined",
                   no=5,
                   smooth_data=False,
                   sparam=25,
                   parallel=False,
                   C=None):
        """
        This function identifies a regression model with phase variability
        using elastic PCA.

        :param pca_method: string specifying the PCA method (options: "combined",
                        "vert", or "horiz"; default = "combined")
        :param no: integer specifying the number of principal components (default = 5)
        :param smooth_data: smooth the data using a box filter (default = False)
        :param sparam: number of times to apply the box filter (default = 25)
        :param parallel: run the alignment in parallel (default = False)
        :param C: scale balance parameter for the combined method (default = None)
        """

        if smooth_data:
            self.f = fs.smooth_data(self.f, sparam)

        N1 = self.f.shape[1]

        # Align Data
        self.warp_data = fs.fdawarp(self.f, self.time)
        self.warp_data.srsf_align(parallel=parallel)

        # Calculate PCA
        if pca_method == 'combined':
            out_pca = fpca.fdajpca(self.warp_data)
        elif pca_method == 'vert':
            out_pca = fpca.fdavpca(self.warp_data)
        elif pca_method == 'horiz':
            out_pca = fpca.fdahpca(self.warp_data)
        else:
            raise Exception('Invalid fPCA Method')
        out_pca.calc_fpca(no)

        # OLS using PCA basis
        lam = 0                               # ridge penalty weight (0 => plain OLS)
        R = 0                                 # ridge penalty matrix placeholder
        Phi = np.ones((N1, no + 1))           # design matrix: intercept + fPCA scores
        Phi[:, 1:(no + 1)] = out_pca.coef
        xx = dot(Phi.T, Phi)
        inv_xx = inv(xx + lam * R)            # (Phi'Phi + lam*R)^(-1)
        xy = dot(Phi.T, self.y)
        b = dot(inv_xx, xy)                   # least-squares coefficients
        alpha = b[0]                          # intercept
        b = b[1:no + 1]                       # coefficients on the fPCA scores

        # compute the SSE
        int_X = np.zeros(N1)
        for ii in range(0, N1):
            int_X[ii] = np.sum(out_pca.coef[ii, :] * b)

        SSE = np.sum((self.y - alpha - int_X)**2)

        self.alpha = alpha
        self.b = b
        self.pca = out_pca
        self.SSE = SSE
        self.pca_method = pca_method

        return
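Since calc_model is a method, it needs an enclosing object holding f, y, and time. A minimal usage sketch, assuming the method belongs to fdasrsf's elastic_pcr_regression class with an (f, y, time) constructor (class name and import path are assumptions, and the data below is synthetic):

# Hypothetical usage; class name and constructor signature are assumptions.
import numpy as np
from fdasrsf.pcr_regression import elastic_pcr_regression  # assumed import path

M, N = 101, 20
time = np.linspace(0, 1, M)
f = np.cumsum(np.random.randn(M, N), axis=0)   # toy functional data, shape (M, N)
y = np.random.randn(N)                          # one scalar response per function

model = elastic_pcr_regression(f, y, time)
model.calc_model(pca_method="combined", no=5)
print(model.alpha, model.b, model.SSE)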
Example 2
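As in the first example, the module-level imports are not shown. A plausible preamble, inferred from the names used in the body (np, fs, fpca, rg, fmin_l_bfgs_b) and assuming the fdasrsf package, would be:

# Assumed imports for the snippet below (not shown in the original source):
import numpy as np
from scipy.optimize import fmin_l_bfgs_b
import fdasrsf as fs                 # assumed alias; fs must expose smooth_data and fdawarp
import fdasrsf.fPCA as fpca          # assumed alias; fpca must expose fdajpca, fdavpca, fdahpca
import fdasrsf.regression as rg      # assumed alias; rg must expose mlogit_loss / mlogit_gradient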
    def calc_model(self,
                   pca_method="combined",
                   no=5,
                   smooth_data=False,
                   sparam=25,
                   parallel=False):
        """
        This function identifies a multinomial logistic regression model with
        phase variability using elastic PCA.

        The object is expected to already hold the data:
        self.f, a numpy ndarray of shape (M,N) of N functions with M samples;
        self.y, a numpy array of N response labels; and
        self.time, a vector of size M describing the sample points.

        :param pca_method: string specifying the PCA method (options: "combined",
                        "vert", or "horiz"; default = "combined")
        :param no: integer specifying the number of principal components (default = 5)
        :param smooth_data: smooth the data using a box filter (default = False)
        :param sparam: number of times to apply the box filter (default = 25)
        :param parallel: run the alignment in parallel (default = False)
        """

        if smooth_data:
            self.f = fs.smooth_data(self.f, sparam)

        N1 = self.f.shape[1]

        # Align Data
        self.warp_data = fs.fdawarp(self.f, self.time)
        self.warp_data.srsf_align(parallel=parallel)

        # Calculate PCA
        if pca_method == 'combined':
            out_pca = fpca.fdajpca(self.warp_data)
        elif pca_method == 'vert':
            out_pca = fpca.fdavpca(self.warp_data)
        elif pca_method == 'horiz':
            out_pca = fpca.fdahpca(self.warp_data)
        else:
            raise Exception('Invalid fPCA Method')
        out_pca.calc_fpca(no)

        # Multinomial logit fit using the PCA basis
        Phi = np.ones((N1, no + 1))           # design matrix: intercept + fPCA scores
        Phi[:, 1:(no + 1)] = out_pca.coef
        # Find alpha and beta using l_bfgs
        b0 = np.zeros(self.n_classes * (no + 1))
        out = fmin_l_bfgs_b(rg.mlogit_loss,
                            b0,
                            fprime=rg.mlogit_gradient,
                            args=(Phi, self.Y),
                            pgtol=1e-10,
                            maxiter=200,
                            maxfun=250,
                            factr=1e-30)

        b = out[0]
        B0 = b.reshape(no + 1, self.n_classes)   # one column of coefficients per class
        alpha = B0[0, :]                          # per-class intercepts

        # compute the multinomial logit loss at the fitted coefficients
        LL = rg.mlogit_loss(b, Phi, self.Y)

        b = B0[1:no + 1, :]                       # coefficients on the fPCA scores

        self.alpha = alpha
        self.b = b
        self.pca = out_pca
        self.LL = LL
        self.pca_method = pca_method

        return
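A minimal usage sketch for this variant, assuming the method belongs to fdasrsf's elastic_mlpcr_regression class with an (f, y, time) constructor where y holds integer class labels (class name, import path, and label convention are assumptions; the data is synthetic):

# Hypothetical usage; class name and constructor signature are assumptions.
import numpy as np
from fdasrsf.pcr_regression import elastic_mlpcr_regression  # assumed import path

M, N = 101, 30
time = np.linspace(0, 1, M)
f = np.cumsum(np.random.randn(M, N), axis=0)   # toy functional data, shape (M, N)
y = np.random.randint(0, 3, size=N)            # integer class labels for 3 classes

model = elastic_mlpcr_regression(f, y, time)
model.calc_model(pca_method="combined", no=5)
print(model.alpha.shape, model.b.shape, model.LL)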