Пример #1
0
    def score(self, X, y):
        """Compute the leave-one-out classification accuracy under the
        learned metric.

        Parameters:
        -----------
        X : array-like
            Training data, shape = [n_features, n_samples]
        y : array-like
            Target values, shape = [n_samples]

        Returns:
        --------
        score : float
            The proportion of X correctly classified
        """
        # nca_cost.cost(...)[0] is the leave-one-out misclassification
        # count; dividing by the number of samples (columns of X) turns
        # it into an error rate, and 1 - error rate is the accuracy.
        n_samples = np.size(X, 1)
        loo_cost = nca_cost.cost(self.metric, X, y)[0]
        return 1 - loo_cost / n_samples
Пример #2
0
    def _survival_nca_cost(self, data, aliveStatus):
        """Accumulate the NCA cost and its gradient across all time points.

        For every time point, patients with known survival status are
        selected, a random subset of size self.N_SUBSET is drawn for
        efficiency, and the per-time-point cost/gradient from
        nca_cost.cost is added to the running totals.
        """

        # Running totals over time points.
        cum_f = 0
        cum_gradf = np.zeros(self.A.shape)

        # Number of time points (columns of aliveStatus).
        n_timepoints = aliveStatus.shape[1]

        for t in range(n_timepoints):

            print("epoch {}: t = {} of {}".format(self.epochs, t, n_timepoints - 1))

            # Survival status at time t; negative entries mark patients
            # whose status is unknown at this time point.
            labels = aliveStatus[:, t]
            known = labels >= 0

            # Skip this time point unless enough patients are known.
            if np.sum(0 + known) < self.N_SUBSET:
                print("skipping current t ...")
                continue

            labels = labels[known]
            features = data[known, :]

            # Draw a random subset of patients (for efficiency).
            subset = np.random.randint(0, features.shape[0], self.N_SUBSET)
            labels = labels[subset]
            features = features[subset, :]

            f, gradf = nca_cost.cost(self.A.T,
                                     features.T,
                                     labels,
                                     SIGMA=self.SIGMA,
                                     LAMBDA=self.LAMBDA)

            cum_f += f
            # Sum of derivatives is the derivative of the sum.
            cum_gradf += gradf.T

        return [cum_f, cum_gradf]
Пример #3
0
# -*- coding: utf-8 -*-

import nca_cost
import numpy as np


N = 300
aux = (np.concatenate([0.5*np.ones((N/2, 1)),
                       np.zeros((N/2, 1)), 1.1*np.ones((N/2, 1))], axis=1))
X = np.concatenate([np.random.rand(N/2, 3),
                    np.random.rand(N/2, 3) + aux])

y = np.concatenate([np.concatenate([np.ones((N/2, 1)), np.zeros((N/2, 1))]),
                    np.concatenate([np.zeros((N/2, 1)), np.ones((N/2, 1))])], axis = 1)
X = X.T
y = y[:, 0]
A = np.array([[1, 0, 0], [0, 1, 0]])
print nca_cost.cost(A, X, y)
print nca_cost.cost_g(A, X, y)
Пример #4
0
# -*- coding: utf-8 -*-

import nca_cost
import numpy as np

N = 300
aux = (np.concatenate([
    0.5 * np.ones((N / 2, 1)),
    np.zeros((N / 2, 1)), 1.1 * np.ones((N / 2, 1))
],
                      axis=1))
X = np.concatenate([np.random.rand(N / 2, 3), np.random.rand(N / 2, 3) + aux])

y = np.concatenate([
    np.concatenate([np.ones(
        (N / 2, 1)), np.zeros((N / 2, 1))]),
    np.concatenate([np.zeros(
        (N / 2, 1)), np.ones((N / 2, 1))])
],
                   axis=1)
X = X.T
y = y[:, 0]
A = np.array([[1, 0, 0], [0, 1, 0]])
print nca_cost.cost(A, X, y)
print nca_cost.cost_g(A, X, y)