Code example #1
0
class DiscriminantAnalysisModel(Classifier):
    """Model using LDA or QDA (linear discriminant analysis / quadratic
       discriminant analysis).
       LDA is a classifier with a linear decision boundary, generated by
       fitting class conditional densities to the data and using Bayes' rule.

       The model fits a Gaussian density to each class, assuming that all
       classes share the same covariance matrix.

       The fitted model can also be used to reduce the dimensionality of the input
       by projecting it to the most discriminative directions.

       Shrinkage is a tool to improve estimation of covariance matrices in
       situations where the number of training samples is small compared to the
       number of features. In this scenario, the empirical sample covariance is a poor estimator.

       QDA is a classifier with a quadratic decision boundary, also generated by
       fitting class conditional densities to the data and using Bayes' rule.
    """

    def __init__(self,
                 type='lda',
                 solver='svd',
                 shrinkage=None,
                 reg_param=0.0,
                 ranking_size=30):
        """Build the underlying scikit-learn discriminant-analysis classifier.

        Parameters
        ----------
        type : str, default 'lda'
            Which analysis to use: 'lda' (linear) or 'qda' (quadratic).
        solver : str, default 'svd'
            Solver passed to LinearDiscriminantAnalysis; only used when
            type == 'lda'.
        shrinkage : str, float or None, default None
            Shrinkage parameter passed to LinearDiscriminantAnalysis; only
            used when type == 'lda'.
        reg_param : float, default 0.0
            Covariance regularization passed to QuadraticDiscriminantAnalysis;
            only used when type == 'qda'.
        ranking_size : int, default 30
            Stored on the instance; presumably consumed by the Classifier
            base class when ranking predictions — TODO confirm against the
            base class.

        Raises
        ------
        ValueError
            If `type` is neither 'lda' nor 'qda'.
        """
        if type == 'lda':
            self.solver = solver
            self.shrinkage = shrinkage
            self.clf = LinearDiscriminantAnalysis(solver=solver,
                                                  shrinkage=shrinkage)
        elif type == 'qda':
            self.reg_param = reg_param
            self.clf = QuadraticDiscriminantAnalysis(reg_param=reg_param)
        else:
            # ValueError is more precise than a bare Exception and stays
            # backward-compatible: callers catching Exception still catch it.
            raise ValueError(f"Unknown type of discriminant analysis: '{type}'")

        self.type = type
        self.ranking_size = ranking_size

    def transform(self, X):
        """Reduce the dimensionality of the input
           by projecting it to the most discriminative directions.
        """
        return self.clf.transform(X)
Code example #2
0
        spikes.times, spikes.clusters, times)
    trial_blocks = (trials.probabilityLeft[((
        (trials.probabilityLeft > 0.55)
        | (trials.probabilityLeft < 0.45)))] > 0.55).astype(int)

    # Transform to LDA with leave-one-out cross validation
    print(
        'Projecting to LDA axis with leave-one-out cross-validation [%d of %d]'
        % (i + 1, sessions.shape[0]))
    resp = np.rot90(spike_counts)
    loo = LeaveOneOut()
    qda_transform = np.zeros(resp.shape[0])
    for train_index, test_index in loo.split(resp):
        qda = QDA()
        qda.fit(resp[train_index], trial_blocks[train_index])
        qda_transform[test_index] = np.rot90(qda.transform(
            resp[test_index]))[0]
    qda_convolve = np.convolve(qda_transform,
                               np.ones((10, )) / 10,
                               mode='same')

    # Plot
    fig, ax1 = plt.subplots(1, 1, figsize=(12, 8))
    sns.set(style="ticks", context="paper", font_scale=2)
    ax1.plot(np.arange(1, trial_times.shape[0] + 1),
             qda_transform,
             color=[0.6, 0.6, 0.6])
    ax1.plot(np.arange(1, trial_times.shape[0] + 1), qda_convolve, 'k', lw=3)
    ax1.set_ylabel('Position along LDA axis')
    ax1.set(ylim=[-6, 6])
    ax2 = ax1.twinx()
    ax2.plot(np.arange(1, trial_times.shape[0] + 1),