Example #1
# Exclude root (all-lowercase acronyms such as 'root' are not actual brain regions)
decoding_result = decoding_result.reset_index()
incl_regions = [i for i, j in enumerate(decoding_result['region']) if not j.islower()]
decoding_result = decoding_result.loc[incl_regions]

# Drop duplicates
decoding_result = decoding_result[~decoding_result.duplicated(subset=['region', 'eid', 'probe'])]

# Calculate accuracy over chance
decoding_result['acc_over_chance'] = (decoding_result['accuracy']
                                      - decoding_result['chance_accuracy']) * 100

# Remove cortical layers from brain region map
ba = atlas.AllenAtlas(25)
all_regions = combine_layers_cortex(ba.regions.acronym)

# Calculate average decoding performance per region
decode_regions = []
accuracy = []
for region in decoding_result['region'].unique():
    region_mask = decoding_result['region'] == region
    if region_mask.sum() >= MIN_REC:
        decode_regions.append(region)
        accuracy.append(decoding_result.loc[region_mask, 'acc_over_chance'].mean())
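
# For illustration, an equivalent way to get the same per-region averages (assuming
# there are no missing accuracy values) is a pandas groupby with a recording-count
# filter; region_stats and region_means are extra names shown purely as an
# alternative formulation and are not used further below.
region_stats = decoding_result.groupby('region')['acc_over_chance'].agg(['mean', 'count'])
region_means = region_stats.loc[region_stats['count'] >= MIN_REC, 'mean']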

f, axs1 = plt.subplots(1, 3, figsize=(30, 6))
figure_style(font_scale=2)
if CENTERED:
    plot_atlas(np.array(decode_regions), np.array(accuracy), ML, AP, DV, color_palette='RdBu_r',
               hemisphere='left', minmax=MINMAX, axs=axs1,
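
All of these snippets rely on a combine_layers_cortex helper that is not shown here. As a rough sketch only, pooling cortical layers amounts to stripping the trailing layer label from Allen acronyms; the actual helper may handle edge cases (for example hippocampal CA1/CA3) differently:

import re
import numpy as np

def combine_layers_cortex(acronyms):
    """Sketch of a layer-pooling helper: strip trailing layer labels so that e.g.
    'VISp2/3', 'VISp5' and 'VISp6a' all map to 'VISp'.
    Hypothetical re-implementation, not the original function."""
    combined = []
    for acronym in np.asarray(acronyms, dtype=str):
        if acronym.startswith('CA'):  # keep hippocampal CA1/CA2/CA3 untouched
            combined.append(acronym)
        else:
            combined.append(re.sub(r'[1-6][ab]?(/[1-6])?$', '', acronym))
    return np.array(combined)
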
Example #2
        probes_to_use = spikes.keys()
    else:
        subject = sessions[i]['subject']
        date = sessions[i]['start_time'][:10]
        probes_to_use = spikes.keys()

    # Loop over probes
    for p, probe in enumerate(probes_to_use):
        print('Processing %s (%d of %d)' % (probe, p + 1, len(probes_to_use)))

        # Check if histology is available for this probe
        if not hasattr(clusters[probe], 'acronym'):
            continue

        # Get brain regions and combine cortical layers
        regions = combine_layers_cortex(np.unique(clusters[probe]['acronym']))

        # Decode per brain region
        for r, region in enumerate(np.unique(regions)):
            print('Decoding region %s (%d of %d)' %
                  (region, r + 1, len(np.unique(regions))))

            # Get clusters in this brain region
            region_clusters = combine_layers_cortex(clusters[probe]['acronym'])
            clusters_in_region = clusters[probe].metrics.cluster_id[
                region_clusters == region]

            # Select spikes and clusters
            spks_region = spikes[probe].times[np.isin(spikes[probe].clusters,
                                                      clusters_in_region)]
            clus_region = spikes[probe].clusters[np.isin(spikes[probe].clusters,
                                                         clusters_in_region)]
Example #3
        if probe not in clusters.keys():
            continue

        # Check if histology is available for this probe
        if not hasattr(clusters[probe], 'acronym'):
            continue

        # Check if cluster metrics are available
        if 'metrics' not in clusters[probe]:
            continue

        # Get list of brain regions
        if ATLAS == 'beryl-atlas':
            clusters_regions = remap(clusters[probe]['atlas_id'])
        elif ATLAS == 'allen-atlas':
            clusters_regions = combine_layers_cortex(clusters[probe]['acronym'])

        # Get list of neurons that pass QC
        if INCL_NEURONS == 'pass-QC':
            clusters_pass = np.where(clusters[probe]['metrics']['label'] == 1)[0]
        elif INCL_NEURONS == 'all':
            clusters_pass = np.arange(clusters[probe]['metrics'].shape[0])

        # Process per brain region
        for r, region in enumerate(np.unique(clusters_regions)):

            # Skip non-region entries (all-lowercase acronyms such as 'root')
            if region.islower():
                continue

            print('Processing region %s (%d of %d)' % (region, r + 1, len(np.unique(clusters_regions))))
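
Example #3 builds a per-cluster region label (clusters_regions) and a QC mask (clusters_pass) but the snippet stops before combining them. Purely as an illustrative sketch of how the two selections would typically be intersected before decoding, following the pattern of Examples #2 and #4 and assuming that cluster indices match the IDs used in spikes[probe].clusters:

import numpy as np

def select_region_spikes(spikes_probe, clusters_regions, clusters_pass, region):
    """Hypothetical helper: return spike times and cluster IDs of QC-passing
    clusters assigned to the given region. Assumes cluster indices equal the
    cluster IDs stored in spikes_probe.clusters."""
    in_region = np.where(clusters_regions == region)[0]
    use_clusters = in_region[np.isin(in_region, clusters_pass)]
    mask = np.isin(spikes_probe.clusters, use_clusters)
    return spikes_probe.times[mask], spikes_probe.clusters[mask]

A call such as select_region_spikes(spikes[probe], clusters_regions, clusters_pass, region) would then yield the spks_region and clus_region arrays that the decoding step expects.
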
Example #4
FIG_PATH = join(FIG_PATH, 'Decoding', 'Sessions', DECODER)

# %%
# Load in data
spikes, clusters, channels = bbone.load_spike_sorting_with_channel(EID, aligned=True, one=one)
ses_path = one.path_from_eid(EID)
trials = alf.io.load_object(join(ses_path, 'alf'), 'trials')

# Get trial vectors (keep only the biased blocks, P(left) = 0.8 or 0.2)
incl_trials = (trials.probabilityLeft == 0.8) | (trials.probabilityLeft == 0.2)
trial_times = trials.stimOn_times[incl_trials]
probability_left = trials.probabilityLeft[incl_trials]
trial_blocks = (trials.probabilityLeft[incl_trials] == 0.2).astype(int)

# Get clusters in this brain region
region_clusters = combine_layers_cortex(clusters[PROBE]['acronym'])
clusters_in_region = clusters[PROBE].metrics.cluster_id[region_clusters == REGION]

# Select spikes and clusters
spks_region = spikes[PROBE].times[np.isin(spikes[PROBE].clusters, clusters_in_region)]
clus_region = spikes[PROBE].clusters[np.isin(spikes[PROBE].clusters,
                                             clusters_in_region)]

# Decode block identity
decode_block = decode(spks_region, clus_region, trial_times, trial_blocks,
                      pre_time=PRE_TIME, post_time=POST_TIME,
                      classifier=DECODER, cross_validation='kfold-interleaved',
                      num_splits=5)

shuffle_block = decode(spks_region, clus_region, trial_times, trial_blocks,
                       pre_time=PRE_TIME, post_time=POST_TIME,
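
The decode() helper called in Examples #2 and #4 is likewise not included in these snippets. Purely as an illustration of the kind of computation it performs, the sketch below bins spikes per neuron around each trial time and cross-validates a classifier with interleaved (shuffled) k-fold splits; the logistic-regression classifier and the binning scheme are assumptions, not the original implementation:

import numpy as np
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import KFold, cross_val_score

def decode_sketch(spike_times, spike_clusters, trial_times, trial_labels,
                  pre_time, post_time, num_splits=5):
    """Count spikes per neuron in a window around each trial time and return the
    cross-validated classification accuracy. Hypothetical stand-in for decode()."""
    neuron_ids = np.unique(spike_clusters)
    X = np.zeros((len(trial_times), len(neuron_ids)))
    for t, t0 in enumerate(trial_times):
        in_window = (spike_times > t0 - pre_time) & (spike_times < t0 + post_time)
        idx = np.searchsorted(neuron_ids, spike_clusters[in_window])
        X[t, :] = np.bincount(idx, minlength=len(neuron_ids))
    cv = KFold(n_splits=num_splits, shuffle=True, random_state=42)  # interleaved folds
    scores = cross_val_score(LogisticRegression(max_iter=1000), X, trial_labels, cv=cv)
    return scores.mean()

Shuffling the fold assignment, rather than splitting the session into contiguous blocks of trials, is presumably what the 'kfold-interleaved' option refers to; a null estimate such as shuffle_block would then come from repeating the same procedure with shuffled block labels.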