Exemple #1
0
def plotsvd(file, f_size=10, filter_size=1):
    """Plot the spatial SVD components of an STA next to its peak frame.

    Loads an .npz file containing 'sta_unscaled' and 'max_i', cuts the
    STA around its center pixel, runs a singular value decomposition and
    displays the first two spatial components (raw and Gaussian-filtered)
    alongside the brightest frame; the bottom row shows the temporal
    components.

    Parameters
    ----------
    file : str
        Path to the .npz file with 'sta_unscaled' and 'max_i' arrays.
    f_size : int
        Size of the frame cut around the center pixel
        (passed to mf.cut_around_center).
    filter_size : float
        Sigma of the Gaussian filter applied to the spatial components.

    Returns
    -------
    fig : matplotlib.figure.Figure
        The assembled figure.
    """
    data = np.load(file)
    filename = os.path.split(file)[-1].split('.')[0]

    sta = data['sta_unscaled']
    max_i = data['max_i']

    sta, max_i = mf.cut_around_center(sta, max_i, f_size=f_size)

    fit_frame = sta[:, :, max_i[2]]

    sp1, sp2, t1, t2, u, v = mf.svd(sta)

    # scipy.ndimage.filters is a deprecated alias namespace (removed in
    # SciPy 1.15); the filter functions live directly in scipy.ndimage.
    sp1_filtered = ndi.gaussian_filter(sp1, sigma=(filter_size, filter_size))
    sp2_filtered = ndi.gaussian_filter(sp2, sigma=(filter_size, filter_size))
    ff_filtered = ndi.gaussian_filter(fit_frame,
                                      sigma=(filter_size, filter_size))

    plotthese = [fit_frame, sp1, sp2, ff_filtered, sp1_filtered, sp2_filtered]

    fig = plt.figure(dpi=130)
    plt.suptitle('{}\n frame size: {}'.format(filename, f_size))
    rows = 3
    columns = 3
    # Symmetric color scale so that zero maps to the colormap center.
    vmax = np.max(np.abs(sp1))
    vmin = -vmax

    for i in range(6):
        ax = plt.subplot(rows, columns, i+1)
        im = plt.imshow(plotthese[i], vmin=vmin, vmax=vmax,
                        cmap=plf.RFcolormap())
        ax.set_aspect('equal')
        plt.xticks([])
        plt.yticks([])
        # Color-code the subplot frames by column so the filtered and
        # unfiltered versions of the same component share a color.
        for child in ax.get_children():
            if isinstance(child, matplotlib.spines.Spine):
                child.set_color('C{}'.format(i % 3))
                child.set_linewidth(2)
        if i == 0:
            plt.title('center px')
            fig.colorbar(im)
        elif i == 1:
            plt.title('SVD spatial 1')
        elif i == 2:
            plt.title('SVD spatial 2')
        if i == 0:
            plt.ylabel('Non-filtered')
        if i == 3:
            plt.ylabel('Gaussian filtered')

    # Bottom row spans all columns: temporal components.
    ax = plt.subplot(rows, 1, 3)
    plt.plot(sta[max_i[0], max_i[1], :], label='center px')
    plt.plot(t1, label='Temporal 1')
    plt.plot(t2, label='Temporal 2')
    plf.spineless(ax, 'trlb')  # Turn off spines using custom function
    return fig
@author: ycan
"""
import numpy as np
import matplotlib.pyplot as plt
import plotfuncs as plf
import texplot

# Plot how the polarity index of each cell changes between light levels.
data = np.load('/home/ycan/Documents/thesis/analysis_auxillary_files/'
               'thesis_csiplotting.npz')
cells, csi = data['cells'], data['csi']
bias, groups = data['bias'], data['groups']
colorcategories = data['colorcategories']

texplot.texfig(.8, 1)
ax3 = plt.subplot(111)
# One thin line per cell, colored by its group.
for color, group in zip(colorcategories, groups):
    ax3.plot(bias[:, group], color=color, linewidth=.4)
ax3.set_xticks([0, 1])
ax3.set_xticklabels(['Mesopic', 'Photopic'])
ax3.set_ylabel('Polarity Index')
plf.spineless(ax3)

texplot.savefig('polarityindexchange')

plt.show()
Exemple #3
0
                         )

#%%

k_res, mu_res = res['x'][:-1], res['x'][-1]
fig2, axes2 = plt.subplots(1, 1)
[axk] = np.array([axes2]).ravel()
axk.plot(t[:filter_length], k_real, label='Real filter')
#axk.plot(t[:filter_length], k_res/np.abs(k_res).max(), label='Predicted')
axk.plot(t[:filter_length], k_res, label='Predicted')
axk.set_xlabel('Time[s]')
axk.legend()
axk.text(.8, .6, f'mu_real: {mu_real:4.2f}\nmu_res: {mu_res:4.2f}',
         transform=axk.transAxes)
axk.text(.98, .7, f'usegrad: {usegrad:}',
         transform=axk.transAxes, ha='right')
axk.set_ylim([-.8, 1.1])
plf.spineless(axes2, 'tr')
#plt.savefig('/media/owncloud/20181105_meeting_files/GLMsimulated_filter.pdf',
#            bbox_inches='tight')
plt.show()


plt.figure()
pred_fr = glm.glm_fr(k_res, mu_res, time_res)(x)

plt.plot(rate, lw=.6, label='Real firing rate')
plt.plot(pred_fr, lw=.6, label='Predicted firing rate')
#plt.plot(rate/time_res, lw=.6, label='rate/delta')
plt.show()
        expdata = iof.load(exp_name, stim)
        clusters = expdata['clusters']
        preframedur = expdata['preframe_duration']
        stimdur = expdata['stim_duration']
        clusterids = plf.clusters_to_ids(clusters)
        index = [i for i, cl in enumerate(clusterids)
                 if cl == clustertoplot][0]

        fr = expdata['all_frs'][index]
        t = expdata['t']
        baselines = expdata['baselines'][index]

        plotind = [1, 3, 5, 7][i + 2 * j]
        ax = plt.subplot(4, 2, plotind)
        ax.plot(t, fr, 'k', linewidth=.5)
        plf.spineless(ax)
        if cond == 'M':
            plf.drawonoff(ax, preframedur, stimdur, h=.1)
            plf.subplottext(['A', 'B'][j], ax, x=-0.1)
        elif cond != 'M' and j == 0:
            scalebars.add_scalebar(
                ax,
                matchx=False,
                sizex=.5,
                labelx='500 ms',
                matchy=False,
                sizey=30,
                labely='30 Hz',
                #                       labely=fr'{dist_set} $\upmu$m',
                hidey=False,
                barwidth=1.2,
Exemple #5
0
def stripesurround_SVD(exp_name, stimnrs, nrcomponents=5):
    """
    Fit a center-surround model to stripe-flicker STAs after denoising
    them with a low-rank SVD approximation.

    Parameters
    ----------
    exp_name:
        Experiment name or path; resolved via iof.exp_dir_fixer.
    stimnrs:
        A single stimulus number or a list of stimulus numbers.
    nrcomponents:
        first N components of singular value decomposition (SVD)
        will be used to reduce noise.

    For every cluster the spatial profile around the brightest frame is
    fitted with a sum of two one-dimensional Gaussians (center and
    surround). A figure per cluster is saved under
    'stripesurrounds_SVD', and the center-surround indices, polarities
    and inclusion flags are stored in <stimnr>_data_SVD.npz.
    """
    exp_dir = iof.exp_dir_fixer(exp_name)

    if isinstance(stimnrs, int):
        stimnrs = [stimnrs]

    for stimnr in stimnrs:
        data = iof.load(exp_name, stimnr)

        _, metadata = asc.read_spikesheet(exp_dir)
        px_size = metadata['pixel_size(um)']

        clusters = data['clusters']
        stas = data['stas']
        max_inds = data['max_inds']
        filter_length = data['filter_length']
        stx_w = data['stx_w']
        exp_name = data['exp_name']
        stimname = data['stimname']
        frame_duration = data['frame_duration']
        quals = data['quals']

        # Record which clusters are ignored during analysis
        try:
            included = data['included']
        except KeyError:
            included = [True] * clusters.shape[0]

        # Average STA values 100 ms around the brightest frame to
        # minimize noise
        cut_time = int(100 / (frame_duration * 1000) / 2)

        # Tolerance for distance between center and surround
        # distributions 60 μm
        dtol = int((60 / px_size) / 2)

        clusterids = plf.clusters_to_ids(clusters)

        fsize = int(700 / (stx_w * px_size))
        t = np.arange(filter_length) * frame_duration * 1000
        vscale = fsize * stx_w * px_size

        # Use zeros (not np.empty) so clusters that are skipped below
        # do not leave uninitialized garbage in the saved arrays.
        cs_inds = np.zeros(clusters.shape[0])
        polarities = np.zeros(clusters.shape[0])

        savepath = os.path.join(exp_dir, 'data_analysis', stimname)

        for i in range(clusters.shape[0]):
            sta = stas[i]
            max_i = max_inds[i]

            # From this point on, use the low-rank approximation
            # version
            sta_reduced = sumcomponent(nrcomponents, sta)

            try:
                sta_reduced, max_i = msc.cutstripe(sta_reduced, max_i,
                                                   fsize * 2)
            except ValueError as e:
                if str(e) == 'Cutting outside the STA range.':
                    included[i] = False
                    continue
                else:
                    print(f'Error while analyzing {stimname}\n' +
                          f'Index:{i}    Cluster:{clusterids[i]}')
                    raise

            # Isolate the time point from which the fit will
            # be obtained
            if max_i[1] < cut_time:
                max_i[1] = cut_time + 1
            fitv = np.mean(sta_reduced[:, max_i[1] - cut_time:max_i[1] +
                                       cut_time + 1],
                           axis=1)

            # Make a space vector
            s = np.arange(fitv.shape[0])

            # Polarity is -1 (OFF) when the largest deviation from zero
            # is negative, +1 (ON) otherwise.
            if np.max(fitv) != np.max(np.abs(fitv)):
                onoroff = -1
            else:
                onoroff = 1
            polarities[i] = onoroff
            # Determine the peak values for center and surround
            # to give as initial parameters for curve fitting
            centerpeak = onoroff * np.max(fitv * onoroff)
            surroundpeak = onoroff * np.max(fitv * -onoroff)

            # Define initial guesses for the center and surround gaussians
            # First set of values are for center, second for surround.
            p_initial = [centerpeak, max_i[0], 2, surroundpeak, max_i[0], 8]
            if onoroff == 1:
                bounds = ([0, -np.inf, -np.inf, 0, max_i[0] - dtol, 4], [
                    np.inf, np.inf, np.inf, np.inf, max_i[0] + dtol, 20
                ])
            elif onoroff == -1:
                bounds = ([
                    -np.inf, -np.inf, -np.inf, -np.inf, max_i[0] - dtol, 4
                ], [0, np.inf, np.inf, 0, max_i[0] + dtol, 20])

            try:
                popt, _ = curve_fit(centersurround_onedim,
                                    s,
                                    fitv,
                                    p0=p_initial,
                                    bounds=bounds)
            except (ValueError, RuntimeError) as e:
                er = str(e)
                if (er == "`x0` is infeasible."
                        or er.startswith("Optimal parameters not found")):
                    # Fall back to a single (center-only) Gaussian and pad
                    # the surround parameters with zero amplitude.
                    popt, _ = curve_fit(onedgauss, s, fitv, p0=p_initial[:3])
                    popt = np.append(popt, [0, popt[1], popt[2]])
                elif er == "array must not contain infs or NaNs":
                    included[i] = False
                    continue
                else:
                    # Report context and re-raise unexpected fit errors.
                    # (A leftover pdb.set_trace() debugger call was
                    # removed from this branch.)
                    print(f'Error while analyzing {stimname}\n' +
                          f'Index:{i}    Cluster:{clusterids[i]}')
                    raise

            fit = centersurround_onedim(s, *popt)
            popt[0] = popt[0] * onoroff
            popt[3] = popt[3] * onoroff

            # Center-surround index: surround-to-center amplitude ratio.
            csi = popt[3] / popt[0]
            cs_inds[i] = csi

            plt.figure(figsize=(10, 9))
            ax = plt.subplot(121)
            plf.stashow(sta_reduced, ax, extent=[0, t[-1], -vscale, vscale])
            ax.set_xlabel('Time [ms]')
            ax.set_ylabel('Distance [µm]')
            ax.set_title(f'Using first {nrcomponents} components of SVD',
                         fontsize='small')

            ax = plt.subplot(122)
            plf.spineless(ax)
            ax.set_yticks([])
            # We need to flip the vertical axis to match
            # with the STA next to it
            plt.plot(onoroff * fitv, -s, label='Data')
            plt.plot(onoroff * fit, -s, label='Fit')
            plt.axvline(0, linestyle='dashed', alpha=.5)
            plt.title(f'Center: a: {popt[0]:4.2f}, μ: {popt[1]:4.2f},' +
                      f' σ: {popt[2]:4.2f}\n' +
                      f'Surround: a: {popt[3]:4.2f}, μ: {popt[4]:4.2f},' +
                      f' σ: {popt[5]:4.2f}' + f'\n CS index: {csi:4.2f}')
            plt.subplots_adjust(top=.85)
            plt.suptitle(f'{exp_name}\n{stimname}\n{clusterids[i]} ' +
                         f'Q: {quals[i]:4.2f}')
            os.makedirs(os.path.join(savepath, 'stripesurrounds_SVD'),
                        exist_ok=True)
            plt.savefig(os.path.join(savepath, 'stripesurrounds_SVD',
                                     clusterids[i] + '.svg'),
                        bbox_inches='tight')
            plt.close()

        data.update({
            'cs_inds': cs_inds,
            'polarities': polarities,
            'included': included
        })
        np.savez(os.path.join(savepath, f'{stimnr}_data_SVD.npz'), **data)
        print(f'Surround plotted and saved for {stimname}.')
def checkerflickerplusanalyzer(exp_name,
                               stimulusnr,
                               clusterstoanalyze=None,
                               frametimingsfraction=None,
                               cutoff=4):
    """
    Analyzes checkerflicker-like data, typically interspersed
    stimuli in between chunks of checkerflicker.
    e.g. checkerflickerplusmovie, frozennoise

    Parameters:
    ----------
        exp_name:
            Experiment name.
        stimulusnr:
            Number of the stimulus to be analyzed.
        clusterstoanalyze:
            Number of clusters should be analyzed. Default is None.

            First N cells will be analyzed if this parameter is given.
            In case of long recordings it might make sense to first
            look at a subset of cells before starting to analyze
            the whole dataset.

        frametimingsfraction:
            Fraction of the recording to analyze. Should be a number
            between 0 and 1. e.g. 0.3 will analyze the first 30% of
            the whole recording.
        cutoff:
           Worst rating that is wanted for the analysis. Default
           is 4. The source of this value is manual rating of each
           cluster.

    Results (STAs, quality measures, masks etc.) are written to an .npz
    file under <exp_dir>/data_analysis/<stimname>/ and a quality-vs-
    duration figure is saved next to it.
    """
    exp_dir = iof.exp_dir_fixer(exp_name)

    stimname = iof.getstimname(exp_dir, stimulusnr)

    exp_name = os.path.split(exp_dir)[-1]

    clusters, metadata = asc.read_spikesheet(exp_dir, cutoff=cutoff)

    # Check that the inputs are as expected.
    if clusterstoanalyze:
        if clusterstoanalyze > len(clusters[:, 0]):
            warnings.warn('clusterstoanalyze is larger '
                          'than number of clusters in dataset. '
                          'All cells will be included.')
            clusterstoanalyze = None
    if frametimingsfraction:
        if not 0 < frametimingsfraction < 1:
            raise ValueError('Invalid input for frametimingsfraction: {}. '
                             'It should be a number between 0 and 1'
                             ''.format(frametimingsfraction))

    scr_width = metadata['screen_width']
    scr_height = metadata['screen_height']

    refresh_rate = metadata['refresh_rate']

    parameters = asc.read_parameters(exp_dir, stimulusnr)

    stx_h = parameters['stixelheight']
    stx_w = parameters['stixelwidth']

    # Check whether any parameters are given for margins, calculate
    # screen dimensions.
    marginkeys = ['tmargin', 'bmargin', 'rmargin', 'lmargin']
    margins = [parameters.get(key, 0) for key in marginkeys]

    # Subtract bottom and top from vertical dimension; left and right
    # from horizontal dimension
    scr_width = scr_width - sum(margins[2:])
    scr_height = scr_height - sum(margins[:2])

    nblinks = parameters['Nblinks']
    bw = parameters.get('blackwhite', False)

    # Gaussian stimuli are not supported yet, we need to ensure we
    # have a black and white stimulus
    if bw is not True:
        raise ValueError('Gaussian stimuli are not supported yet!')

    seed = parameters.get('seed', -1000)

    sx, sy = scr_height / stx_h, scr_width / stx_w

    # Make sure that the number of stimulus pixels are integers
    # Rounding down is also possible but might require
    # other considerations.
    if sx % 1 == 0 and sy % 1 == 0:
        sx, sy = int(sx), int(sy)
    else:
        raise ValueError('sx and sy must be integers')

    filter_length, frametimings = asc.ft_nblinks(exp_dir, stimulusnr)

    if parameters['stimulus_type'] in [
            'FrozenNoise', 'checkerflickerplusmovie'
    ]:
        runfr = parameters['RunningFrames']
        frofr = parameters['FrozenFrames']
        # To generate the frozen noise, a second seed is used.
        # The default value of this is -10000 as per StimulateOpenGL
        secondseed = parameters.get('secondseed', -10000)

        if parameters['stimulus_type'] == 'checkerflickerplusmovie':
            mblinks = parameters['Nblinksmovie']
            # Retrieve the number of frames (files) from parameters['path']
            ipath = PureWindowsPath(parameters['path']).as_posix()
            repldict = iof.config('stimuli_path_replace')
            for needle, repl in repldict.items():
                ipath = ipath.replace(needle, repl)
            ipath = os.path.normpath(ipath)  # Windows compatibility
            moviefr = len([
                name for name in os.listdir(ipath)
                if os.path.isfile(os.path.join(ipath, name))
                and name.lower().endswith('.raw')
            ])
            noiselen = (runfr + frofr) * nblinks
            movielen = moviefr * mblinks
            triallen = noiselen + movielen

            ft_on, ft_off = asc.readframetimes(exp_dir,
                                               stimulusnr,
                                               returnoffsets=True)
            # Interleave onset and offset times into a single array.
            frametimings = np.empty(ft_on.shape[0] * 2, dtype=float)
            frametimings[::2] = ft_on
            frametimings[1::2] = ft_off

            import math
            ntrials = math.floor(frametimings.size / triallen)
            trials = np.zeros((ntrials, runfr + frofr + moviefr))
            for t in range(ntrials):
                frange = frametimings[t * triallen:(t + 1) * triallen]
                trials[t, :runfr + frofr] = frange[:noiselen][::nblinks]
                trials[t, runfr + frofr:] = frange[noiselen:][::mblinks]
            frametimings = trials.ravel()

            # np.int was removed in NumPy 1.24; use the builtin int.
            filter_length = int(np.round(.666 * refresh_rate / nblinks))

            # Add frozen movie to frozen noise (for masking)
            frofr += moviefr

    savefname = str(stimulusnr) + '_data'

    if clusterstoanalyze:
        clusters = clusters[:clusterstoanalyze, :]
        print('Analyzing first %s cells' % clusterstoanalyze)
        savefname += '_' + str(clusterstoanalyze) + 'cells'
    if frametimingsfraction:
        frametimingsindex = int(len(frametimings) * frametimingsfraction)
        frametimings = frametimings[:frametimingsindex]
        print('Analyzing first {}% of'
              ' the recording'.format(frametimingsfraction * 100))
        savefname += '_' + str(frametimingsfraction).replace('.',
                                                             '') + 'fraction'
    frame_duration = np.average(np.ediff1d(frametimings))
    total_frames = frametimings.shape[0]

    all_spiketimes = []
    # Store spike triggered averages in a list containing correct shaped
    # arrays
    stas = []

    for i in range(len(clusters[:, 0])):
        spiketimes = asc.read_raster(exp_dir, stimulusnr, clusters[i, 0],
                                     clusters[i, 1])

        spikes = asc.binspikes(spiketimes, frametimings)
        all_spiketimes.append(spikes)
        stas.append(np.zeros((sx, sy, filter_length)))

    # Separate out the repeated parts.
    # NOTE(review): runfr/frofr are only bound inside the FrozenNoise/
    # checkerflickerplusmovie branch above; other stimulus types would
    # raise NameError here — confirm only those types reach this point.
    all_spiketimes = np.array(all_spiketimes)
    mask = runfreezemask(total_frames, runfr, frofr, refresh_rate)
    repeated_spiketimes = all_spiketimes[:, ~mask]
    run_spiketimes = all_spiketimes[:, mask]

    # We need to cut down the total_frames by the same amount
    # as spiketimes
    total_run_frames = run_spiketimes.shape[1]
    # To be able to use the same code as checkerflicker analyzer,
    # convert to list again.
    run_spiketimes = list(run_spiketimes)

    # Empirically determined to be best for 32GB RAM
    desired_chunk_size = 21600000

    # Length of the chunks (specified in number of frames)
    chunklength = int(desired_chunk_size / (sx * sy))

    chunksize = chunklength * sx * sy
    nrofchunks = int(np.ceil(total_run_frames / chunklength))

    print(f'\nAnalyzing {stimname}.\nTotal chunks: {nrofchunks}')

    time = startime = datetime.datetime.now()
    timedeltas = []

    quals = np.zeros(len(stas))

    for i in range(nrofchunks):
        randnrs, seed = randpy.ranb(seed, chunksize)
        # Reshape and change 0's to -1's
        stimulus = np.reshape(randnrs,
                              (sx, sy, chunklength), order='F') * 2 - 1
        del randnrs

        # Range of indices we are interested in for the current chunk
        if (i + 1) * chunklength < total_run_frames:
            chunkind = slice(i * chunklength, (i + 1) * chunklength)
            chunkend = chunklength
        else:
            chunkind = slice(i * chunklength, None)
            chunkend = total_run_frames - i * chunklength

        for k in range(filter_length, chunkend - filter_length + 1):
            stim_small = stimulus[:, :,
                                  k - filter_length + 1:k + 1][:, :, ::-1]
            for j in range(clusters.shape[0]):
                spikes = run_spiketimes[j][chunkind]
                if spikes[k] != 0:
                    stas[j] += spikes[k] * stim_small
        qual = np.array([])
        for c in range(clusters.shape[0]):
            qual = np.append(qual, asc.staquality(stas[c]))
        quals = np.vstack((quals, qual))

        # Draw progress bar
        width = 50  # Number of characters
        # Guard against division by zero when there is a single chunk.
        prog = i / max(nrofchunks - 1, 1)
        bar_complete = int(prog * width)
        bar_noncomplete = width - bar_complete
        timedeltas.append(msc.timediff(time))  # Calculate running avg
        avgelapsed = np.mean(timedeltas)
        elapsed = np.sum(timedeltas)
        etc = startime + elapsed + avgelapsed * (nrofchunks - i)
        sys.stdout.flush()
        sys.stdout.write('\r{}{} |{:4.1f}% ETC: {}'.format(
            '█' * bar_complete, '-' * bar_noncomplete, prog * 100,
            etc.strftime("%a %X")))
        time = datetime.datetime.now()
    sys.stdout.write('\n')

    # Remove the first row, which is the zeros placeholder
    # from the initialization above.
    quals = quals[1:, :]

    max_inds = []
    spikenrs = np.array([spikearr.sum() for spikearr in run_spiketimes])

    for i in range(clusters.shape[0]):
        with warnings.catch_warnings():
            # Regex fixed from '.*true_divide*.' (typo) so the
            # divide-by-zero/invalid-value warnings are matched reliably.
            warnings.filterwarnings('ignore', '.*true_divide.*')
            stas[i] = stas[i] / spikenrs[i]
        # Find the pixel with largest absolute value
        max_i = np.squeeze(
            np.where(np.abs(stas[i]) == np.max(np.abs(stas[i]))))
        # If there are multiple pixels with largest value,
        # take the first one.
        if max_i.shape != (3, ):
            try:
                max_i = max_i[:, 0]
            # If max_i cannot be found just set it to zeros.
            except IndexError:
                max_i = np.array([0, 0, 0])

        max_inds.append(max_i)

    print(f'Completed. Total elapsed time: {msc.timediff(startime)}\n' +
          f'Finished on {datetime.datetime.now().strftime("%A %X")}')

    savepath = os.path.join(exp_dir, 'data_analysis', stimname)
    if not os.path.isdir(savepath):
        os.makedirs(savepath, exist_ok=True)
    savepath = os.path.join(savepath, savefname)

    keystosave = [
        'clusters', 'frametimings', 'mask', 'repeated_spiketimes',
        'run_spiketimes', 'frame_duration', 'max_inds', 'nblinks', 'stas',
        'stx_h', 'stx_w', 'total_run_frames', 'sx', 'sy', 'filter_length',
        'stimname', 'exp_name', 'spikenrs', 'clusterstoanalyze',
        'frametimingsfraction', 'cutoff', 'quals', 'nrofchunks', 'chunklength'
    ]
    datadict = {}

    # locals() lookup requires the loop form; a comprehension would
    # create its own scope and miss these names.
    for key in keystosave:
        datadict[key] = locals()[key]

    np.savez(savepath, **datadict)

    t = (np.arange(nrofchunks) * chunklength * frame_duration) / refresh_rate
    qmax = np.max(quals, axis=0)
    qualsn = quals / qmax[np.newaxis, :]

    ax = plt.subplot(111)
    ax.plot(t, qualsn, alpha=0.3)
    plt.ylabel('Z-score of center pixel (normalized)')
    plt.xlabel('Minutes of stimulus analyzed')
    plt.ylim([0, 1])
    plf.spineless(ax, 'tr')
    plt.title(f'Recording duration optimization\n{exp_name}\n {savefname}')
    plt.savefig(savepath + '.svg', format='svg')
    plt.close()
Exemple #7
0
def allonoff(exp_name, stim_nrs):
    """Compare responses to all onoffsteps stimuli of one experiment.

    Overlays each cluster's firing rates from every given stimulus in a
    single figure (saved per cluster), then plots per-stimulus
    histograms of the on-off bias values.
    """
    # A comparison needs at least two onoffsteps stimuli.
    if isinstance(stim_nrs, int) or len(stim_nrs) <= 1:
        print('Multiple onoffsteps stimuli expected, '
              'allonoff analysis will be skipped.')
        return

    exp_dir = iof.exp_dir_fixer(exp_name)
    exp_name = os.path.split(exp_dir)[-1]

    frs = None
    for j, stim in enumerate(stim_nrs):
        data = iof.load(exp_name, stim)
        clusters = data['clusters']
        preframe_duration = data['preframe_duration']
        stim_duration = data['stim_duration']
        t = data['t']

        if frs is None:
            # Shapes are only known after the first stimulus is loaded.
            frs = np.zeros((clusters.shape[0], t.shape[0], len(stim_nrs)))
            bias = np.zeros((clusters.shape[0], len(stim_nrs)))
        frs[:, :, j] = np.array(data['all_frs'])
        bias[:, j] = data['onoffbias']

    clusterids = plf.clusters_to_ids(clusters)
    plotpath = os.path.join(exp_dir, 'data_analysis', 'allonoff')
    if not os.path.isdir(plotpath):
        os.makedirs(plotpath, exist_ok=True)

    # One overlay figure per cluster.
    for i, clusterid in enumerate(clusterids):
        ax = plt.subplot(111)
        for j, stim in enumerate(stim_nrs):
            stimlabel = iof.getstimname(exp_name, stim)
            labeltxt = (stimlabel.replace('onoffsteps_', '')
                        + f' Bias: {bias[i, j]:4.2f}')
            plt.plot(t, frs[i, :, j], alpha=.5, label=labeltxt)
        plt.title(f'{exp_name}\n{clusterid}')
        plt.legend()
        plf.spineless(ax)
        plf.drawonoff(ax, preframe_duration, stim_duration, h=.1)

        plt.savefig(os.path.join(plotpath, clusterid) + '.svg',
                    format='svg',
                    dpi=300)
        plt.close()

    # Distribution of bias values: one histogram row per stimulus.
    _, axes = plt.subplots(len(stim_nrs), 1, sharex=True)
    colors = plt.get_cmap('tab10')

    for i, (ax, stim) in enumerate(zip(axes, stim_nrs)):
        with warnings.catch_warnings():
            warnings.filterwarnings('ignore', category=RuntimeWarning)
            ax.hist(bias[:, i],
                    bins=20,
                    color=colors(i),
                    range=[-1, 1],
                    alpha=.5)

        stimlabel = iof.getstimname(exp_name, stim)
        ax.set_ylabel(stimlabel.replace('onoffsteps_', ''))
        plf.spineless(ax)
    plt.suptitle(f'Distribution of On-Off Indices for {exp_name}')
    plt.subplots_adjust(top=.95)
    plt.xlabel('On-Off index')
    plt.savefig(os.path.join(exp_dir, 'data_analysis', 'onoffindex_dist.svg'),
                format='svg',
                dpi=300)
    plt.close()
Exemple #8
0
def OMBanalyzer(exp_name, stimnr, plotall=False, nr_bins=20):
    """
    Analyze responses to object moving background stimulus. STA and STC
    are calculated.

    Note that there are additional functions that make use of the
    OMB class. This function was written before the OMB class existed

    Parameters
    ----------
    exp_name:
        Experiment name or path (resolved via iof.exp_dir_fixer).
    stimnr:
        Number of the OMB stimulus to analyze.
    plotall:
        If True, each figure is also shown interactively after saving.
    nr_bins:
        Number of bins used for the nonlinearity estimation.

    Figures (one population polar plot plus one figure per cluster) are
    saved as SVG under <exp_dir>/data_analysis/<stimname>/.
    """
    # TODO
    # Add iteration over multiple stimuli

    exp_dir = iof.exp_dir_fixer(exp_name)
    exp_name = os.path.split(exp_dir)[-1]
    stimname = iof.getstimname(exp_dir, stimnr)

    parameters = asc.read_parameters(exp_name, stimnr)
    assert parameters['stimulus_type'] == 'objectsmovingbackground'
    stimframes = parameters.get('stimFrames', 108000)
    preframes = parameters.get('preFrames', 200)
    nblinks = parameters.get('Nblinks', 2)

    # Seeds for the background motion and the object, respectively.
    seed = parameters.get('seed', -10000)
    seed2 = parameters.get('objseed', -1000)

    stepsize = parameters.get('stepsize', 2)

    # Number of displayed frames: each frame stays up for nblinks blinks.
    ntotal = int(stimframes / nblinks)

    clusters, metadata = asc.read_spikesheet(exp_name)

    refresh_rate = metadata['refresh_rate']
    filter_length, frametimings = asc.ft_nblinks(exp_name, stimnr, nblinks,
                                                 refresh_rate)
    frame_duration = np.ediff1d(frametimings).mean()
    # NOTE(review): the last frame time is dropped, presumably so the
    # number of spike bins matches ntotal — confirm against asc.binspikes.
    frametimings = frametimings[:-1]

    if ntotal != frametimings.shape[0]:
        print(f'For {exp_name}\nstimulus {stimname} :\n'
              f'Number of frames specified in the parameters file ({ntotal}'
              f' frames) and frametimings ({frametimings.shape[0]}) do not'
              ' agree!'
              ' The stimulus was possibly interrupted during recording.'
              ' ntotal is changed to match actual frametimings.')
        ntotal = frametimings.shape[0]

    # Generate the numbers to be used for reconstructing the motion
    # ObjectsMovingBackground.cpp line 174, steps are generated in an
    # alternating fashion. We can generate all of the numbers at once
    # (total lengths is defined by stimFrames) and then assign
    # to x and y directions. Although there is more
    # stuff around line 538
    randnrs, seed = randpy.gasdev(seed, ntotal * 2)
    randnrs = np.array(randnrs) * stepsize

    # Alternating values go to x and y step sequences.
    xsteps = randnrs[::2]
    ysteps = randnrs[1::2]

    clusterids = plf.clusters_to_ids(clusters)

    # Binned spike counts per cluster and frame.
    all_spikes = np.empty((clusters.shape[0], ntotal))
    for i, (cluster, channel, _) in enumerate(clusters):
        spiketimes = asc.read_raster(exp_name, stimnr, cluster, channel)
        spikes = asc.binspikes(spiketimes, frametimings)
        all_spikes[i, :] = spikes

    # Collect STA for x and y movement in one array
    stas = np.zeros((clusters.shape[0], 2, filter_length))
    stc_x = np.zeros((clusters.shape[0], filter_length, filter_length))
    stc_y = np.zeros((clusters.shape[0], filter_length, filter_length))
    # Time axis in milliseconds.
    t = np.arange(filter_length) * 1000 / refresh_rate * nblinks
    # Accumulate spike-weighted stimulus history (most recent step first).
    for k in range(filter_length, ntotal - filter_length + 1):
        x_mini = xsteps[k - filter_length + 1:k + 1][::-1]
        y_mini = ysteps[k - filter_length + 1:k + 1][::-1]
        for i, (cluster, channel, _) in enumerate(clusters):
            if all_spikes[i, k] != 0:
                stas[i, 0, :] += all_spikes[i, k] * x_mini
                stas[i, 1, :] += all_spikes[i, k] * y_mini
                # Calculate non-centered STC (Cantrell et al., 2010)
                stc_x[i, :, :] += all_spikes[i, k] * calc_covar(x_mini)
                stc_y[i, :, :] += all_spikes[i, k] * calc_covar(y_mini)

    eigvals_x = np.zeros((clusters.shape[0], filter_length))
    eigvals_y = np.zeros((clusters.shape[0], filter_length))
    eigvecs_x = np.zeros((clusters.shape[0], filter_length, filter_length))
    eigvecs_y = np.zeros((clusters.shape[0], filter_length, filter_length))

    bins_x = np.zeros((clusters.shape[0], nr_bins))
    bins_y = np.zeros((clusters.shape[0], nr_bins))
    spikecount_x = np.zeros(bins_x.shape)
    spikecount_y = np.zeros(bins_x.shape)
    generators_x = np.zeros(all_spikes.shape)
    generators_y = np.zeros(all_spikes.shape)
    # Normalize STAs and STCs with respect to spike numbers
    for i in range(clusters.shape[0]):
        totalspikes = all_spikes.sum(axis=1)[i]
        stas[i, :, :] = stas[i, :, :] / totalspikes
        stc_x[i, :, :] = stc_x[i, :, :] / totalspikes
        stc_y[i, :, :] = stc_y[i, :, :] / totalspikes
        try:
            # np.linalg.eigh returns eigenvalues in ascending order, so
            # index -1 below is the dominant component.
            eigvals_x[i, :], eigvecs_x[i, :, :] = np.linalg.eigh(
                stc_x[i, :, :])
            eigvals_y[i, :], eigvecs_y[i, :, :] = np.linalg.eigh(
                stc_y[i, :, :])
        except np.linalg.LinAlgError:
            # Leave this cluster's eigen-arrays as zeros if the
            # decomposition fails to converge.
            continue
        # Calculate the generator signals and nonlinearities
        # (full convolution trimmed to the original signal length).
        generators_x[i, :] = np.convolve(eigvecs_x[i, :, -1],
                                         xsteps,
                                         mode='full')[:-filter_length + 1]
        generators_y[i, :] = np.convolve(eigvecs_y[i, :, -1],
                                         ysteps,
                                         mode='full')[:-filter_length + 1]
        spikecount_x[i, :], bins_x[i, :] = nlt.calc_nonlin(
            all_spikes[i, :], generators_x[i, :], nr_bins)
        spikecount_y[i, :], bins_y[i, :] = nlt.calc_nonlin(
            all_spikes[i, :], generators_y[i, :], nr_bins)
    savepath = os.path.join(exp_dir, 'data_analysis', stimname)
    if not os.path.isdir(savepath):
        os.makedirs(savepath, exist_ok=True)

    # Calculated based on last eigenvector
    magx = eigvecs_x[:, :, -1].sum(axis=1)
    magy = eigvecs_y[:, :, -1].sum(axis=1)
    r_ = np.sqrt(magx**2 + magy**2)
    theta_ = np.arctan2(magy, magx)
    # To draw the vectors starting from origin, insert zeros every other element
    r = np.zeros(r_.shape[0] * 2)
    theta = np.zeros(theta_.shape[0] * 2)
    r[1::2] = r_
    theta[1::2] = theta_
    plt.polar(theta, r)
    plt.gca().set_xticks(np.pi / 180 * np.array([0, 90, 180, 270]))
    plt.title(f'Population plot for motion STAs\n{exp_name}')
    plt.savefig(os.path.join(savepath, 'population.svg'))
    if plotall:
        plt.show()
    plt.close()

    # Per-cluster summary figure: STAs + eigenvectors (top),
    # eigenvalue spectra (left), nonlinearities (center) and the
    # population polar plot with this cell highlighted (right).
    for i in range(stas.shape[0]):
        stax = stas[i, 0, :]
        stay = stas[i, 1, :]
        ax1 = plt.subplot(211)
        ax1.plot(t, stax, label=r'STA$_{\rm X}$')
        ax1.plot(t, stay, label=r'STA$_{\rm Y}$')
        ax1.plot(t, eigvecs_x[i, :, -1], label='Eigenvector_X 0')
        ax1.plot(t, eigvecs_y[i, :, -1], label='Eigenvector_Y 0')
        plt.legend(fontsize='x-small')

        ax2 = plt.subplot(4, 4, 9)
        ax3 = plt.subplot(4, 4, 13)
        ax2.set_yticks([])
        ax2.set_xticklabels([])
        ax3.set_yticks([])
        ax2.set_title('Eigenvalues', size='small')
        ax2.plot(eigvals_x[i, :],
                 'o',
                 markerfacecolor='C0',
                 markersize=4,
                 markeredgewidth=0)
        ax3.plot(eigvals_y[i, :],
                 'o',
                 markerfacecolor='C1',
                 markersize=4,
                 markeredgewidth=0)
        ax4 = plt.subplot(2, 3, 5)
        # Convert spike counts per bin to a firing rate in Hz.
        ax4.plot(bins_x[i, :], spikecount_x[i, :] / frame_duration)
        ax4.plot(bins_y[i, :], spikecount_y[i, :] / frame_duration)
        ax4.set_ylabel('Firing rate [Hz]')
        ax4.set_title('Nonlinearities', size='small')
        plf.spineless([ax1, ax2, ax3, ax4], 'tr')
        ax5 = plt.subplot(2, 3, 6, projection='polar')
        ax5.plot(theta, r, color='k', alpha=.3)
        # Highlight this cell's vector (zero-origin pair at 2*i).
        ax5.plot(theta[2 * i:2 * i + 2], r[2 * i:2 * i + 2], lw=3)
        ax5.set_xticklabels(['0', '', '', '', '180', '', '270', ''])
        ax5.set_title('Vector sum of X and Y STCs', size='small')
        plt.suptitle(f'{exp_name}\n{stimname}\n{clusterids[i]}')
        plt.subplots_adjust(hspace=.4)
        plt.savefig(os.path.join(savepath, clusterids[i] + '.svg'),
                    bbox_inches='tight')
        if plotall:
            plt.show()
        plt.close()
    keystosave = [
        'nblinks', 'all_spikes', 'clusters', 'frame_duration', 'eigvals_x',
        'eigvals_y', 'eigvecs_x', 'eigvecs_y', 'filter_length', 'magx', 'magy',
        'ntotal', 'r', 'theta', 'stas', 'stc_x', 'stc_y', 'bins_x', 'bins_y',
        'nr_bins', 'spikecount_x', 'spikecount_y', 'generators_x',
        'generators_y', 't'
    ]
    datadict = {}

    for key in keystosave:
        datadict[key] = locals()[key]

    npzfpath = os.path.join(savepath, str(stimnr) + '_data')
    np.savez(npzfpath, **datadict)
Exemple #9
0
        ax.spines[loc].set_color('orange')
        ax.spines[loc].set_linewidth(4)


# Illustration of stimulus frames: draw a stack of random checkerboard
# frames, each set of axes shifted diagonally so the frames fan out
# like a deck of cards.
np.random.seed(0)
fig = plt.figure()
for frame_nr in range(frames):
    shift = frame_nr / 15
    frame = np.random.randint(0, 2, s * s).reshape(s, s)
    ax = fig.add_axes([.5 - shift, .5 - shift, .7, .7])
    ax.matshow(frame, cmap='Greys')
    plt.xticks([])
    plt.yticks([])
    setspines(ax)
    # (optionally save each frame as svg into savedir here)
plt.show()

# Same idea for the stripe stimulus: each frame repeats one random row,
# producing stripes; the frames are fanned out toward the other corner.
np.random.seed(0)
fig = plt.figure()
for frame_nr in range(frames):
    shift = frame_nr / 15
    stripe_frame = np.repeat(np.random.randint(0, 2, s), s).reshape(s, s)
    ax = fig.add_axes([.1 + shift, .1 + shift, .7, .7])
    ax.imshow(stripe_frame, cmap='Greys')
    plt.xticks([])
    plt.yticks([])
    plf.spineless(plt.gca())
    # (optionally save each frame as svg into savedir here)
plt.show()
                        ax1, x=-.5, y=.5, rotation=90, va='center')
        plf.stashow(sta, ax1, extent=[0, t[-1], -vscale, vscale])
        ax1.set_xlabel('Time [ms]')
#        ax1.set_ylabel(r'Distance [$\upmu$m]')
        ax1.set_ylabel(r'Distance [μm]')

        fitv = np.mean(sta[:, max_i[1]-cut_time:max_i[1]+cut_time+1],
                       axis=1)

        s = np.arange(fitv.shape[0])

        ax2 = axes[2*j+1]
        plf.subplottext(['B', 'D'][j], ax2, x=-.1)
        plf.subplottext(f'Center-Surround Index: {csi:4.2f}',
                        ax2, x=.95, y=.15, fontsize=8, fontweight='normal')
        plf.spineless(ax2)
        ax2.set_yticks([])
        ax2.set_xticks([])
        ax2.plot(onoroff*fitv, -s, label='Data')
        # Displace the center of both distributions according to the difference
        fit = fit*onoroff
        surround_alt = np.abs(fit[fit<0].sum())
        center_alt = np.abs(fit[fit>0].sum())
        csi_alt = np.abs(fit[fit<0].sum())/(fit[fit>0].sum())
        csi_alts[j, i] = csi_alt
        print(surround_alt, center_alt, csi_alt)
        fit_cut = fit[2*fsize_diff:-2*fsize_diff]
        plf.subplottext(f'csi_alt: {csi_alt:4.2f}',
                ax2, x=.95, y=.05, fontsize=8, fontweight='normal')
        plf.subplottext(['', '■'][j], ax2, x=.50, y=.05, color=colors[i],
                        fontsize=8)
def stripesurround(exp_name, stimnrs):
    """
    Fit a 1D center-surround model to stripe flicker STAs.

    For each cluster, the spatial profile at the temporal peak of the STA
    is fitted with the sum of a center and a surround Gaussian.  A
    center-surround index (CSI, center amplitude / surround amplitude)
    and the response polarity are computed per cell, plotted next to the
    STA, and appended to the stimulus' saved .npz data.

    Parameters
    ----------
    exp_name : str
        Experiment name or path; resolved via iof.exp_dir_fixer.
    stimnrs : int or sequence of int
        Stimulus number(s) of stripe flicker stimuli to process.
    """
    exp_dir = iof.exp_dir_fixer(exp_name)

    if isinstance(stimnrs, int):
        stimnrs = [stimnrs]

    for stimnr in stimnrs:
        data = iof.load(exp_name, stimnr)

        _, metadata = asc.read_spikesheet(exp_dir)
        px_size = metadata['pixel_size(um)']

        clusters = data['clusters']
        stas = data['stas']
        max_inds = data['max_inds']
        stx_w = data['stx_w']
        exp_name = data['exp_name']
        stimname = data['stimname']
        # NOTE(review): previously loaded 'filter_length', 'frame_duration'
        # and 'quals' (and derived t/vscale) were never used; removed.

        clusterids = plf.clusters_to_ids(clusters)

        # Number of stripes that cover 700 um on the retina; the STA is
        # cut to twice this size around its center below.
        fsize = int(700 / (stx_w * px_size))

        cs_inds = np.empty(clusters.shape[0])
        polarities = np.empty(clusters.shape[0])

        savepath = os.path.join(exp_dir, 'data_analysis', stimname)

        for i in range(clusters.shape[0]):
            sta = stas[i]
            max_i = max_inds[i]

            sta, max_i = msc.cutstripe(sta, max_i, fsize * 2)
            plt.figure(figsize=(12, 10))
            ax = plt.subplot(121)
            plf.stashow(sta, ax)

            # Isolate the time point from which the fit will
            # be obtained
            fitv = sta[:, max_i[1]]
            # Make a space vector
            s = np.arange(fitv.shape[0])

            # Polarity: -1 (OFF) when the largest deviation is negative,
            # +1 (ON) otherwise.
            if np.max(fitv) != np.max(np.abs(fitv)):
                onoroff = -1
            else:
                onoroff = 1
            polarities[i] = onoroff
            # Determine the peak values for center and surround
            # to give as initial parameters for curve fitting
            centerpeak = -onoroff * np.max(fitv * onoroff)
            surroundpeak = -onoroff * np.max(fitv * -onoroff)

            # Define initial guesses for the center and surround gaussians
            # First set of values are for center, second for surround.
            p_initial = [centerpeak, max_i[0], 2, surroundpeak, max_i[0], 4]
            bounds = ([0, -np.inf, -np.inf, 0, -np.inf, -np.inf], np.inf)

            try:
                popt, _ = curve_fit(centersurround_onedim,
                                    s,
                                    fitv,
                                    p0=p_initial,
                                    bounds=bounds)
            except ValueError as e:
                if str(e) == "`x0` is infeasible.":
                    # Amplitude guesses violate the bounds; fall back to a
                    # single center Gaussian and pad a zero surround.
                    print(e)
                    popt, _ = curve_fit(onedgauss,
                                        s,
                                        onoroff * fitv,
                                        p0=p_initial[:3])
                    popt = np.append(popt, [0, popt[1], popt[2]])
                else:
                    raise
            fit = centersurround_onedim(s, *popt)

            # Avoid dividing by zero when calculating center-surround index
            if popt[3] > 0:
                csi = popt[0] / popt[3]
            else:
                csi = 0
            cs_inds[i] = csi
            ax = plt.subplot(122)
            plf.spineless(ax)
            ax.set_yticks([])

            # We need to flip the vertical axis to match
            # with the STA next to it
            plt.plot(onoroff * fitv, -s, label='Data')
            plt.plot(onoroff * fit, -s, label='Fit')
            plt.axvline(0, linestyle='dashed', alpha=.5)
            plt.title(f'Center: a: {popt[0]:4.2f}, μ: {popt[1]:4.2f},' +
                      f' σ: {popt[2]:4.2f}\n' +
                      f'Surround: a: {popt[3]:4.2f}, μ: {popt[4]:4.2f},' +
                      f' σ: {popt[5]:4.2f}' + f'\n CS index: {csi:4.2f}')
            plt.subplots_adjust(top=.82)
            plt.suptitle(f'{exp_name}\n{stimname}\n{clusterids[i]}')
            os.makedirs(os.path.join(savepath, 'stripesurrounds'),
                        exist_ok=True)
            plt.savefig(
                os.path.join(savepath, 'stripesurrounds',
                             clusterids[i] + '.svg'))
            plt.close()

        data.update({'cs_inds': cs_inds, 'polarities': polarities})
        np.savez(os.path.join(savepath, f'{stimnr}_data.npz'), **data)
# Scatter plot of center-surround indices at two light levels, with
# marginal histograms along the top (x) and right (y) edges.
fig = plt.figure(figsize=(8, 8))
gs = GridSpec(10, 10)
ax_main = plt.subplot(gs[1:9, :9])
ax_x = plt.subplot(gs[0, :9], sharex=ax_main)
ax_y = plt.subplot(gs[1:9, 9], sharey=ax_main)

scatterkwargs = dict(c=colors, alpha=.8, linewidths=.5, edgecolor='k', s=35)
ax_main.scatter(csi[0, :], csi[1, :], **scatterkwargs)

# Highlight the two example cells with an asterisk each.
example_cells = [
    (0.03443051, 0.19385925),  # example ON cell 20180207 03001
    (0.03238909, 0.29553824),  # example OFF cell 20180118 23102
]
for xy in example_cells:
    ax_main.text(*xy, '*', color='k')

bins = np.linspace(0, 0.4, 9)
histkwargs = dict(bins=bins, color='k', alpha=.6)
ax_x.hist(csi[0, :], **histkwargs)
ax_y.hist(csi[1, :], orientation='horizontal', **histkwargs)

# Marginal axes only show the histograms, no frame or ticks.
for marginal in (ax_x, ax_y):
    marginal.set_axis_off()
    plf.spineless(marginal)
plt.show()
def saccadegratingsanalyzer(exp_name, stim_nr):
    """
    Analyze and save responses to saccadegratings stimulus.

    Regenerates the stimulus sequence from the hard-coded random seed,
    bins each cluster's spikes around every saccade/grey transition,
    saves one 4x4 PSTH figure per cluster (start position x target
    position) and an .npz file with all results.

    Parameters
    ----------
    exp_name:
        Experiment name or path; resolved via iof.exp_dir_fixer.
    stim_nr:
        Number of the saccadegratings stimulus within the experiment.
    """

    exp_dir = iof.exp_dir_fixer(exp_name)
    exp_name = os.path.split(exp_dir)[-1]
    stimname = iof.getstimname(exp_dir, stim_nr)
    clusters, metadata = asc.read_spikesheet(exp_dir)
    clusterids = plf.clusters_to_ids(clusters)

    refresh_rate = metadata['refresh_rate']

    parameters = asc.read_parameters(exp_name, stim_nr)
    if parameters['stimulus_type'] != 'saccadegrating':
        raise ValueError('Unexpected stimulus type: '
                         f'{parameters["stimulus_type"]}')
    # Fall back to the Stimulator's default values when a parameter is
    # not present in the parameter file.
    fixfr = parameters.get('fixationframes', 80)
    sacfr = parameters.get('saccadeframes', 10)
    barwidth = parameters.get('barwidth', 40)
    averageshift = parameters.get('averageshift', 2)
    # The seed is hard-coded in the Stimulator
    seed = -10000

    ftimes = asc.readframetimes(exp_dir, stim_nr)
    # Reshape the pulse times into pairs: two pulses per trial.
    ftimes.resize(int(ftimes.shape[0] / 2), 2)
    nfr = ftimes.size
    # Re-generate the stimulus
    # Amplitude of the shift and the transition type (saccade or grey) is
    # determined based on the output of ran1
    randnrs = np.array(randpy.ran1(seed, nfr)[0])

    # Separate the amplitude and transitions into two arrays
    stimpos = (4 * randnrs[::2]).astype(int)

    # Transition variable, determines whether grating is moving during
    # the transition or only a grey screen is presented.
    trans = np.array(randnrs[1::2] > 0.5)

    # Record before and after positions in a single array and remove
    # the first element b/c there is no before value
    stimposx = np.append(0, stimpos)[:-1]
    stimtr = np.stack((stimposx, stimpos), axis=1)[1:]
    trans = trans[:-1]

    # Split the (start, target) pairs by transition type.
    saccadetr = stimtr[trans, :]
    greytr = stimtr[~trans, :]

    # Create a time vector with defined temporal bin size
    tstep = 0.01  # Bin size is defined here, unit is seconds
    trialduration = (fixfr + sacfr) / refresh_rate
    nrsteps = int(trialduration / tstep) + 1
    t = np.linspace(0, trialduration, num=nrsteps)

    # Collect saccade beginning time for each trial
    trials = ftimes[1:, 0]
    sacftimes = trials[trans]
    greyftimes = trials[~trans]

    # (cluster, trial, time bin) spike counts for each transition type.
    sacspikes = np.empty((clusters.shape[0], sacftimes.shape[0], t.shape[0]))
    greyspikes = np.empty((clusters.shape[0], greyftimes.shape[0], t.shape[0]))
    # Collect all the psth in one array. The order is
    # transition type, cluster index, start pos, target pos, time
    psth = np.zeros((2, clusters.shape[0], 4, 4, t.size))

    # Bin each cluster's spikes in windows starting at every transition.
    for i, (chid, clid, _) in enumerate(clusters):
        spiketimes = asc.read_raster(exp_dir, stim_nr, chid, clid)
        for j, _ in enumerate(sacftimes):
            sacspikes[i, j, :] = asc.binspikes(spiketimes, sacftimes[j] + t)
        for k, _ in enumerate(greyftimes):
            greyspikes[i, k, :] = asc.binspikes(spiketimes, greyftimes[k] + t)

    # Sort trials according to the transition type
    # nton[i][j] contains the indexes of trials where saccade was i to j
    nton_sac = [[[] for _ in range(4)] for _ in range(4)]
    for i, trial in enumerate(saccadetr):
        nton_sac[trial[0]][trial[1]].append(i)
    nton_grey = [[[] for _ in range(4)] for _ in range(4)]
    for i, trial in enumerate(greytr):
        nton_grey[trial[0]][trial[1]].append(i)

    savedir = os.path.join(exp_dir, 'data_analysis', stimname)
    os.makedirs(savedir, exist_ok=True)
    # One 4x4 figure per cluster: rows are start positions (bottom to
    # top), columns are target positions.
    for i in range(clusters.shape[0]):
        fig, axes = plt.subplots(4,
                                 4,
                                 sharex=True,
                                 sharey=True,
                                 figsize=(8, 8))
        for j in range(4):
            for k in range(4):
                # Start from bottom left corner
                ax = axes[3 - j][k]
                # Average all transitions of one type
                psth_sac = sacspikes[i, nton_sac[j][k], :].mean(axis=0)
                psth_grey = greyspikes[i, nton_grey[j][k], :].mean(axis=0)
                # Convert to spikes per second
                psth_sac = psth_sac / tstep
                psth_grey = psth_grey / tstep
                psth[0, i, j, k, :] = psth_sac
                psth[1, i, j, k, :] = psth_grey
                # Mark the end of the transition period.
                ax.axvline(sacfr / refresh_rate * 1000,
                           color='red',
                           linestyle='dashed',
                           linewidth=.5)
                ax.plot(t * 1000, psth_sac, label='Saccadic trans.')
                ax.plot(t * 1000, psth_grey, label='Grey trans.')
                ax.set_yticks([])
                ax.set_xticks([])
                # Cosmetics
                plf.spineless(ax)
                if j == k:
                    # Highlight trials where the grating did not move.
                    ax.set_facecolor((1, 1, 0, 0.15))
                if j == 0:
                    ax.set_xlabel(f'{k}')
                    if k == 3:
                        ax.legend(fontsize='xx-small', loc=0)
                if k == 0:
                    ax.set_ylabel(f'{j}')

        # Add an encompassing label for starting and target positions
        ax0 = fig.add_axes([0.08, 0.08, .86, .86])
        plf.spineless(ax0)
        ax0.patch.set_alpha(0)
        ax0.set_xticks([])
        ax0.set_yticks([])
        ax0.set_ylabel('Start position')
        ax0.set_xlabel('Target position')
        plt.suptitle(f'{exp_name}\n{stimname}\n{clusterids[i]}')
        plt.savefig(os.path.join(savedir, f'{clusterids[i]}.svg'))
        plt.close()
    # Save results
    keystosave = [
        'fixfr', 'sacfr', 't', 'averageshift', 'barwidth', 'seed', 'trans',
        'saccadetr', 'greytr', 'nton_sac', 'nton_grey', 'stimname',
        'sacspikes', 'greyspikes', 'psth', 'nfr', 'parameters'
    ]
    data_in_dict = {}
    for key in keystosave:
        data_in_dict[key] = locals()[key]

    np.savez(os.path.join(savedir, str(stim_nr) + '_data'), **data_in_dict)
    print(f'Analysis of {stimname} completed.')
Exemple #14
0
def spontanalyzer(exp_name, stim_nrs):
    """
    Analyze spontaneous activity, plot and save it. Will make a directory
    /data_analysis/<stimulus_name> and save svg [and pdf in subfolder.].

    Parameters
    ----------
    exp_name : str
        Experiment name or path; resolved via iof.exp_dir_fixer.
    stim_nrs : int or sequence of int
        Stimulus number(s) of spontaneous activity recordings.
    """

    exp_dir = iof.exp_dir_fixer(exp_name)

    exp_name = os.path.split(exp_dir)[-1]

    if isinstance(stim_nrs, int):
        stim_nrs = [stim_nrs]
    elif len(stim_nrs) == 0:
        return

    for stim_nr in stim_nrs:
        stim_nr = str(stim_nr)

        stimname = iof.getstimname(exp_dir, stim_nr)

        clusters, _ = asc.read_spikesheet(exp_dir, cutoff=4)

        # Length of chunks (seconds) we use for dividing the activity
        # into the rows of the raster plot.
        step = 1

        allspikes = []

        for i in range(clusters.shape[0]):
            spikes = asc.read_raster(exp_dir, stim_nr, clusters[i, 0],
                                     clusters[i, 1])
            allspikes.append(spikes)

        # Use the time of the last spike to determine the total recording time.
        last_spike = np.max([np.max(allspikes[i])
                             for i in range(clusters.shape[0])
                             if len(allspikes[i]) > 0])
        # BUGFIX: np.int was removed from NumPy (deprecated in 1.20,
        # removed in 1.24); the builtin int is the correct replacement.
        totalrecordingtime = int(np.ceil(last_spike) + 1)
        times = np.arange(0, totalrecordingtime, step)

        for i in range(clusters.shape[0]):
            spikes = allspikes[i]
            # Find which trial each spike belongs to, and subtract one
            # to be able to use as indices
            trial_indices = np.digitize(spikes, times) - 1

            rasterplot = []
            # Iterate over all the trials, create an empty array for each
            for j in range(totalrecordingtime):
                rasterplot.append([])
            # plt.eventplot requires a list containing spikes in each
            # trial separately
            for k in range(len(spikes)):
                trial = trial_indices[k]
                rasterplot[trial].append(spikes[k] - times[trial])

            # Workaround for matplotlib issue #6412.
            # https://github.com/matplotlib/matplotlib/issues/6412
            # If a cell has no spikes for the first trial i.e. the first
            # element of the list is empty, an error is raised due to
            # a plt.eventplot bug.
            if len(rasterplot[0]) == 0:
                rasterplot[0] = [-1]

            plt.figure(figsize=(9, 6))
            ax1 = plt.subplot(111)
            plt.eventplot(rasterplot, linewidth=.5, color='k')
            # Set the axis so they align with the rectangles
            plt.axis([0, step, -1, len(rasterplot)])

            plt.suptitle('{}\n{}'.format(exp_name, stimname))
            plt.title('{:0>3}{:0>2} Rating: {}'.format(clusters[i][0],
                                                       clusters[i][1],
                                                       clusters[i][2]))
            plt.ylabel('Time index')
            plt.xlabel('Time[s]')
            plt.gca().invert_yaxis()
            ax1.set_xticks([0, .5, 1])
            plf.spineless(ax1)

            savedir = os.path.join(exp_dir, 'data_analysis', stimname)
            os.makedirs(os.path.join(savedir, 'pdf'), exist_ok=True)

            # Save as svg for looking through data, pdf for
            # inserting into presentations.  Use os.path.join for the
            # svg path too, consistent with the pdf path below.
            plt.savefig(os.path.join(
                savedir,
                '{:0>3}{:0>2}.svg'.format(clusters[i, 0], clusters[i, 1])),
                        format='svg',
                        bbox_inches='tight')
            plt.savefig(os.path.join(
                savedir, 'pdf', '{:0>3}'
                '{:0>2}.pdf'.format(clusters[i, 0], clusters[i, 1])),
                        format='pdf',
                        bbox_inches='tight')
            plt.close()
        print(f'Analysis of {stimname} completed.')
Exemple #15
0
    # Correcting for Mahalonobis dist.
    with warnings.catch_warnings():
        warnings.filterwarnings('ignore',
                                '.*divide by zero*.', RuntimeWarning)
        Zm = np.log((Z-pars[0])/pars[1])
    Zm[np.isinf(Zm)] = np.nan
    Zm = np.sqrt(Zm*-2)

    ax1 = plt.subplot(rows, columns, 1)
    plf.subplottext('A', ax1)

    vmax = np.abs(fit_frame).max()
    vmin = -vmax
    im = plf.stashow(fit_frame, ax1)
    ax1.set_aspect('equal')
    plf.spineless(ax1)
    ax1.set_xticks([])
    ax1.set_yticks([])

    checkercolors = ['black', 'orange']

    with warnings.catch_warnings():
        warnings.filterwarnings('ignore', category=UserWarning)
        warnings.filterwarnings('ignore', '.*invalid value encountered*.')
        ax1.contour(Y, X, Zm, [inner_b, outer_b], linewidths=.5,
                   cmap=plf.RFcolormap(checkercolors))

    barsize_set_checker = 100 # micrometers
    checker_scalebarsize = barsize_set_checker/(stx_h*px_size)

    scalebars.add_scalebar(ax1,
Exemple #16
0
# BUGFIX: numpy is used below (np.load, np.arange) but was not imported
# in this snippet.
import numpy as np
import matplotlib.pyplot as plt
import plotfuncs as plf

# Compare polarity index (ON-OFF bias) distributions between the
# mesopic and photopic light levels.
data = np.load('/home/ycan/Documents/thesis/analysis_auxillary_files/'
               'thesis_csiplotting.npz')
cells = data['cells']
csi = data['csi']
bias = data['bias']
groups = data['groups']
colorcategories = data['colorcategories']

fig, axes = plt.subplots(2, 1, sharex=True)
ax1, ax2 = axes.ravel()

# Bin edges covering the full [-1, 1] polarity index range.
bins = np.arange(-1, 1 + .125, .125)

ax2.set_xlabel('Polarity Index')
ax1.set_title('Mesopic')
ax2.set_title('Photopic')
plf.spineless(ax1, 'tr')
plf.spineless(ax2, 'tr')

ax1.hist(bias[0, :], bins=bins)
ax2.hist(bias[1, :], bins=bins)

plt.show()
Exemple #17
0
def plotstripestas(exp_name, stim_nrs):
    """
    Plot and save all the STAs from multiple stripe flicker stimuli.

    Parameters
    ----------
    exp_name : str
        Experiment name or path; resolved via iof.exp_dir_fixer.
    stim_nrs : int or sequence of int
        Stimulus number(s) of stripe flicker stimuli to plot.
    """
    exp_dir = iof.exp_dir_fixer(exp_name)

    _, metadata = asc.read_spikesheet(exp_dir)
    px_size = metadata['pixel_size(um)']

    if isinstance(stim_nrs, int):
        stim_nrs = [stim_nrs]
    elif len(stim_nrs) == 0:
        return

    for stim_nr in stim_nrs:
        data = iof.load(exp_name, stim_nr)

        clusters = data['clusters']
        stas = data['stas']
        filter_length = data['filter_length']
        stx_w = data['stx_w']
        exp_name = data['exp_name']
        stimname = data['stimname']
        frame_duration = data['frame_duration']
        quals = data['quals']

        clusterids = plf.clusters_to_ids(clusters)

        # Time axis in milliseconds and spatial extent of the STA in
        # millimeters, used for the imshow extent below.
        t = np.arange(filter_length) * frame_duration * 1000
        vscale = int(stas[0].shape[0] * stx_w * px_size / 1000)

        # The save directory is the same for every cluster of this
        # stimulus: create it once before the loop (the isdir check is
        # redundant with exist_ok=True).
        savepath = os.path.join(exp_dir, 'data_analysis', stimname, 'STAs')
        os.makedirs(savepath, exist_ok=True)

        for i in range(clusters.shape[0]):
            sta = stas[i]

            # Symmetric color scale around zero.
            vmax = np.max(np.abs(sta))
            vmin = -vmax
            plt.figure(figsize=(6, 15))
            ax = plt.subplot(111)
            im = ax.imshow(sta,
                           cmap='RdBu',
                           vmin=vmin,
                           vmax=vmax,
                           extent=[0, t[-1], -vscale, vscale],
                           aspect='auto')
            plt.xlabel('Time [ms]')
            plt.ylabel('Distance [mm]')

            plf.spineless(ax)
            plf.colorbar(im, ticks=[vmin, 0, vmax], format='%.2f', size='2%')
            plt.suptitle(f'{exp_name}\n{stimname}\n'
                         f'{clusterids[i]} Rating: {clusters[i][2]}\n'
                         f'STA quality: {quals[i]:4.2f}')
            plt.subplots_adjust(top=.90)
            plt.savefig(os.path.join(savepath, clusterids[i] + '.svg'),
                        bbox_inches='tight')
            plt.close()
        print(f'Plotting of {stimname} completed.')
Exemple #18
0
def plotsvd(file, f_size=10, filter_size=1):
    """
    Plot the first two SVD components of a checkerflicker STA together
    with the spatial frame at the temporal peak, raw and Gaussian
    filtered, plus the temporal components.

    Parameters
    ----------
    file : str
        Path to an .npz file containing 'sta_unscaled' and 'max_i'.
    f_size : int
        Half-width (in stixels) of the window cut around the STA center.
    filter_size : float
        Sigma of the Gaussian filter applied to the spatial components.

    Returns
    -------
    fig : matplotlib figure with the 3x3 panel layout.
    """
    # Hoisted out of the plotting loop where it used to live.
    from mpl_toolkits.axes_grid1.anchored_artists import AnchoredSizeBar

    data = np.load(file)
    filename = os.path.split(file)[-1].split('.')[0]

    sta = data['sta_unscaled']
    max_i = data['max_i']

    sta, max_i = mf.cut_around_center(sta, max_i, f_size=f_size)

    # Spatial frame at the temporal peak of the STA.
    fit_frame = sta[:, :, max_i[2]]

    sp1, sp2, t1, t2, u, v = mf.svd(sta)  # u, v are unused here

    # BUGFIX: the scipy.ndimage.filters namespace is deprecated (and
    # removed in recent SciPy); gaussian_filter lives directly in
    # scipy.ndimage.
    sp1_filtered = ndi.gaussian_filter(sp1, sigma=(filter_size, filter_size))
    sp2_filtered = ndi.gaussian_filter(sp2, sigma=(filter_size, filter_size))
    ff_filtered = ndi.gaussian_filter(fit_frame,
                                      sigma=(filter_size, filter_size))

    plotthese = [fit_frame, sp1, sp2, ff_filtered, sp1_filtered, sp2_filtered]

    fig = plt.figure(dpi=130)
    plt.suptitle('{}\n frame size: {}'.format(filename, f_size))
    rows = 3
    columns = 3
    # Fixed color scale so different cells are directly comparable.
    vmax = .7
    vmin = -vmax

    for i in range(6):
        ax = plt.subplot(rows, columns, i + 1)
        im = plt.imshow(plotthese[i],
                        vmin=vmin,
                        vmax=vmax,
                        cmap=plf.RFcolormap())
        ax.set_aspect('equal')
        plt.xticks([])
        plt.yticks([])
        # Color-code the frame borders so each raw/filtered pair of
        # panels (same column) gets the same color.
        for child in ax.get_children():
            if isinstance(child, matplotlib.spines.Spine):
                child.set_color('C{}'.format(i % 3))
                child.set_linewidth(2)
        if i == 0:
            plt.title('center px')
            fig.colorbar(im)
        elif i == 1:
            plt.title('SVD spatial 1')
        elif i == 2:
            plt.title('SVD spatial 2')
        if i == 0:
            plt.ylabel('Non-filtered')
        if i == 3:
            plt.ylabel('Gaussian filtered')
            scalebar = AnchoredSizeBar(ax.transData,
                                       3,
                                       '180 µm',
                                       'lower right',
                                       pad=.5,
                                       color='grey',
                                       frameon=False,
                                       size_vertical=.3)
            ax.add_artist(scalebar)

    # Temporal components spanning the bottom row.
    ax = plt.subplot(rows, 1, 3)
    plt.plot(sta[max_i[0], max_i[1], :], label='center px')
    plt.plot(t1, label='Temporal 1')
    plt.plot(t2, label='Temporal 2')
    plt.axis([0, 20, -.75, .5])
    plf.spineless(ax, 'trlb')  # Turn off spines using custom function
    return fig
def omb_contrastmotion2dnonlin(exp,
                               stim,
                               nbins_nlt=9,
                               cmap='Greys',
                               plot3d=False):
    """
    Calculate and plot the 2D nonlinearities for the OMB stimulus. The
    magnitude of the stimulus projection on quadratic motion filters
    from GQM is used for the motion.

    Parameters:
    ------
        exp:
            Experiment name or path.
        stim:
            Number of the OMB stimulus within the experiment.
        nbins_nlt:
            Number of bins to be used for dividing the generator signals
            into ranges with equal number of samples.
        cmap:
            Colormap for the 2D nonlinearity pcolormesh.
        plot3d:
            Whether to additionally create a 3D version of the nonlinearity.
    """

    st = OMB(exp, stim)

    # Load the GQM fit results: quadratic (motion) and linear (contrast)
    # filters.  The previously loaded 'muall' and 'cross_corrs' entries
    # were never used and are no longer read.
    data_cm = np.load(
        os.path.join(st.exp_dir, 'data_analysis', st.stimname,
                     'GQM_motioncontrast', f'{stim}_GQM_motioncontrast.npz'))

    qall = data_cm['Qall']
    kall = data_cm['kall']

    allspikes = st.allspikes()

    stim_mot = st.bgsteps.copy()

    # Bin dimension should be one greater than nonlinearity for pcolormesh
    # compatibility. Otherwise the last row and column of nonlinearity is not
    # plotted.
    all_bins_c = np.zeros((st.nclusters, nbins_nlt + 1))
    all_bins_r = np.zeros((st.nclusters, nbins_nlt + 1))
    nonlinearities = np.zeros((st.nclusters, nbins_nlt, nbins_nlt))

    label = '2D-nonlin_magQ_motion_kcontrast'

    savedir = os.path.join(st.stim_dir, label)
    os.makedirs(savedir, exist_ok=True)

    for i in range(st.nclusters):
        stim_con = st.contrast_signal_cell(i).squeeze()

        # Project the motion stimulus onto the quadratic filter
        generator_x = gqm.conv2d(qall[i, 0, :], stim_mot[0, :])
        generator_y = gqm.conv2d(qall[i, 1, :], stim_mot[1, :])

        # Calculate the magnitude of the vector formed by motion generators
        generators = np.vstack([generator_x, generator_y])
        r = np.sqrt(np.sum(generators**2, axis=0))

        # Project the contrast stimulus onto the linear filter
        generator_c = np.convolve(stim_con, kall[i, 2, :],
                                  'full')[:-st.filter_length + 1]
        spikes = allspikes[i, :]

        nonlinearity, bins_c, bins_r = nlt.calc_nonlin_2d(spikes,
                                                          generator_c,
                                                          r,
                                                          nr_bins=nbins_nlt)
        # Convert from spikes per frame to spikes per second.
        nonlinearity /= st.frame_duration

        all_bins_c[i, :] = bins_c
        all_bins_r[i, :] = bins_r
        nonlinearities[i, ...] = nonlinearity

        X, Y = np.meshgrid(bins_c, bins_r, indexing='ij')

        fig = plt.figure()

        # Main 2D nonlinearity plus marginal panels on top and right.
        gs = gsp.GridSpec(5, 5)
        axmain = plt.subplot(gs[1:, :-1])
        axx = plt.subplot(gs[0, :-1], sharex=axmain)
        axy = plt.subplot(gs[1:, -1], sharey=axmain)

        # Normally subplots turns off shared axis tick labels but
        # Gridspec does not do this
        plt.setp(axx.get_xticklabels(), visible=False)
        plt.setp(axy.get_yticklabels(), visible=False)

        im = axmain.pcolormesh(X, Y, nonlinearity, cmap=cmap)
        plf.integerticks(axmain)

        cb = plt.colorbar(im)
        cb.outline.set_linewidth(0)
        cb.ax.set_xlabel('spikes/s')
        cb.ax.xaxis.set_label_position('top')

        plf.integerticks(cb.ax, 4, which='y')
        plf.integerticks(axx, 1, which='y')
        plf.integerticks(axy, 1, which='x')

        barkwargs = dict(alpha=.3, facecolor='k', linewidth=.5, edgecolor='w')

        # Marginal averages of the 2D nonlinearity along each axis.
        axx.bar(nlt.bin_midpoints(bins_c),
                nonlinearity.mean(axis=1),
                width=np.ediff1d(bins_c),
                **barkwargs)
        axy.barh(nlt.bin_midpoints(bins_r),
                 nonlinearity.mean(axis=0),
                 height=np.ediff1d(bins_r),
                 **barkwargs)
        plf.spineless(axx, 'b')
        plf.spineless(axy, 'l')

        axmain.set_xlabel('Projection onto linear contrast filter')
        axmain.set_ylabel(
            'Magnitude of projection onto quadratic motion filters')
        # Reuse the cached spikes instead of calling st.allspikes() again.
        fig.suptitle(
            f'{st.exp_foldername}\n{st.stimname}\n{st.clids[i]} '
            f'2D nonlinearity nsp: {allspikes[i, :].sum():<5.0f}')

        plt.subplots_adjust(top=.85)
        fig.savefig(os.path.join(savedir, st.clids[i]), bbox_inches='tight')
        plt.show()

        if plot3d:
            # Importing mplot3d registers the 3D projection with
            # matplotlib; doing it once is enough.
            if i == 0:
                from mpl_toolkits import mplot3d
            from matplotlib.ticker import MaxNLocator
            fig = plt.figure()
            ax = plt.axes(projection='3d')
            ax.plot_surface(X,
                            Y,
                            nonlinearity,
                            cmap='YlGn',
                            edgecolors='k',
                            linewidths=0.2)
            ax.set_xlabel('Projection onto linear contrast filter')
            ax.set_ylabel(
                'Magnitude of projection onto quadratic motion filters')

            ax.set_zlabel(r'Firing rate [sp/s]')
            ax.view_init(elev=30, azim=-135)

            ax.xaxis.set_major_locator(MaxNLocator(integer=True))
            ax.yaxis.set_major_locator(MaxNLocator(integer=True))
            ax.zaxis.set_major_locator(MaxNLocator(integer=True))

    keystosave = ['nonlinearities', 'all_bins_c', 'all_bins_r', 'nbins_nlt']
    datadict = {}

    for key in keystosave:
        datadict.update({key: locals()[key]})
    npzfpath = os.path.join(savedir, f'{st.stimnr}_{label}.npz')
    np.savez(npzfpath, **datadict)
Exemple #20
0
# Change in center-surround index vs change in polarity index between
# light levels; the wide top panel shows all cell groups together.
ax = plt.subplot2grid((4, 3), (0, 0), colspan=3, rowspan=2)

# Build the legend handles, one colored patch per cell group.
patches = [mpatches.Patch(color=color, label=label)
           for color, label in zip(colorcategories, colorlabels)]
ax.legend(handles=patches, fontsize='xx-small')

for group, color in zip(groups, colorcategories):
    ax.scatter(csichange[group], biaschange[group], c=color, **scatterkwargs)
    ax.axhline(0, **linekwargs)
    ax.axvline(0, **linekwargs)
    plf.subplottext('A', ax, x=-0.05)
ax.set_xlabel(r'CSI$_{photopic}$ - CSI$_{mesopic}$')
ax.set_ylabel(r'PI$_{photopic}$ - PI$_{mesopic}$')

# One small panel per group, highlighting that group over all cells.
for i, (group, color) in enumerate(zip(groups, colorcategories)):
    grid_row = 2 + int(np.round((i - 1) / 3))
    ax = plt.subplot2grid((4, 3), (grid_row, i % 3))
    ax.scatter(csichange, biaschange, c='grey')
    ax.scatter(csichange[group], biaschange[group], c=color, **scatterkwargs)
    ax.axhline(0, **linekwargs)
    ax.axvline(0, **linekwargs)
    plf.spineless(ax, 'tr')
    plf.subplottext('BCDEF'[i], ax, x=-.25)

plt.subplots_adjust(hspace=.45, wspace=.45)
plt.show()
# Exemple #21
# 0
def csindexchange(exp_name, onoffcutoff=.5, qualcutoff=9):
    """
    Plots the change in center surround indexes in different light
    levels. Also classifies based on ON-OFF index from the onoffsteps
    stimulus at the matching light level.

    Parameters
    ----------
    exp_name : str
        Experiment name; resolved to a full path via iof.exp_dir_fixer.
    onoffcutoff : float
        Absolute ON-OFF bias threshold: cells above it in all light
        levels are ON (blue), below -cutoff OFF (red), strictly in
        between ON-OFF (black); anything else is plotted white.
    qualcutoff : float
        Cells whose quality is not above this value in every light
        level are excluded (set to NaN).

    Raises
    ------
    ValueError
        If the experiment is not one of the known, hard-coded ones.
    """

    # For now there are only three experiments with the
    # different light levels and the indices of stimuli
    # are different. To automate it will be tricky and
    # ROI is just not enough to justify; so they are
    # hard coded.
    if '20180124' in exp_name or '20180207' in exp_name:
        stripeflicker = [6, 12, 17]
        onoffs = [3, 8, 14]
    elif '20180118' in exp_name:
        stripeflicker = [7, 14, 19]
        onoffs = [3, 10, 16]
    else:
        # Previously an unrecognized experiment fell through silently and
        # caused a NameError further down; fail early and clearly instead.
        raise ValueError('Stimulus numbers are not known for '
                         f'experiment {exp_name}.')

    exp_dir = iof.exp_dir_fixer(exp_name)
    exp_name = os.path.split(exp_dir)[-1]
    clusternr = asc.read_spikesheet(exp_name)[0].shape[0]

    # Collect all CS indices, on-off indices and quality scores
    csinds = np.zeros((3, clusternr))
    quals = np.zeros((3, clusternr))

    onoffinds = np.zeros((3, clusternr))
    for i, stim in enumerate(onoffs):
        onoffinds[i, :] = iof.load(exp_name, stim)['onoffbias']

    for i, stim in enumerate(stripeflicker):
        data = iof.load(exp_name, stim)
        quals[i, :] = data['quals']
        csinds[i, :] = data['cs_inds']

    csinds_f = np.copy(csinds)
    quals_f = np.copy(quals)
    onoffbias_f = np.copy(onoffinds)

    # Filter them according to the quality cutoff value
    # and set excluded ones to NaN
    for j in range(quals.shape[1]):
        if not np.all(quals[:, j] > qualcutoff):
            quals_f[:, j] = np.nan
            csinds_f[:, j] = np.nan
            onoffbias_f[:, j] = np.nan

    # Define the color for each point depending on each cell's ON-OFF index
    # by appending the color name in an array.
    colors = []
    for j in range(onoffbias_f.shape[1]):
        if np.all(onoffbias_f[:, j] > onoffcutoff):
            # If it stays ON througout
            colors.append('blue')
        elif np.all(onoffbias_f[:, j] < -onoffcutoff):
            # If it stays OFF throughout
            colors.append('red')
        elif (np.all(onoffcutoff > onoffbias_f[:, j])
              and np.all(onoffbias_f[:, j] > -onoffcutoff)):
            # If it's ON-OFF throughout
            colors.append('black')
        else:
            # Mixed/undetermined cells (including NaN-excluded ones, since
            # comparisons with NaN are False) are plotted invisibly.
            colors.append('white')

    scatterkwargs = {'c': colors, 'alpha': .6, 'linewidths': 0}

    colorcategories = ['blue', 'red', 'black']
    colorlabels = ['ON', 'OFF', 'ON-OFF']

    # Create an array for all the colors to use with plt.legend()
    patches = []
    for color, label in zip(colorcategories, colorlabels):
        patches.append(mpatches.Patch(color=color, label=label))

    # Endpoints of the diagonal (identity) reference line.
    x = [np.nanmin(csinds_f), np.nanmax(csinds_f)]

    plt.figure(figsize=(12, 6))
    ax1 = plt.subplot(121)
    plt.legend(handles=patches, fontsize='small')
    plt.scatter(csinds_f[0, :], csinds_f[1, :], **scatterkwargs)
    plt.plot(x, x, 'r--', alpha=.5)
    plt.xlabel('Low 1')
    plt.ylabel('High')

    ax1.set_aspect('equal')
    plf.spineless(ax1)

    ax2 = plt.subplot(122)
    plt.scatter(csinds_f[0, :], csinds_f[2, :], **scatterkwargs)
    plt.plot(x, x, 'r--', alpha=.5)
    plt.xlabel('Low 1')
    plt.ylabel('Low 2')
    ax2.set_aspect('equal')
    plf.spineless(ax2)

    plt.suptitle(f'Center-Surround Index Change\n{exp_name}')
    plt.text(.8,
             -0.1,
             f'qualcutoff:{qualcutoff} onoffcutoff:{onoffcutoff}',
             fontsize='small',
             transform=ax2.transAxes)
    plotsave = os.path.join(exp_dir, 'data_analysis', 'csinds')
    plt.savefig(plotsave + '.svg', format='svg', bbox_inches='tight')
    plt.savefig(plotsave + '.pdf', format='pdf', bbox_inches='tight')
    plt.show()
    plt.close()
# Exemple #22
# 0
def fffanalyzer(exp_name, stimnrs):
    """
    Analyzes and plots data from full field flicker
    stimulus.

    For each cluster the spike-triggered average (STA) and the
    non-centered spike-triggered covariance matrix are accumulated from
    the regenerated stimulus, the covariance is eigendecomposed (STC),
    and the STA plus the first/last STC components are plotted and the
    results saved to an .npz file.

    Parameters
    ----------
    exp_name : str
        Experiment name or path; resolved via iof.exp_dir_fixer.
    stimnrs : int or sequence of int
        Stimulus number(s) of the full field flicker runs to analyze.

    Raises
    ------
    ValueError
        If the stixel size does not cover the screen, i.e. the stimulus
        is not actually full field flicker.
    """
    exp_dir = iof.exp_dir_fixer(exp_name)
    exp_name = os.path.split(exp_dir)[-1]

    # Accept a single stimulus number as well as a list of them.
    if isinstance(stimnrs, int):
        stimnrs = [stimnrs]

    for stimnr in stimnrs:
        stimnr = str(stimnr)

        stimname = iof.getstimname(exp_name, stimnr)

        clusters, metadata = asc.read_spikesheet(exp_dir)

        parameters = asc.read_parameters(exp_dir, stimnr)

        clusterids = plf.clusters_to_ids(clusters)

        # Full field flicker means one stixel covering the whole screen.
        # (An unused `refresh_rate = metadata['refresh_rate']` lookup was
        # removed here.)
        if parameters['stixelheight'] < 600 or parameters['stixelwidth'] < 800:
            raise ValueError('Make sure the stimulus is full field flicker.')

        nblinks = parameters['Nblinks']

        # blackwhite: binary (+1/-1) stimulus; otherwise Gaussian noise.
        bw = parameters.get('blackwhite', False)

        seed = parameters.get('seed', -10000)

        filter_length, frametimings = asc.ft_nblinks(exp_dir, stimnr)

        frame_duration = np.average(np.ediff1d(frametimings))
        total_frames = frametimings.shape[0]

        all_spiketimes = []
        # Store spike triggered averages in a list containing correct shaped
        # arrays
        stas = []
        # Make a list for covariances of the spike triggered ensemble
        covars = []
        for i in range(len(clusters[:, 0])):
            spiketimes = asc.read_raster(exp_dir, stimnr,
                                         clusters[i, 0], clusters[i, 1])
            spikes = asc.binspikes(spiketimes, frametimings)
            all_spiketimes.append(spikes)
            stas.append(np.zeros(filter_length))
            covars.append(np.zeros((filter_length, filter_length)))

        # Regenerate the stimulus from the pseudo-random number generator.
        if bw:
            randnrs, seed = randpy.ranb(seed, total_frames)
            # Since ranb returns zeros and ones, we need to convert the zeros
            # into -1s.
            stimulus = np.array(randnrs) * 2 - 1
        else:
            randnrs, seed = randpy.gasdev(seed, total_frames)
            stimulus = np.array(randnrs)

        for k in range(filter_length, total_frames-filter_length+1):
            # Stimulus snippet preceding frame k, most recent frame first.
            stim_small = stimulus[k-filter_length+1:k+1][::-1]
            for j in range(clusters.shape[0]):
                spikes = all_spiketimes[j]
                if spikes[k] != 0:
                    stas[j] += spikes[k]*stim_small
                    # This trick is needed to use .T for tranposing
                    stim_small_n = stim_small[np.newaxis, :]
                    # Calculate the covariance as the weighted outer product
                    # of small stimulus(i.e. snippet) with itself
                    # This is non-centered STC (a la Cantrell et al., 2010)
                    covars[j] += spikes[k]*(np.dot(stim_small_n.T,
                                                   stim_small_n))
        spikenrs = np.array([spikearr.sum() for spikearr in all_spiketimes])

        plotpath = os.path.join(exp_dir, 'data_analysis',
                                stimname, 'filters')
        # exist_ok makes the previous isdir() guard unnecessary.
        os.makedirs(plotpath, exist_ok=True)

        # Time axis in milliseconds for plotting the filters.
        t = np.arange(filter_length)*frame_duration*1000

        eigvals = [np.zeros((filter_length)) for i in range(clusters.shape[0])]
        eigvecs = [np.zeros((filter_length,
                             filter_length)) for i in range(clusters.shape[0])]

        for i in range(clusters.shape[0]):
            # Normalize accumulators by the number of spikes.
            stas[i] = stas[i]/spikenrs[i]
            covars[i] = covars[i]/spikenrs[i]
            try:
                eigvals[i], eigvecs[i] = np.linalg.eigh(covars[i])
            except np.linalg.LinAlgError:
                # Eigendecomposition can fail (e.g. NaN covariance for
                # cells with no spikes); mark the results as NaN instead
                # of aborting the whole stimulus.
                eigvals[i] = np.full((filter_length), np.nan)
                eigvecs[i] = np.full((filter_length, filter_length), np.nan)
            fig = plt.figure(figsize=(9, 6))
            ax = plt.subplot(111)
            ax.plot(t, stas[i], label='STA')
            # eigh returns eigenvalues in ascending order: column 0 is the
            # smallest (suppressive) and column -1 the largest (activating).
            ax.plot(t, eigvecs[i][:, 0], label='STC component 1', alpha=.5)
            ax.plot(t, eigvecs[i][:, -1], label='STC component 2', alpha=.5)
            # Add eigenvalues as inset
            ax2 = fig.add_axes([.65, .15, .2, .2])
            # Highlight the first and second components which are plotted
            ax2.plot(0, eigvals[i][0], 'o',
                     markersize=7, markerfacecolor='C1', markeredgewidth=0)
            ax2.plot(filter_length-1, eigvals[i][-1], 'o',
                     markersize=7, markerfacecolor='C2', markeredgewidth=0)
            ax2.plot(eigvals[i], 'ko', alpha=.5, markersize=4,
                     markeredgewidth=0)
            ax2.set_axis_off()
            plf.spineless(ax)
            ax.set_xlabel('Time[ms]')
            ax.set_title(f'{exp_name}\n{stimname}\n{clusterids[i]} Rating:'
                         f' {clusters[i, 2]} {int(spikenrs[i])} spikes')
            plt.savefig(os.path.join(plotpath, clusterids[i])+'.svg',
                        format='svg', dpi=300)
            plt.close()

        savepath = os.path.join(os.path.split(plotpath)[0], stimnr+'_data')

        # Save results; keys are looked up by name in locals(), so the
        # local variable names above must match these strings exactly.
        keystosave = ['stas', 'clusters', 'frame_duration', 'all_spiketimes',
                      'stimname', 'total_frames', 'spikenrs', 'bw', 'nblinks',
                      'filter_length', 'exp_name', 'covars', 'eigvals',
                      'eigvecs']
        data_in_dict = {}
        for key in keystosave:
            data_in_dict[key] = locals()[key]

        np.savez(savepath, **data_in_dict)
        print(f'Analysis of {stimname} completed.')
# Exemple #23
# 0
def plotcheckersurround(exp_name, stim_nr, filename=None, spikecutoff=1000,
                        ratingcutoff=4, staqualcutoff=0, inner_b=2,
                        outer_b=4):

    """
    Divides into center and surround by fitting 2D Gaussian, and plot
    temporal components.

    exp_name:
        Experiment name; resolved to a full path via iof.exp_dir_fixer.

    stim_nr:
        Number of the checkerflicker stimulus to use.

    filename:
        Optional alternative data file name; also used to derive the
        output folder name.

    spikecutoff:
        Minimum number of spikes to include.

    ratingcutoff:
        Minimum spike sorting rating to include.

    staqualcutoff:
        Minimum STA quality (as measured by z-score) to include.

    inner_b:
        Defined limit between receptive field center and surround
        in units of sigma.

    outer_b:
        Defined limit of the end of receptive field surround.
    """

    exp_dir = iof.exp_dir_fixer(exp_name)
    stim_nr = str(stim_nr)
    if filename:
        filename = str(filename)

    # Output folder name depends on whether an alternative file was given.
    if not filename:
        savefolder = 'surroundplots'
        label = ''
    else:
        label = filename.strip('.npz')
        savefolder = 'surroundplots_' + label

    _, metadata = asc.read_spikesheet(exp_name)
    px_size = metadata['pixel_size(um)']

    data = iof.load(exp_name, stim_nr, fname=filename)

    clusters = data['clusters']
    stas = data['stas']
    stx_h = data['stx_h']
    exp_name = data['exp_name']
    stimname = data['stimname']
    max_inds = data['max_inds']
    frame_duration = data['frame_duration']
    filter_length = data['filter_length']
    quals = data['quals'][-1, :]

    spikenrs = data['spikenrs']

    # Indices of the clusters passing each of the three quality criteria.
    c1 = np.where(spikenrs > spikecutoff)[0]
    c2 = np.where(clusters[:, 2] <= ratingcutoff)[0]
    c3 = np.where(quals > staqualcutoff)[0]

    # Keep only clusters that satisfy all three criteria at once.
    choose = [i for i in range(clusters.shape[0]) if ((i in c1) and
                                                      (i in c2) and
                                                      (i in c3))]
    clusters = clusters[choose]
    stas = list(np.array(stas)[choose])
    max_inds = list(np.array(max_inds)[choose])

    clusterids = plf.clusters_to_ids(clusters)

    # Time axis in milliseconds.
    t = np.arange(filter_length)*frame_duration*1000

    # Determine frame size so that the total frame covers
    # an area large enough i.e. 2*700um
    f_size = int(700/(stx_h*px_size))

    del data

    for i in range(clusters.shape[0]):

        sta_original = stas[i]
        max_i_original = max_inds[i]

        # Crop the STA around its peak pixel; skip cells whose peak is too
        # close to the edge for a full crop (cut_around_center raises).
        try:
            sta, max_i = mf.cut_around_center(sta_original,
                                              max_i_original, f_size)
        except ValueError:
            continue

        fit_frame = sta[:, :, max_i[2]]

        # If the largest absolute value of the frame is negative, the cell
        # is treated as OFF and the frame is sign-flipped for the fit.
        if np.max(fit_frame) != np.max(np.abs(fit_frame)):
            onoroff = -1
        else:
            onoroff = 1



        Y, X = np.meshgrid(np.arange(fit_frame.shape[1]),
                           np.arange(fit_frame.shape[0]))

        # Fit a 2D Gaussian to the polarity-corrected fit frame.
        with warnings.catch_warnings():
            warnings.filterwarnings('ignore',
                                    '.*divide by zero*.', RuntimeWarning)
            pars = gfit.gaussfit(fit_frame*onoroff)
            f = gfit.twodgaussian(pars)
            Z = f(X, Y)

        # Correcting for Mahalonobis dist.
        with warnings.catch_warnings():
            warnings.filterwarnings('ignore',
                                    '.*divide by zero*.', RuntimeWarning)
            Zm = np.log((Z-pars[0])/pars[1])
        Zm[np.isinf(Zm)] = np.nan
        Zm = np.sqrt(Zm*-2)

        ax = plt.subplot(1, 2, 1)

        plf.stashow(fit_frame, ax)
        ax.set_aspect('equal')

        # Draw the center/surround boundaries at inner_b and outer_b sigma.
        with warnings.catch_warnings():
            warnings.filterwarnings('ignore', category=UserWarning)
            warnings.filterwarnings('ignore', '.*invalid value encountered*.')
            ax.contour(Y, X, Zm, [inner_b, outer_b],
                       cmap=plf.RFcolormap(('C0', 'C1')))

        # 100 um scale bar, converted into stixel units.
        barsize = 100/(stx_h*px_size)
        scalebar = AnchoredSizeBar(ax.transData,
                                   barsize, '100 µm',
                                   'lower left',
                                   pad=1,
                                   color='k',
                                   frameon=False,
                                   size_vertical=.2)
        ax.add_artist(scalebar)

        # Build 3D masks (space x space x time) selecting center pixels
        # (< inner_b sigma) and surround pixels (between inner_b and
        # outer_b sigma) for every time bin.
        with warnings.catch_warnings():
            warnings.filterwarnings('ignore',
                                    '.*invalid value encountered in*.',
                                    RuntimeWarning)
            center_mask = np.logical_not(Zm < inner_b)
            center_mask_3d = np.broadcast_arrays(sta,
                                                 center_mask[..., None])[1]
            surround_mask = np.logical_not(np.logical_and(Zm > inner_b,
                                                          Zm < outer_b))
            surround_mask_3d = np.broadcast_arrays(sta,
                                                   surround_mask[..., None])[1]

        sta_center = np.ma.array(sta, mask=center_mask_3d)
        sta_surround = np.ma.array(sta, mask=surround_mask_3d)

        # Average over the spatial axes to get one temporal filter per region.
        sta_center_temporal = np.mean(sta_center, axis=(0, 1))
        sta_surround_temporal = np.mean(sta_surround, axis=(0, 1))

        # Plot both temporal components on twin axes with independent,
        # symmetric y-limits (center left, surround right).
        ax1 = plt.subplot(1, 2, 2)
        l1 = ax1.plot(t, sta_center_temporal,
                      label='Center\n(<{}σ)'.format(inner_b),
                      color='C0')
        sct_max = np.max(np.abs(sta_center_temporal))
        ax1.set_ylim(-sct_max, sct_max)
        ax2 = ax1.twinx()
        l2 = ax2.plot(t, sta_surround_temporal,
                      label='Surround\n({}σ<x<{}σ)'.format(inner_b, outer_b),
                      color='C1')
        sst_max = np.max(np.abs(sta_surround_temporal))
        ax2.set_ylim(-sst_max, sst_max)
        plf.spineless(ax1)
        plf.spineless(ax2)
        ax1.tick_params('y', colors='C0')
        ax2.tick_params('y', colors='C1')
        plt.xlabel('Time[ms]')
        plt.axhline(0, linestyle='dashed', linewidth=1)

        # Combine the legend entries from both twin axes into one legend.
        lines = l1+l2
        labels = [line.get_label() for line in lines]
        plt.legend(lines, labels, fontsize=7)
        plt.title('Temporal components')
        plt.suptitle(f'{exp_name}\n{stimname}\n{clusterids[i]}')

        plt.subplots_adjust(wspace=.5, top=.85)

        plotpath = os.path.join(exp_dir, 'data_analysis',
                                stimname, savefolder)
        if not os.path.isdir(plotpath):
            os.makedirs(plotpath, exist_ok=True)

        plt.savefig(os.path.join(plotpath, clusterids[i])+'.svg',
                    format='svg', dpi=300)
        plt.close()
    print(f'Plotted checkerflicker surround for {stimname}')
def omb_contrastmotion2dnonlin_Qcomps(exp, stim, nbins_nlt=9, cmap='Greys'):
    """
    Calculate and plot the 2D nonlinearities for the OMB stimulus. Multiple
    components of the matrix Q for the motion.

    Parameters:
    ------
        exp, stim:
            Experiment and stimulus identifiers, passed to OMB().
        nbins_nlt:
            Number of bins to be used for dividing the generator signals
            into ranges with equal number of samples.
        cmap:
            Matplotlib colormap name for the 2D nonlinearity images.
    """

    st = OMB(exp, stim)

    # Motion and contrast
    data_cm = np.load(os.path.join(st.exp_dir, 'data_analysis',
                                   st.stimname, 'GQM_motioncontrast_val',
                                   f'{stim}_GQM_motioncontrast_val.npz'))

    qall = data_cm['Qall']
    kall = data_cm['kall']
    muall = data_cm['muall']

    eigvecs = data_cm['eigvecs']
    eigvals = data_cm['eigvals']

    eiginds = [-1, 0]  # activating, suppressing #HINT

    cross_corrs = data_cm['cross_corrs']

    allspikes = st.allspikes()

    stim_mot = st.bgsteps.copy()

    # Bin dimension should be one greater than nonlinearity for pcolormesh
    # compatibility. Otherwise the last row and column of nonlinearity is not
    # plotted.
    all_bins_c = np.zeros((st.nclusters, nbins_nlt+1))
    all_bins_r = np.zeros((st.nclusters, nbins_nlt+1))
    # NOTE(review): these arrays are re-assigned on every (component,
    # direction) iteration of the loops below without a component index,
    # so only the last combination's nonlinearity ends up in the saved
    # file — confirm whether all six should be stored instead.
    nonlinearities = np.zeros((st.nclusters, nbins_nlt, nbins_nlt))

    label = '2D-nonlin_Qallcomps_motion_kcontrast'

    row_labels = ['Activating', 'Suppresive']
    column_labels = ['X', 'Y', r'$\sqrt{X^2 + Y^2}$']

    savedir = os.path.join(st.stim_dir, label)
    os.makedirs(savedir, exist_ok=True)

    for i in range(st.nclusters):
        stim_con = st.contrast_signal_cell(i).squeeze()

        # Build a 2x3 grid of composite panels; each panel holds a main
        # 2D-nonlinearity axis, two marginal bar axes and a colorbar axis.
        n = 3  # x, y, xy
        m = 2  # activating, suppressing
        fig = plt.figure(figsize=(n*5, m*5), constrained_layout=True)
        gs = fig.add_gridspec(m, n)
        axes = []
        for _, eachgs in enumerate(gs):
            subgs = eachgs.subgridspec(2, 3, width_ratios=[4, 1, .2], height_ratios=[1, 4])
            mainax = fig.add_subplot(subgs[1, 0])
            axx = fig.add_subplot(subgs[0, 0], sharex=mainax)
            axy = fig.add_subplot(subgs[1, 1], sharey=mainax)
            cbax = fig.add_subplot(subgs[1, 2])
            axes.append([axx, mainax, axy, cbax])

        for k, eigind in enumerate(eiginds):
            # Project the motion stimulus onto the selected eigenvector of
            # Q, separately for the x and y motion components.
            generator_x = np.convolve(eigvecs[i, 0, :, eigind],
                                      stim_mot[0, :], 'full')[:-st.filter_length+1]
            generator_y = np.convolve(eigvecs[i, 1, :, eigind],
                                      stim_mot[1, :], 'full')[:-st.filter_length+1]

            # Calculate the magnitude of the vector formed by motion generators
            generators = np.vstack([generator_x, generator_y])
            generator_xy = np.sqrt(np.sum(generators**2, axis=0))

            # Project the contrast stimulus onto the linear filter
            generator_c = np.convolve(stim_con,
                                      kall[i, 2, :],
                                      'full')[:-st.filter_length+1]
            spikes = allspikes[i, :]

            generators_motion = [generator_x, generator_y, generator_xy]

            for l, direction in enumerate(column_labels):
                nonlinearity, bins_c, bins_r = nlt.calc_nonlin_2d(spikes,
                                                                  generator_c,
                                                                  generators_motion[l],
                                                                  nr_bins=nbins_nlt)
                # Convert binned spike counts into firing rate (spikes/s).
                nonlinearity /= st.frame_duration

                all_bins_c[i, :] = bins_c
                all_bins_r[i, :] = bins_r
                nonlinearities[i, ...] = nonlinearity

                X, Y = np.meshgrid(bins_c, bins_r, indexing='ij')

                # Panels are stored row-major: row k (component), column l.
                subaxes = axes[k*n+l]

                axmain = subaxes[1]
                axx = subaxes[0]
                axy = subaxes[2]
                cbax = subaxes[3]

                # Normally subplots turns off shared axis tick labels but
                # Gridspec does not do this
                plt.setp(axx.get_xticklabels(), visible=False)
                plt.setp(axy.get_yticklabels(), visible=False)

                im = axmain.pcolormesh(X, Y, nonlinearity, cmap=cmap)
                plf.integerticks(axmain, 6, which='xy')

                cb = plt.colorbar(im, cax=cbax)
                cb.outline.set_linewidth(0)
                cb.ax.set_xlabel('spikes/s')
                cb.ax.xaxis.set_label_position('top')

                plf.integerticks(cb.ax, 4, which='y')
                plf.integerticks(axx, 1, which='y')
                plf.integerticks(axy, 1, which='x')

                barkwargs = dict(alpha=.3, facecolor='k',
                                 linewidth=.5, edgecolor='w')

                # Marginal means of the 2D nonlinearity along each axis.
                axx.bar(nlt.bin_midpoints(bins_c), nonlinearity.mean(axis=1),
                        width=np.ediff1d(bins_c), **barkwargs)
                axy.barh(nlt.bin_midpoints(bins_r), nonlinearity.mean(axis=0),
                         height=np.ediff1d(bins_r), **barkwargs)
                plf.spineless(axx, 'b')
                plf.spineless(axy, 'l')

                # Label only the outer panels to avoid repeated labels.
                if k == 0 and l == 0:
                    axmain.set_xlabel('Projection onto linear contrast filter')
                    axmain.set_ylabel(f'Projection onto Q component')
                if k == 0:
                    axx.set_title(direction)
                if l == 0:
                    axmain.text(-.3, .5, row_labels[k],
                                va='center',
                                rotation=90,
                                transform=axmain.transAxes)

        fig.suptitle(f'{st.exp_foldername}\n{st.stimname}\n{st.clids[i]} '
                     f'2D nonlinearity nsp: {st.allspikes()[i, :].sum():<5.0f}')

        plt.subplots_adjust(top=.85)
        fig.savefig(os.path.join(savedir, st.clids[i]), bbox_inches='tight')
        plt.show()

    # Save results; keys are looked up by name in locals(), so these
    # strings must match the local variable names above exactly.
    keystosave = ['nonlinearities', 'all_bins_c', 'all_bins_r', 'nbins_nlt']
    datadict = {}

    for key in keystosave:
        datadict.update({key: locals()[key]})
    npzfpath = os.path.join(savedir, f'{st.stimnr}_{label}.npz')
    np.savez(npzfpath, **datadict)
def randomizestripes(label, exp_name='20180124', stim_nrs=6):
    """
    Control analysis for stripe flicker STAs: each cell's real spike
    train is replaced by uniformly distributed random spike times, the
    STAs are recomputed from the regenerated stimulus, a correction term
    is subtracted, and the resulting images are saved as SVGs (and
    assembled into a gif via ImageMagick).

    label:
        Name of the output subfolder and gif suffix.
        NOTE(review): interpolated into a shell command at the end —
        avoid shell metacharacters in it.
    exp_name:
        Experiment name; resolved via iof.exp_dir_fixer.
    stim_nrs:
        Stripe flicker stimulus number(s) to process.
    """
    exp_dir = iof.exp_dir_fixer(exp_name)

    # Accept a single stimulus number as well as a list.
    if isinstance(stim_nrs, int):
        stim_nrs = [stim_nrs]

    for stim_nr in stim_nrs:
        stimname = iof.getstimname(exp_name, stim_nr)

        clusters, metadata = asc.read_spikesheet(exp_dir)

        parameters = asc.read_parameters(exp_dir, stim_nr)

        scr_width = metadata['screen_width']
        px_size = metadata['pixel_size(um)']

        stx_w = parameters['stixelwidth']
        stx_h = parameters['stixelheight']

        # Stripes are much taller than wide; anything else is not
        # a stripe flicker stimulus.
        if (stx_h/stx_w) < 2:
            raise ValueError('Make sure the stimulus is stripeflicker.')

        # Number of stripes across the screen.
        sy = scr_width/stx_w
#        sy = sy*4
        sy = int(sy)

        nblinks = parameters['Nblinks']
        try:
            bw = parameters['blackwhite']
        except KeyError:
            bw = False

        try:
            seed = parameters['seed']
            initialseed = parameters['seed']
        except KeyError:
            seed = -10000
            initialseed = -10000

        if nblinks == 1:
            ft_on, ft_off = asc.readframetimes(exp_dir, stim_nr,
                                               returnoffsets=True)
            # Initialize empty array twice the size of one of them, assign
            # value from on or off to every other element.
            frametimings = np.empty(ft_on.shape[0]*2, dtype=float)
            frametimings[::2] = ft_on
            frametimings[1::2] = ft_off
            # Set filter length so that temporal filter is ~600 ms.
            # The unit here is number of frames.
            filter_length = 40
        elif nblinks == 2:
            frametimings = asc.readframetimes(exp_dir, stim_nr)
            filter_length = 20
        else:
            raise ValueError('Unexpected value for nblinks.')

        # Omit everything that happens before the first 10 seconds
        cut_time = 10

        frame_duration = np.average(np.ediff1d(frametimings))
        # Only a quarter of the recorded frames is used here —
        # presumably to keep the randomized control fast; confirm.
        total_frames = int(frametimings.shape[0]/4)

        all_spiketimes = []
        # Store spike triggered averages in a list containing correct
        # shaped arrays
        stas = []

        for i in range(len(clusters[:, 0])):
            spikes_orig = asc.read_raster(exp_dir, stim_nr,
                                         clusters[i, 0], clusters[i, 1])
            spikesneeded = spikes_orig.shape[0]*1000

            # Replace the real spike train with many uniformly random
            # spike times over the same recording span (the control).
            spiketimes = np.random.random_sample(spikesneeded)*spikes_orig.max()
            spiketimes = np.sort(spiketimes)
            spikes = asc.binspikes(spiketimes, frametimings)
            all_spiketimes.append(spikes)
            stas.append(np.zeros((sy, filter_length)))

        # Regenerate the stimulus from the pseudo-random number generator.
        if bw:
            randnrs, seed = randpy.ran1(seed, sy*total_frames)
#            randnrs = mersennetw(sy*total_frames, seed1=seed)
            randnrs = [1 if i > .5 else -1 for i in randnrs]
        else:
            randnrs, seed = randpy.gasdev(seed, sy*total_frames)

        stimulus = np.reshape(randnrs, (sy, total_frames), order='F')
        del randnrs

        for k in range(filter_length, total_frames-filter_length+1):
            # Stimulus snippet preceding frame k, newest frame first.
            stim_small = stimulus[:, k-filter_length+1:k+1][:, ::-1]
            for j in range(clusters.shape[0]):
                spikes = all_spiketimes[j]
                if spikes[k] != 0 and frametimings[k]>cut_time:
                    stas[j] += spikes[k]*stim_small

        max_inds = []

        spikenrs = np.array([spikearr.sum() for spikearr in all_spiketimes])

        quals = np.array([])

        for i in range(clusters.shape[0]):
            # Normalize by the number of spikes.
            stas[i] = stas[i]/spikenrs[i]
            # Find the pixel with largest absolute value
            max_i = np.squeeze(np.where(np.abs(stas[i])
                                        == np.max(np.abs(stas[i]))))
            # If there are multiple pixels with largest value,
            # take the first one.
            if max_i.shape != (2,):
                try:
                    max_i = max_i[:, 0]
                # If max_i cannot be found just set it to zeros.
                except IndexError:
                    max_i = np.array([0, 0])

            max_inds.append(max_i)

            quals = np.append(quals, asc.staquality(stas[i]))

#        savefname = str(stim_nr)+'_data'
#        savepath = pjoin(exp_dir, 'data_analysis', stimname)
#
#        exp_name = os.path.split(exp_dir)[-1]
#
#        if not os.path.isdir(savepath):
#            os.makedirs(savepath, exist_ok=True)
#        savepath = os.path.join(savepath, savefname)
#
#        keystosave = ['stas', 'max_inds', 'clusters', 'sy',
#                      'frame_duration', 'all_spiketimes', 'stimname',
#                      'total_frames', 'stx_w', 'spikenrs', 'bw',
#                      'quals', 'nblinks', 'filter_length', 'exp_name']
#        data_in_dict = {}
#        for key in keystosave:
#            data_in_dict[key] = locals()[key]
#
#        np.savez(savepath, **data_in_dict)
#        print(f'Analysis of {stimname} completed.')


        clusterids = plf.clusters_to_ids(clusters)

#        assert(initialseed.ty)
        # Correction term computed from the initial seed, broadcast over
        # the whole filter length before subtracting from each STA.
        correction = corrector(sy, total_frames, filter_length, initialseed)
        correction = np.outer(correction, np.ones(filter_length))

        # Time axis (ms) and vertical extent of the screen (mm).
        t = np.arange(filter_length)*frame_duration*1000
        vscale = int(stas[0].shape[0] * stx_w*px_size/1000)
        for i in range(clusters.shape[0]):
            sta = stas[i]-correction

            vmax = 0.03
            vmin = -vmax
            plt.figure(figsize=(6, 15))
            ax = plt.subplot(111)
            im = ax.imshow(sta, cmap='RdBu', vmin=vmin, vmax=vmax,
                           extent=[0, t[-1], -vscale, vscale], aspect='auto')
            plt.xlabel('Time [ms]')
            plt.ylabel('Distance [mm]')

            plf.spineless(ax)
            plf.colorbar(im, ticks=[vmin, 0, vmax], format='%.2f', size='2%')
            plt.suptitle('{}\n{}\n'
                         '{} Rating: {}\n'
                         'nrofspikes {:5.0f}'.format(exp_name,
                                                       stimname,
                                                       clusterids[i],
                                                       clusters[i][2],
                                                       spikenrs[i]))
            plt.subplots_adjust(top=.90)
            savepath = os.path.join(exp_dir, 'data_analysis',
                                    stimname, 'STAs_randomized')
            svgpath = pjoin(savepath, label)
            if not os.path.isdir(svgpath):
                os.makedirs(svgpath, exist_ok=True)
            plt.savefig(os.path.join(svgpath, clusterids[i]+'.svg'),
                        bbox_inches='tight')
            plt.close()

    # Assemble the per-cell SVGs into an animated gif with ImageMagick.
    # NOTE(review): runs through the shell with interpolated paths, and
    # svgpath/savepath are only defined if the loops above ran at least
    # once — would raise NameError for empty stim_nrs.
    os.system(f"convert -delay 25 {svgpath}/*svg {savepath}/animated_{label}.gif")
def OMSpatchesanalyzer(exp_name, stim_nrs):
    """
    Analyze and plot the responses to object motion patches stimulus.

    For each cluster the spikes are split into 'local' (differential) and
    'global' (coherent) motion trials, trial-averaged firing rates are
    computed, and the object motion sensitivity index (OMSI) is derived
    and plotted per cell plus as a population distribution.

    exp_name:
        Experiment name; resolved via iof.exp_dir_fixer.
    stim_nrs:
        OMS patches stimulus number(s); an empty sequence is a no-op.
    """

    exp_dir = iof.exp_dir_fixer(exp_name)

    exp_name = os.path.split(exp_dir)[-1]

    # Accept a single stimulus number; bail out on an empty list.
    if isinstance(stim_nrs, int):
        stim_nrs = [stim_nrs]
    elif len(stim_nrs) == 0:
        return

    clusters, metadata = asc.read_spikesheet(exp_dir, cutoff=4)
    clusterids = plf.clusters_to_ids(clusters)
    # OMSI per cluster and per stimulus, for the population plot below.
    all_omsi = np.empty((clusters.shape[0], len(stim_nrs)))
    stimnames = []
    for stim_index, stim_nr in enumerate(stim_nrs):
        stim_nr = str(stim_nr)

        stimname = iof.getstimname(exp_dir, stim_nr)
        stimnames.append(stimname)

        parameters = asc.read_parameters(exp_dir, stim_nr)

        refresh_rate = metadata['refresh_rate']

        nblinks = parameters.get('Nblinks', 1)
        seed = parameters.get('seed', -10000)
        stim_duration = parameters.get('stimFrames', 1400)
        # The duration in the parameters refers to the total duration of both
        # epochs. We divide by two to get the length of a single stim_duration
        stim_duration = int(stim_duration / 2)
        prefr_duration = parameters.get('preFrames', 100)

        frametimings = asc.readframetimes(exp_dir, stim_nr)

        # ntrials is the number of trials containing both
        ntrials = np.floor((frametimings.shape[0] / (stim_duration + 1))) / 2
        ntrials = ntrials.astype(int)
        # Truncate to whole trials, then reshape so each row is one epoch;
        # even rows are local (differential), odd rows global (coherent).
        frametimings_rs = frametimings[:ntrials * 2 * (stim_duration + 1)]
        frametimings_rs = frametimings_rs.reshape(
            (ntrials * 2, stim_duration + 1))

        ft_local = frametimings_rs[::2][:, :-1]
        ft_global = frametimings_rs[1::2][:, :-1]

        localspikes = np.empty((clusters.shape[0], ntrials, stim_duration))
        globalspikes = np.empty((clusters.shape[0], ntrials, stim_duration))

        # Bin each cell's spikes into the local and global trial windows.
        for i, cluster in enumerate(clusters):
            spikes = asc.read_raster(exp_name, stim_nr, cluster[0], cluster[1])
            for j in range(ntrials):
                localspikes[i, j, :] = asc.binspikes(spikes, ft_local[j, :])
                globalspikes[i, j, :] = asc.binspikes(spikes, ft_global[j, :])

        # Trial-averaged responses per cell.
        response_local = localspikes.mean(axis=1)
        response_global = globalspikes.mean(axis=1)

        # Differential and coherent firing rates
        fr_d = response_local.mean(axis=1)
        fr_c = response_global.mean(axis=1)

        # Calculate object motion sensitivity index (OMSI) as defined in
        # Kühn et al, 2016
        # There the first second of each trial is discarded, here it does not
        # seem to be very different from the rest.
        omsi = (fr_d - fr_c) / (fr_d + fr_c)

        # Create a time array for plotting
        time = np.linspace(0,
                           stim_duration * 2 / refresh_rate,
                           num=stim_duration)

        savepath = os.path.join(exp_dir, 'data_analysis', stimname)
        if not os.path.isdir(savepath):
            os.makedirs(savepath, exist_ok=True)

        # Per-cell figure: spike raster (local over global) on top,
        # trial-averaged firing rates below.
        for i, cluster in enumerate(clusters):
            gs = gridspec.GridSpec(2, 1)
            ax1 = plt.subplot(gs[0])
            ax2 = plt.subplot(gs[1])

            rastermat = np.vstack(
                (localspikes[i, :, :], globalspikes[i, :, :]))
            ax1.matshow(rastermat, cmap='Greys')
            # Red line separates local trials (top) from global (bottom).
            ax1.axhline(ntrials - 1, color='r', lw=.1)
            ax1.plot([0, 0], [ntrials, 0])
            ax1.plot([0, 0], [ntrials * 2, ntrials])
            ax1.set_xticks([])
            ax1.set_yticks([])
            plf.spineless(ax1)

            ax2.plot(time, response_local[i, :], label='Local')
            ax2.plot(time, response_global[i, :], label='Global')
            ax2.set_xlabel('Time [s]')
            ax2.set_ylabel('Average firing rate [au]')
            ax2.set_xlim([time.min(), time.max()])
            plf.spineless(ax2, 'tr')
            ax2.legend(fontsize='x-small')

            plt.suptitle(f'{exp_name}\n{stimname}\n'
                         f'{clusterids[i]} OMSI: {omsi[i]:4.2f}')
            plt.tight_layout()
            plt.savefig(os.path.join(savepath, clusterids[i] + '.svg'),
                        bbox_inches='tight')
            plt.close()
        # Save results; keys are looked up by name in locals(), so these
        # strings must match the local variable names above.
        keystosave = [
            'nblinks', 'refresh_rate', 'stim_duration', 'prefr_duration',
            'ntrials', 'response_local', 'response_global', 'fr_d', 'fr_c',
            'omsi', 'clusters'
        ]
        datadict = {}

        for key in keystosave:
            datadict[key] = locals()[key]

        npzfpath = os.path.join(savepath, str(stim_nr) + '_data')
        np.savez(npzfpath, **datadict)
        all_omsi[:, stim_index] = omsi
    print(f'Analysis of {stimname} completed.')
    # Draw the distribution of the OMSI for all OMSI stimuli
    # If there is only one OMS stimulus, draw it in the same folder
    # If there are multiple stimuli, save it in the data analysis folder
    if len(stim_nrs) == 1:
        pop_plot_savepath = os.path.join(savepath, 'omsi_population.svg')
    else:
        pop_plot_savepath = os.path.split(savepath)[0]
        pop_plot_savepath = os.path.join(pop_plot_savepath, 'all_omsi.svg')

    plt.figure(figsize=(5, 2 * len(stim_nrs)))
    ax2 = plt.subplot(111)
    # Seed per stimulus so the vertical jitter is reproducible.
    for j, stim_nr in enumerate(stim_nrs):
        np.random.seed(j)
        ax2.scatter(all_omsi[:, j],
                    j + (np.random.random(omsi.shape) - .5) / 1.1)
    np.random.seed()
    ax2.set_yticks(np.arange(len(stim_nrs)))
    ax2.set_yticklabels(stimnames, fontsize='xx-small', rotation='45')
    ax2.set_xlabel('Object-motion sensitivity index')
    ax2.set_title(f'{exp_name}\nDistribution of OMSI')
    plf.spineless(ax2, 'tr')
    plt.savefig(pop_plot_savepath, bbox_inches='tight')
    plt.close()
def onoffstepsanalyzer(exp_name, stim_nrs):
    """
    Analyze onoffsteps data, plot and save it. Will make a directory
    /data_analysis/<stimulus_name> and save svg [and pdf in subfolder.].

    For each cluster a raster plot and a trial-averaged firing rate are
    drawn; an ON-OFF bias index and a baseline firing rate are computed
    and stored together with the firing rates in an .npz file.

    Parameters:
        exp_name:
            Experiment name.
        stim_nrs:
            Order of the onoff steps stimulus. A single int or a
            list of ints.
    """

    def _nearest_bin(time_axis, value, step):
        # Index of the last time bin lying within ~half a bin of `value`.
        # Plain int() replaces np.asscalar, which was removed in NumPy 1.23.
        return int(np.where(np.abs(time_axis - value) < step / 1.5)[0][-1])

    exp_dir = iof.exp_dir_fixer(exp_name)

    exp_name = os.path.split(exp_dir)[-1]

    if isinstance(stim_nrs, int):
        stim_nrs = [stim_nrs]

    for stim_nr in stim_nrs:
        stim_nr = str(stim_nr)

        stimname = iof.getstimname(exp_dir, stim_nr)

        clusters, metadata = asc.read_spikesheet(exp_dir, cutoff=4)

        clusterids = plf.clusters_to_ids(clusters)

        parameters = asc.read_parameters(exp_dir, stim_nr)

        refresh_rate = metadata['refresh_rate']

        # Divide by the refresh rate to convert from number of
        # frames to seconds
        pars_stim_duration = parameters['Nframes'] / refresh_rate

        pars_preframe_duration = parameters.get('preframes', 0) / refresh_rate

        # Without preframes each trial has only ON and OFF periods;
        # with preframes there are four periods per trial.
        if pars_preframe_duration == 0:
            nopreframe = True
            nr_periods = 2
        else:
            nopreframe = False
            nr_periods = 4
        # The first trial will be discarded by dropping the first four frames
        # If we don't save the original and re-initialize for each cell,
        # frametimings will get smaller over time.
        frametimings_original = asc.readframetimes(exp_dir, stim_nr)

        trial_durs = stim_prefr_durations_frametimes(frametimings_original,
                                                     nr_per=nr_periods)
        avg_trial_durs = trial_durs.mean(axis=0)

        if not nopreframe:
            stim_duration = avg_trial_durs[1::2].mean()
            preframe_duration = avg_trial_durs[::2].mean()
        else:
            stim_duration = avg_trial_durs.mean()
            preframe_duration = 0
            warnings.warn('On-off steps analysis with no preframes '
                          'is not tested, proceed with caution.')

        contrast = parameters['contrast']

        total_cycle = avg_trial_durs.sum()

        # Set the bins to be 10 ms
        tstep = 0.01
        bins = int(total_cycle / tstep) + 1
        t = np.linspace(0, total_cycle, num=bins)

        # Setup for onoff bias calculation
        onbegin = preframe_duration
        onend = onbegin + stim_duration
        offbegin = onend + preframe_duration
        offend = offbegin + stim_duration

        # Determine the bin indices delimiting the ON and OFF periods
        a = [_nearest_bin(t, i, tstep)
             for i in (onbegin, onend, offbegin, offend)]

        # To exclude stimulus offset affecting the bias, use
        # last 1 second of preframe period
        prefs = [_nearest_bin(t, i, tstep)
                 for i in (onbegin - 1, onbegin, offbegin - 1, offbegin)]

        onper = slice(a[0], a[1])
        offper = slice(a[2], a[3])

        pref1 = slice(prefs[0], prefs[1])
        pref2 = slice(prefs[2], prefs[3])

        onoffbias = np.empty(clusters.shape[0])
        baselines = np.empty(clusters.shape[0])

        savedir = os.path.join(exp_dir, 'data_analysis', stimname)
        os.makedirs(os.path.join(savedir, 'pdf'), exist_ok=True)

        # Collect all firing rates in a list
        all_frs = []

        for i in range(len(clusters[:, 0])):
            spikes = asc.read_raster(exp_dir, stim_nr, clusters[i, 0],
                                     clusters[i, 1])
            frametimings = frametimings_original
            # Discard all the spikes that happen after the last frame
            spikes = spikes[spikes < frametimings[-1]]
            # Discard the first trial
            spikes = spikes[spikes > frametimings[4]]
            frametimings = frametimings[4:]
            # Find which trial each spike belongs to, and subtract one
            # to be able to use as indices
            trial_indices = np.digitize(spikes, frametimings[::4]) - 1

            rasterplot = []
            # Iterate over all the trials, create an empty array for each
            for j in range(int(np.ceil(frametimings.max() / total_cycle))):
                rasterplot.append([])
            # plt.eventplot requires a list containing spikes in each
            # trial separately
            for k in range(len(spikes)):
                trial = trial_indices[k]
                rasterplot[trial].append(spikes[k] - frametimings[::4][trial])

            # Workaround for matplotlib issue #6412.
            # https://github.com/matplotlib/matplotlib/issues/6412
            # If a cell has no spikes for the first trial i.e. the first
            # element of the list is empty, an error is raised due to
            # a plt.eventplot bug.
            if len(rasterplot[0]) == 0:
                rasterplot[0] = [-1]

            plt.figure(figsize=(9, 9))
            ax1 = plt.subplot(211)
            plt.eventplot(rasterplot, linewidth=.5, color='r')
            # Set the axis so they align with the rectangles
            plt.axis([0, total_cycle, -1, len(rasterplot)])

            # Draw rectangles to represent different parts of the on off
            # steps stimulus
            plf.drawonoff(ax1,
                          preframe_duration,
                          stim_duration,
                          contrast=contrast)

            plt.ylabel('Trial')
            plt.gca().invert_yaxis()
            ax1.set_xticks([])
            plf.spineless(ax1)

            # Collect all trials in one array to calculate firing rates
            ras = np.array([])
            for ii in range(len(rasterplot)):
                ras = np.append(ras, rasterplot[ii])

            # Sort into time bins and count how many spikes happened in each
            fr = np.digitize(ras, t)
            fr = np.bincount(fr)
            # Normalize so that units are spikes/s
            fr = fr * (bins / total_cycle) / (len(rasterplot) - 1)
            # Equalize the length of the two arrays for plotting.
            # np.bincount(x) normally produces x.max()+1 bins
            if fr.shape[0] == bins + 1:
                fr = fr[:-1]
            # If there aren't any spikes at the last trial, the firing
            # rates array is too short and plt.plot raises error.
            while fr.shape[0] < bins:
                fr = np.append(fr, 0)

            # Baseline: median firing rate over the last second of the
            # two preframe periods.
            prefr = np.append(fr[pref1], fr[pref2])
            baseline = np.median(np.round(prefr))

            fr_corr = fr - baseline

            r_on = np.sum(fr_corr[onper])
            r_off = np.sum(fr_corr[offper])

            # Bias in [-1, 1]: +1 purely ON responding, -1 purely OFF.
            if r_on == 0 and r_off == 0:
                bias = np.nan
            else:
                bias = (r_on - r_off) / (np.abs(r_on) + np.abs(r_off))

            plt.suptitle(f'{exp_name}\n{stimname}'
                         f'\n{clusterids[i]} Rating: {clusters[i][2]}\n')

            # Cells with a peak rate below 20 spikes/s are treated as
            # unresponsive; their bias would be noise.
            if fr.max() < 20:
                bias = np.nan

            onoffbias[i] = bias
            baselines[i] = baseline

            all_frs.append(fr)

            ax2 = plt.subplot(212)
            plt.plot(t, fr)
            for eachslice in [onper, offper]:
                ax2.fill_between(t[eachslice],
                                 fr[eachslice],
                                 baseline,
                                 where=fr[eachslice] > baseline,
                                 facecolor='lightgray')

            plf.spineless(ax2)
            plt.axis([0, total_cycle, fr.min(), fr.max()])

            plt.title(f'Baseline: {baseline:2.0f} Hz Bias: {bias:0.2f}')
            plt.xlabel('Time[s]')
            plt.ylabel('Firing rate[spikes/s]')

            # Save as svg for looking through data, pdf for
            # inserting into presentations
            plt.savefig(os.path.join(
                savedir,
                '{:0>3}{:0>2}.svg'.format(clusters[i, 0], clusters[i, 1])),
                        format='svg',
                        bbox_inches='tight')
            plt.savefig(os.path.join(
                savedir, 'pdf',
                '{:0>3}{:0>2}.pdf'.format(clusters[i, 0], clusters[i, 1])),
                        format='pdf',
                        bbox_inches='tight')
            plt.close()

        keystosave = [
            'clusters', 'total_cycle', 'bins', 'tstep', 'stimname',
            'stim_duration', 'preframe_duration', 'contrast', 'all_frs', 't',
            'exp_name', 'onoffbias', 'baselines'
        ]
        data_in_dict = {}
        for key in keystosave:
            data_in_dict[key] = locals()[key]

        np.savez(os.path.join(savedir, stim_nr + '_data'), **data_in_dict)
        print(f'Analysis of {stimname} completed.')
# Exemple #28
# 0
def plotcheckersvd(expname, stimnr, filename=None):
    """
    Plot the first two components of SVD analysis.

    For each cluster, the center-pixel frame and the first two SVD
    spatial components are shown with a shared symmetric color scale,
    together with the corresponding temporal components.

    Parameters:
        expname:
            Experiment name.
        stimnr:
            Number of the checkerflicker stimulus.
        filename:
            Optional name of the analysis .npz file to load; also used
            to label the save folder. None loads the default output.
    """
    if filename:
        filename = str(filename)

    exp_dir = iof.exp_dir_fixer(expname)
    _, metadata = asc.read_spikesheet(exp_dir)
    px_size = metadata['pixel_size(um)']

    if not filename:
        savefolder = 'SVD'
        label = ''
    else:
        # str.strip('.npz') would remove any leading/trailing '.', 'n',
        # 'p' or 'z' characters (e.g. 'zone.npz' -> 'one'); remove the
        # extension explicitly instead.
        if filename.endswith('.npz'):
            label = filename[:-len('.npz')]
        else:
            label = filename
        savefolder = 'SVD_' + label

    data = iof.load(expname, stimnr, filename)

    stas = data['stas']
    max_inds = data['max_inds']
    clusters = data['clusters']
    stx_h = data['stx_h']
    frame_duration = data['frame_duration']
    stimname = data['stimname']
    exp_name = data['exp_name']

    clusterids = plf.clusters_to_ids(clusters)

    # Determine frame size so that the total frame covers
    # an area large enough i.e. 2*700um
    f_size = int(700 / (stx_h * px_size))

    for i in range(clusters.shape[0]):
        sta = stas[i]
        max_i = max_inds[i]

        try:
            sta, max_i = msc.cut_around_center(sta, max_i, f_size=f_size)
        except ValueError:
            # STA center too close to the edge for the requested frame.
            continue
        fit_frame = sta[:, :, max_i[2]]

        try:
            sp1, sp2, t1, t2, _, _ = msc.svd(sta)
        # If the STA is noisy (msc.cut_around_center produces an empty array)
        # SVD cannot be calculated, in this case we skip that cluster.
        except np.linalg.LinAlgError:
            continue

        plotthese = [fit_frame, sp1, sp2]

        plt.figure(dpi=200)
        plt.suptitle(f'{exp_name}\n{stimname}\n{clusterids[i]}')
        rows = 2
        cols = 3

        # Symmetric color scale around zero, shared by all three images.
        vmax = np.max(np.abs([sp1, sp2]))
        vmin = -vmax

        for j in range(len(plotthese)):
            ax = plt.subplot(rows, cols, j + 1)
            im = plt.imshow(plotthese[j],
                            vmin=vmin,
                            vmax=vmax,
                            cmap=iof.config('colormap'))
            ax.set_aspect('equal')
            plt.xticks([])
            plt.yticks([])
            # Color-code each subplot's frame so spatial components can
            # be matched to their temporal traces at a glance.
            for child in ax.get_children():
                if isinstance(child, matplotlib.spines.Spine):
                    child.set_color('C{}'.format(j % 3))
                    child.set_linewidth(2)
            if j == 0:
                plt.title('center px')
            elif j == 1:
                plt.title('SVD spatial 1')
            elif j == 2:
                plt.title('SVD spatial 2')
                plf.colorbar(im, ticks=[vmin, 0, vmax], format='%.2f')
                # 100 um scale bar, converted to stixel units.
                barsize = 100 / (stx_h * px_size)
                scalebar = AnchoredSizeBar(ax.transData,
                                           barsize,
                                           '100 µm',
                                           'lower left',
                                           pad=0,
                                           color='k',
                                           frameon=False,
                                           size_vertical=.3)
                ax.add_artist(scalebar)

        # Time axis in milliseconds.
        t = np.arange(sta.shape[-1]) * frame_duration * 1000
        plt.subplots_adjust(wspace=0.3, hspace=0)
        ax = plt.subplot(rows, 1, 2)
        plt.plot(t, sta[max_i[0], max_i[1], :], label='center px')
        plt.plot(t, t1, label='Temporal 1')
        plt.plot(t, t2, label='Temporal 2')
        plt.xlabel('Time[ms]')
        plf.spineless(ax, 'trlb')  # Turn off spines using custom function

        plotpath = os.path.join(exp_dir, 'data_analysis', stimname, savefolder)
        # exist_ok makes the isdir pre-check unnecessary.
        os.makedirs(plotpath, exist_ok=True)
        plt.savefig(os.path.join(plotpath, clusterids[i] + '.svg'), dpi=300)
        plt.close()
    print(f'Plotted checkerflicker SVD for {stimname}')
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Scratch script: compares plt.eventplot output with and without an
explicit horizontal orientation, and tries building a ragged nested
list as a numpy array.

Created on Wed Nov 29 11:52:37 2017

@author: ycan
"""
import plotfuncs as plf
# NOTE(review): `np`, `plt` and `spikes` are neither imported nor
# defined in this snippet -- presumably it relies on names left in an
# interactive namespace; it raises NameError if run standalone.
# `raster` is assigned but never used afterwards.
raster = np.resize(spikes, (800, ))

# Top panel: eventplot with explicit horizontal orientation.
ax = plt.subplot(211)
plt.eventplot(spikes, orientation='horizontal')
plf.spineless(ax)
# Bottom panel: eventplot with the default orientation, for comparison.
ax = plt.subplot(212)
plt.eventplot(spikes)
plf.spineless(ax)

# Ragged nested lists produced an object array in older NumPy; newer
# versions require an explicit dtype=object -- TODO confirm against the
# NumPy version in use. The bare `a[0]` only has a visible effect in an
# interactive session.
a = np.array([[1, 2, 3], [3, 4], [5, 6, 3, 2, 6, 3, 6, 2, 5, 7]])
a[0]
# Exemple #30
# 0
def allfff(exp_name, stim_nrs):
    """
    Plot all of the full field flicker STAs on top of each other
    to see the progression of the cell responses, their firing rates.

    Parameters:
        exp_name:
            Experiment name.
        stim_nrs:
            Stimulus numbers of the full field flicker stimuli. At
            least two are needed for a comparison; otherwise the
            analysis is skipped.
    """

    if isinstance(stim_nrs, int) or len(stim_nrs) <= 1:
        print('Multiple full field flicker stimuli expected, '
              'allfff analysis will be skipped.')
        return

    exp_dir = iof.exp_dir_fixer(exp_name)
    exp_name = os.path.split(exp_dir)[-1]

    # Sanity check to ensure we are comparing the same stimuli and parameters
    prev_parameters = {}
    for i in stim_nrs:
        pars = asc.read_parameters(exp_name, i)
        currentfname = pars.pop('filename')
        if len(prev_parameters) == 0:
            prev_parameters = pars
        # Compare by key over the union of both key sets. Zipping the
        # two key sequences (as before) pairs keys by position, which
        # mispairs values when the dicts are ordered differently and
        # silently ignores extra keys when the lengths differ.
        for key in set(pars) | set(prev_parameters):
            if pars.get(key) != prev_parameters.get(key):
                raise ValueError(
                    f'Parameters for {currentfname} do not match!\n'
                    f'{key}:{pars.get(key)}\n'
                    f'{key}:{prev_parameters.get(key)}')

    stimnames = []
    for j, stim in enumerate(stim_nrs):
        data = iof.load(exp_name, stim)
        stas = data['stas']
        clusters = data['clusters']
        filter_length = data['filter_length']
        frame_duration = data['frame_duration']
        if j == 0:
            # NOTE(review): assumes every stimulus has the same cluster
            # count, filter length and frame duration -- the parameter
            # check above covers the stimulus settings but not the
            # clusters; verify if that can differ.
            all_stas = np.zeros(
                (clusters.shape[0], filter_length, len(stim_nrs)))
            all_spikenrs = np.zeros((clusters.shape[0], len(stim_nrs)))
        all_stas[:, :, j] = stas
        all_spikenrs[:, j] = data['spikenrs']
        stimnames.append(iof.getstimname(exp_name, stim))

    # NOTE(review): the axis is labeled in ms but frame_duration is
    # presumably in seconds -- confirm the intended units.
    t = np.linspace(0, frame_duration * filter_length, num=filter_length)

    clusterids = plf.clusters_to_ids(clusters)
    plotpath = os.path.join(exp_dir, 'data_analysis', 'all_fff')
    # The output directory does not depend on the cluster; create it once.
    os.makedirs(plotpath, exist_ok=True)
    for i in range(clusters.shape[0]):
        fig = plt.figure()
        ax1 = plt.subplot(111)
        ax1.plot(t, all_stas[i, :, :])
        ax1.set_xlabel('Time [ms]')
        ax1.legend(stimnames, fontsize='x-small')
        # Inset axes showing the spike count for each stimulus.
        ax2 = fig.add_axes([.65, .15, .2, .2])
        for j in range(len(stim_nrs)):
            ax2.plot(j, all_spikenrs[i, j], 'o')
        ax2.set_ylabel('# spikes', fontsize='small')
        ax2.set_xticks([])
        ax2.patch.set_alpha(0)
        plf.spineless(ax1, 'tr')
        plf.spineless(ax2, 'tr')
        plt.suptitle(f'{exp_name}\n {clusterids[i]}')
        plt.savefig(os.path.join(plotpath, clusterids[i]) + '.svg',
                    format='svg',
                    dpi=300)
        plt.close()
    print('Plotted full field flicker STAs together from all stimuli.')