Example #1
    def get_frametimings(self):
        try:
            filter_length, frametimings = asc.ft_nblinks(
                self.exp, self.stimnr, self.param_file.get('Nblinks', None),
                self.refresh_rate)
        except ValueError as e:
            if str(e).startswith('Unexpected value for nblinks'):
                # Fall back to the raw frame times when Nblinks is unusable.
                frametimings = asc.readframetimes(self.exp, self.stimnr)
                filter_length = None
            else:
                raise
        frametimings = frametimings[:self.maxframes]
        self.filter_length = filter_length
        self.frametimings = frametimings
        self.frame_duration = np.ediff1d(frametimings).mean()
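A minimal illustration of the frame-duration computation above, with
hypothetical timestamps (np.ediff1d returns the differences between
consecutive elements):
import numpy as np
ft = np.array([0.0, 0.0167, 0.0334, 0.0501])  # hypothetical 60 Hz frame onsets
print(np.ediff1d(ft).mean())  # ~0.0167 s per frame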
Example #2
def savenpztomat(exp_name, savedir=None):
    """
    Convert frametime files in .npz to .mat for interoperability
    with MATLAB users.

    savedir
    """
    exp_dir = iof.exp_dir_fixer(exp_name)

    _, metadata = asc.read_spikesheet(exp_dir)
    monitor_delay = metadata['monitor_delay(s)']

    for i in range(1, 100):
        print(i)
        try:
            ft_on, ft_off = asc.readframetimes(exp_name, i, returnoffsets=True)
        except ValueError as e:
            if str(e).startswith('No frametimes'):
                break
            else:
                raise
        # Convert to milliseconds because that is the convention
        # in the MATLAB scripts
        ft_on = (ft_on - monitor_delay) * 1000
        ft_off = (ft_off - monitor_delay) * 1000

        stimname = iof.getstimname(exp_dir, i)

        if savedir is None:
            savedir = pjoin(exp_dir, 'frametimes')
        savename = pjoin(savedir, stimname)
        print(savename)
        scipy.io.savemat(savename + '_frametimings', {
            'ftimes': ft_on,
            'ftimes_offsets': ft_off
        },
                         appendmat=True)
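A hedged usage sketch (the experiment name below is hypothetical):
savenpztomat('20180124')                      # .mat files go to <exp>/frametimes
savenpztomat('20180124', savedir='/tmp/mat')  # or to a custom directory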
Example #3
def onoffstepsanalyzer(exp_name, stim_nrs):
    """
    Analyze onoffsteps data, plot and save it. Will make a directory
    /data_analysis/<stimulus_name> and save svg [and pdf in subfolder.].

    Parameters:
        exp_name:
            Experiment name.
        stim_nr:
            Order of the onoff steps stimulus.

    """

    exp_dir = iof.exp_dir_fixer(exp_name)

    exp_name = os.path.split(exp_dir)[-1]

    if isinstance(stim_nrs, int):
        stim_nrs = [stim_nrs]

    for stim_nr in stim_nrs:
        stim_nr = str(stim_nr)

        stimname = iof.getstimname(exp_dir, stim_nr)

        clusters, metadata = asc.read_spikesheet(exp_dir, cutoff=4)

        clusterids = plf.clusters_to_ids(clusters)

        parameters = asc.read_parameters(exp_dir, stim_nr)

        refresh_rate = metadata['refresh_rate']

        # Divide by the refresh rate to convert from number of
        # frames to seconds
        pars_stim_duration = parameters['Nframes'] / refresh_rate

        pars_preframe_duration = parameters.get('preframes', 0) / refresh_rate

        if pars_preframe_duration == 0:
            nopreframe = True
            nr_periods = 2
        else:
            nopreframe = False
            nr_periods = 4
        # The first trial will be discarded by dropping the first four frames
        # If we don't save the original and re-initialize for each cell,
        # frametimings will get smaller over time.
        frametimings_original = asc.readframetimes(exp_dir, stim_nr)

        trial_durs = stim_prefr_durations_frametimes(frametimings_original,
                                                     nr_per=nr_periods)
        avg_trial_durs = trial_durs.mean(axis=0)

        if not nopreframe:
            stim_duration = avg_trial_durs[1::2].mean()
            preframe_duration = avg_trial_durs[::2].mean()
        else:
            stim_duration = avg_trial_durs.mean()
            preframe_duration = 0
            warnings.warn('On-off steps analysis with no preframes '
                          'is not tested, proceed with caution.')

        contrast = parameters['contrast']

        total_cycle = avg_trial_durs.sum()

        # Set the bins to be 10 ms
        tstep = 0.01
        bins = int(total_cycle / tstep) + 1
        t = np.linspace(0, total_cycle, num=bins)

        # Setup for onoff bias calculation
        onbegin = preframe_duration
        onend = onbegin + stim_duration
        offbegin = onend + preframe_duration
        offend = offbegin + stim_duration

        # Determine the indices for each period
        a = []
        for i in [onbegin, onend, offbegin, offend]:
            yo = np.where(np.abs(t - i) < tstep / 1.5)[0][-1].item()
            a.append(yo)

        # To exclude stimulus offset affecting the bias, use
        # last 1 second of preframe period
        prefs = []
        for i in [onbegin - 1, onbegin, offbegin - 1, offbegin]:
            yo = np.where(np.abs(t - i) < tstep / 1.5)[0][-1].item()
            prefs.append(yo)

        onper = slice(a[0], a[1])
        offper = slice(a[2], a[3])

        pref1 = slice(prefs[0], prefs[1])
        pref2 = slice(prefs[2], prefs[3])

        onoffbias = np.empty(clusters.shape[0])
        baselines = np.empty(clusters.shape[0])

        savedir = os.path.join(exp_dir, 'data_analysis', stimname)
        os.makedirs(os.path.join(savedir, 'pdf'), exist_ok=True)

        # Collect all firing rates in a list
        all_frs = []

        for i in range(len(clusters[:, 0])):
            spikes = asc.read_raster(exp_dir, stim_nr, clusters[i, 0],
                                     clusters[i, 1])
            frametimings = frametimings_original
            # Discard all the spikes that happen after the last frame
            spikes = spikes[spikes < frametimings[-1]]
            # Discard the first trial
            spikes = spikes[spikes > frametimings[4]]
            frametimings = frametimings[4:]
            # Find which trial each spike belongs to, and subtract one
            # to be able to use as indices
            trial_indices = np.digitize(spikes, frametimings[::4]) - 1
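            # Worked example with hypothetical values: for spikes = [0.5, 5.1]
            # and trial onsets frametimings[::4] = [0., 4., 8.],
            # np.digitize(spikes, onsets) gives [1, 2]; minus 1 -> trials [0, 1].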

            rasterplot = []
            # Iterate over all the trials, create an empty array for each
            for j in range(int(np.ceil(frametimings.max() / total_cycle))):
                rasterplot.append([])
            # plt.eventplot requires a list containing spikes in each
            # trial separately
            for k in range(len(spikes)):
                trial = trial_indices[k]
                rasterplot[trial].append(spikes[k] - frametimings[::4][trial])

            # Workaround for matplotlib issue #6412.
            # https://github.com/matplotlib/matplotlib/issues/6412
            # If a cell has no spikes for the first trial i.e. the first
            # element of the list is empty, an error is raised due to
            # a plt.eventplot bug.
            if len(rasterplot[0]) == 0:
                rasterplot[0] = [-1]

            plt.figure(figsize=(9, 9))
            ax1 = plt.subplot(211)
            plt.eventplot(rasterplot, linewidth=.5, color='r')
            # Set the axis so they align with the rectangles
            plt.axis([0, total_cycle, -1, len(rasterplot)])

            # Draw rectangles to represent different parts of the on off
            # steps stimulus
            plf.drawonoff(ax1,
                          preframe_duration,
                          stim_duration,
                          contrast=contrast)

            plt.ylabel('Trial')
            plt.gca().invert_yaxis()
            ax1.set_xticks([])
            plf.spineless(ax1)

            # Collect all trials in one array to calculate firing rates
            ras = np.array([])
            for ii in range(len(rasterplot)):
                ras = np.append(ras, rasterplot[ii])

            # Sort into time bins and count how many spikes happened in each
            fr = np.digitize(ras, t)
            fr = np.bincount(fr)
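            # e.g. np.bincount([0, 1, 1, 3]) -> [1, 2, 0, 1]: the output has
            # max()+1 entries, which is why the length is adjusted below.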
            # Normalize so that units are spikes/s
            fr = fr * (bins / total_cycle) / (len(rasterplot) - 1)
            # Equalize the length of the two arrays for plotting.
            # np.bincount(x) normally produces x.max()+1 bins
            if fr.shape[0] == bins + 1:
                fr = fr[:-1]
            # If there aren't any spikes at the last trial, the firing
            # rates array is too short and plt.plot raises error.
            while fr.shape[0] < bins:
                fr = np.append(fr, 0)

            prefr = np.append(fr[pref1], fr[pref2])
            baseline = np.median(np.round(prefr))

            fr_corr = fr - baseline

            r_on = np.sum(fr_corr[onper])
            r_off = np.sum(fr_corr[offper])

            if r_on == 0 and r_off == 0:
                bias = np.nan
            else:
                bias = (r_on - r_off) / (np.abs(r_on) + np.abs(r_off))

            plt.suptitle(f'{exp_name}\n{stimname}'
                         f'\n{clusterids[i]} Rating: {clusters[i][2]}\n')

            if fr.max() < 20:
                bias = np.nan

            onoffbias[i] = bias
            baselines[i] = baseline

            all_frs.append(fr)

            ax2 = plt.subplot(212)
            plt.plot(t, fr)
            for eachslice in [onper, offper]:
                ax2.fill_between(t[eachslice],
                                 fr[eachslice],
                                 baseline,
                                 where=fr[eachslice] > baseline,
                                 facecolor='lightgray')

            plf.spineless(ax2)
            plt.axis([0, total_cycle, fr.min(), fr.max()])

            plt.title(f'Baseline: {baseline:2.0f} Hz Bias: {bias:0.2f}')
            plt.xlabel('Time[s]')
            plt.ylabel('Firing rate[spikes/s]')

            # Save as svg for looking through data, pdf for
            # inserting into presentations
            plt.savefig(os.path.join(
                savedir,
                '{:0>3}{:0>2}.svg'.format(clusters[i, 0], clusters[i, 1])),
                format='svg', bbox_inches='tight')
            plt.savefig(os.path.join(
                savedir, 'pdf',
                '{:0>3}{:0>2}.pdf'.format(clusters[i, 0], clusters[i, 1])),
                format='pdf', bbox_inches='tight')
            plt.close()

        keystosave = [
            'clusters', 'total_cycle', 'bins', 'tstep', 'stimname',
            'stim_duration', 'preframe_duration', 'contrast', 'all_frs', 't',
            'exp_name', 'onoffbias', 'baselines'
        ]
        data_in_dict = {}
        for key in keystosave:
            data_in_dict[key] = locals()[key]

        np.savez(os.path.join(savedir, stim_nr + '_data'), **data_in_dict)
        print(f'Analysis of {stimname} completed.')
Example #4
def randomizestripes(label, exp_name='20180124', stim_nrs=6):
    exp_dir = iof.exp_dir_fixer(exp_name)

    if isinstance(stim_nrs, int):
        stim_nrs = [stim_nrs]

    for stim_nr in stim_nrs:
        stimname = iof.getstimname(exp_name, stim_nr)

        clusters, metadata = asc.read_spikesheet(exp_dir)

        parameters = asc.read_parameters(exp_dir, stim_nr)

        scr_width = metadata['screen_width']
        px_size = metadata['pixel_size(um)']

        stx_w = parameters['stixelwidth']
        stx_h = parameters['stixelheight']

        if (stx_h/stx_w) < 2:
            raise ValueError('Make sure the stimulus is stripeflicker.')

        sy = scr_width/stx_w
        sy = int(sy)

        nblinks = parameters['Nblinks']
        try:
            bw = parameters['blackwhite']
        except KeyError:
            bw = False

        try:
            seed = parameters['seed']
            initialseed = parameters['seed']
        except KeyError:
            seed = -10000
            initialseed = -10000

        if nblinks == 1:
            ft_on, ft_off = asc.readframetimes(exp_dir, stim_nr,
                                               returnoffsets=True)
            # Initialize empty array twice the size of one of them, assign
            # value from on or off to every other element.
            frametimings = np.empty(ft_on.shape[0]*2, dtype=float)
            frametimings[::2] = ft_on
            frametimings[1::2] = ft_off
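            # e.g. ft_on = [0., 2.] and ft_off = [1., 3.] interleave to
            # frametimings = [0., 1., 2., 3.]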
            # Set filter length so that temporal filter is ~600 ms.
            # The unit here is number of frames.
            filter_length = 40
        elif nblinks == 2:
            frametimings = asc.readframetimes(exp_dir, stim_nr)
            filter_length = 20
        else:
            raise ValueError('Unexpected value for nblinks.')

        # Omit everything that happens before the first 10 seconds
        cut_time = 10

        frame_duration = np.average(np.ediff1d(frametimings))
        total_frames = int(frametimings.shape[0]/4)

        all_spiketimes = []
        # Store spike triggered averages in a list containing correct
        # shaped arrays
        stas = []

        for i in range(len(clusters[:, 0])):
            spikes_orig = asc.read_raster(exp_dir, stim_nr,
                                          clusters[i, 0], clusters[i, 1])
            # Replace the real raster with uniformly distributed surrogate
            # spike times spanning the same recording interval.
            spikesneeded = spikes_orig.shape[0]*1000
            spiketimes = np.random.random_sample(spikesneeded)*spikes_orig.max()
            spiketimes = np.sort(spiketimes)
            spikes = asc.binspikes(spiketimes, frametimings)
            all_spiketimes.append(spikes)
            stas.append(np.zeros((sy, filter_length)))

        if bw:
            randnrs, seed = randpy.ran1(seed, sy*total_frames)
            randnrs = [1 if i > .5 else -1 for i in randnrs]
        else:
            randnrs, seed = randpy.gasdev(seed, sy*total_frames)

        stimulus = np.reshape(randnrs, (sy, total_frames), order='F')
        del randnrs

        for k in range(filter_length, total_frames-filter_length+1):
            stim_small = stimulus[:, k-filter_length+1:k+1][:, ::-1]
            for j in range(clusters.shape[0]):
                spikes = all_spiketimes[j]
                if spikes[k] != 0 and frametimings[k] > cut_time:
                    stas[j] += spikes[k]*stim_small

        max_inds = []

        spikenrs = np.array([spikearr.sum() for spikearr in all_spiketimes])

        quals = np.array([])

        for i in range(clusters.shape[0]):
            stas[i] = stas[i]/spikenrs[i]
            # Find the pixel with largest absolute value
            max_i = np.squeeze(np.where(np.abs(stas[i])
                                        == np.max(np.abs(stas[i]))))
            # If there are multiple pixels with largest value,
            # take the first one.
            if max_i.shape != (2,):
                try:
                    max_i = max_i[:, 0]
                # If max_i cannot be found just set it to zeros.
                except IndexError:
                    max_i = np.array([0, 0])

            max_inds.append(max_i)

            quals = np.append(quals, asc.staquality(stas[i]))


        clusterids = plf.clusters_to_ids(clusters)

        correction = corrector(sy, total_frames, filter_length, initialseed)
        correction = np.outer(correction, np.ones(filter_length))

        t = np.arange(filter_length)*frame_duration*1000
        vscale = int(stas[0].shape[0] * stx_w * px_size / 1000)
        for i in range(clusters.shape[0]):
            sta = stas[i]-correction

            vmax = 0.03
            vmin = -vmax
            plt.figure(figsize=(6, 15))
            ax = plt.subplot(111)
            im = ax.imshow(sta, cmap='RdBu', vmin=vmin, vmax=vmax,
                           extent=[0, t[-1], -vscale, vscale], aspect='auto')
            plt.xlabel('Time [ms]')
            plt.ylabel('Distance [mm]')

            plf.spineless(ax)
            plf.colorbar(im, ticks=[vmin, 0, vmax], format='%.2f', size='2%')
            plt.suptitle('{}\n{}\n'
                         '{} Rating: {}\n'
                         'nrofspikes {:5.0f}'.format(exp_name,
                                                       stimname,
                                                       clusterids[i],
                                                       clusters[i][2],
                                                       spikenrs[i]))
            plt.subplots_adjust(top=.90)
            savepath = os.path.join(exp_dir, 'data_analysis',
                                    stimname, 'STAs_randomized')
            svgpath = pjoin(savepath, label)
            if not os.path.isdir(svgpath):
                os.makedirs(svgpath, exist_ok=True)
            plt.savefig(os.path.join(svgpath, clusterids[i]+'.svg'),
                        bbox_inches='tight')
            plt.close()

    os.system(f"convert -delay 25 {svgpath}/*svg {savepath}/animated_{label}.gif")
    def get_frametimings(self):
        frametimings = asc.readframetimes(self.exp,
                                          self.stimnr)[:self.maxframes_i]
        self.frametimings = frametimings
Example #6
def checkerflickerplusanalyzer(exp_name,
                               stimulusnr,
                               clusterstoanalyze=None,
                               frametimingsfraction=None,
                               cutoff=4):
    """
    Analyzes checkerflicker-like data, typically interspersed
    stimuli in between chunks of checkerflicker.
    e.g. checkerflickerplusmovie, frozennoise

    Parameters
    ----------
        exp_name:
            Experiment name.
        stimulusnr:
            Number of the stimulus to be analyzed.
        clusterstoanalyze:
            Number of clusters to be analyzed. Default is None.

            First N cells will be analyzed if this parameter is given.
            In case of long recordings it might make sense to first
            look at a subset of cells before starting to analyze
            the whole dataset.

        frametimingsfraction:
            Fraction of the recording to analyze. Should be a number
            between 0 and 1. e.g. 0.3 will analyze the first 30% of
            the whole recording.
        cutoff:
           The worst cluster rating to include in the analysis. Default
           is 4. The rating of each cluster comes from manual evaluation.
    """
    exp_dir = iof.exp_dir_fixer(exp_name)

    stimname = iof.getstimname(exp_dir, stimulusnr)

    exp_name = os.path.split(exp_dir)[-1]

    clusters, metadata = asc.read_spikesheet(exp_dir, cutoff=cutoff)

    # Check that the inputs are as expected.
    if clusterstoanalyze:
        if clusterstoanalyze > len(clusters[:, 0]):
            warnings.warn('clusterstoanalyze is larger '
                          'than number of clusters in dataset. '
                          'All cells will be included.')
            clusterstoanalyze = None
    if frametimingsfraction:
        if not 0 < frametimingsfraction < 1:
            raise ValueError('Invalid input for frametimingsfraction: {}. '
                             'It should be a number between 0 and 1'
                             ''.format(frametimingsfraction))

    scr_width = metadata['screen_width']
    scr_height = metadata['screen_height']

    refresh_rate = metadata['refresh_rate']

    parameters = asc.read_parameters(exp_dir, stimulusnr)

    stx_h = parameters['stixelheight']
    stx_w = parameters['stixelwidth']

    # Check whether any parameters are given for margins, calculate
    # screen dimensions.
    marginkeys = ['tmargin', 'bmargin', 'rmargin', 'lmargin']
    margins = []
    for key in marginkeys:
        margins.append(parameters.get(key, 0))

    # Subtract bottom and top from vertical dimension; left and right
    # from horizontal dimension
    scr_width = scr_width - sum(margins[2:])
    scr_height = scr_height - sum(margins[:2])

    nblinks = parameters['Nblinks']
    bw = parameters.get('blackwhite', False)

    # Gaussian stimuli are not supported yet; make sure the stimulus
    # is black and white.
    if bw is not True:
        raise ValueError('Gaussian stimuli are not supported yet!')

    seed = parameters.get('seed', -1000)

    sx, sy = scr_height / stx_h, scr_width / stx_w

    # Make sure that the number of stimulus pixels are integers
    # Rounding down is also possible but might require
    # other considerations.
    if sx % 1 == 0 and sy % 1 == 0:
        sx, sy = int(sx), int(sy)
    else:
        raise ValueError('sx and sy must be integers')

    filter_length, frametimings = asc.ft_nblinks(exp_dir, stimulusnr)

    if parameters['stimulus_type'] in [
            'FrozenNoise', 'checkerflickerplusmovie'
    ]:
        runfr = parameters['RunningFrames']
        frofr = parameters['FrozenFrames']
        # To generate the frozen noise, a second seed is used.
        # The default value of this is -10000 as per StimulateOpenGL
        secondseed = parameters.get('secondseed', -10000)

        if parameters['stimulus_type'] == 'checkerflickerplusmovie':
            mblinks = parameters['Nblinksmovie']
            # Retrieve the number of frames (files) from parameters['path']
            ipath = PureWindowsPath(parameters['path']).as_posix()
            repldict = iof.config('stimuli_path_replace')
            for needle, repl in repldict.items():
                ipath = ipath.replace(needle, repl)
            ipath = os.path.normpath(ipath)  # Windows compatibility
            moviefr = len([
                name for name in os.listdir(ipath)
                if os.path.isfile(os.path.join(ipath, name))
                and name.lower().endswith('.raw')
            ])
            noiselen = (runfr + frofr) * nblinks
            movielen = moviefr * mblinks
            triallen = noiselen + movielen
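            # Worked example with hypothetical parameters: runfr=1440,
            # frofr=360, nblinks=2 -> noiselen=3600; moviefr=600, mblinks=2
            # -> movielen=1200; so triallen = 4800 pulses per trial.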

            ft_on, ft_off = asc.readframetimes(exp_dir,
                                               stimulusnr,
                                               returnoffsets=True)
            frametimings = np.empty(ft_on.shape[0] * 2, dtype=float)
            frametimings[::2] = ft_on
            frametimings[1::2] = ft_off

            ntrials = frametimings.size // triallen
            trials = np.zeros((ntrials, runfr + frofr + moviefr))
            for t in range(ntrials):
                frange = frametimings[t * triallen:(t + 1) * triallen]
                trials[t, :runfr + frofr] = frange[:noiselen][::nblinks]
                trials[t, runfr + frofr:] = frange[noiselen:][::mblinks]
            frametimings = trials.ravel()

            filter_length = int(np.round(.666 * refresh_rate / nblinks))

            # Add frozen movie to frozen noise (for masking)
            frofr += moviefr

    savefname = str(stimulusnr) + '_data'

    if clusterstoanalyze:
        clusters = clusters[:clusterstoanalyze, :]
        print('Analyzing first %s cells' % clusterstoanalyze)
        savefname += '_' + str(clusterstoanalyze) + 'cells'
    if frametimingsfraction:
        frametimingsindex = int(len(frametimings) * frametimingsfraction)
        frametimings = frametimings[:frametimingsindex]
        print('Analyzing first {}% of'
              ' the recording'.format(frametimingsfraction * 100))
        savefname += '_' + str(frametimingsfraction).replace('.',
                                                             '') + 'fraction'
    frame_duration = np.average(np.ediff1d(frametimings))
    total_frames = frametimings.shape[0]

    all_spiketimes = []
    # Store spike triggered averages in a list containing correct shaped
    # arrays
    stas = []

    for i in range(len(clusters[:, 0])):
        spiketimes = asc.read_raster(exp_dir, stimulusnr, clusters[i, 0],
                                     clusters[i, 1])

        spikes = asc.binspikes(spiketimes, frametimings)
        all_spiketimes.append(spikes)
        stas.append(np.zeros((sx, sy, filter_length)))

    # Separate out the repeated parts
    all_spiketimes = np.array(all_spiketimes)
    mask = runfreezemask(total_frames, runfr, frofr, refresh_rate)
    repeated_spiketimes = all_spiketimes[:, ~mask]
    run_spiketimes = all_spiketimes[:, mask]

    # We need to cut down the total_frames by the same amount
    # as spiketimes
    total_run_frames = run_spiketimes.shape[1]
    # To be able to use the same code as checkerflicker analyzer,
    # convert to list again.
    run_spiketimes = list(run_spiketimes)

    # Empirically determined to be best for 32GB RAM
    desired_chunk_size = 21600000

    # Length of the chunks (specified in number of frames)
    chunklength = int(desired_chunk_size / (sx * sy))

    chunksize = chunklength * sx * sy
    nrofchunks = int(np.ceil(total_run_frames / chunklength))
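    # e.g. for a hypothetical 60x80 stixel grid:
    # chunklength = 21600000 // (60 * 80) = 4500 frames per chunk.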

    print(f'\nAnalyzing {stimname}.\nTotal chunks: {nrofchunks}')

    time = startime = datetime.datetime.now()
    timedeltas = []

    quals = np.zeros(len(stas))

    frame_counter = 0

    for i in range(nrofchunks):
        randnrs, seed = randpy.ranb(seed, chunksize)
        # Reshape and change 0's to -1's
        stimulus = np.reshape(randnrs,
                              (sx, sy, chunklength), order='F') * 2 - 1
        del randnrs

        # Range of indices we are interested in for the current chunk
        if (i + 1) * chunklength < total_run_frames:
            chunkind = slice(i * chunklength, (i + 1) * chunklength)
            chunkend = chunklength
        else:
            chunkind = slice(i * chunklength, None)
            chunkend = total_run_frames - i * chunklength

        for k in range(filter_length, chunkend - filter_length + 1):
            stim_small = stimulus[:, :,
                                  k - filter_length + 1:k + 1][:, :, ::-1]
            for j in range(clusters.shape[0]):
                spikes = run_spiketimes[j][chunkind]
                if spikes[k] != 0:
                    stas[j] += spikes[k] * stim_small
        qual = np.array([])
        for c in range(clusters.shape[0]):
            qual = np.append(qual, asc.staquality(stas[c]))
        quals = np.vstack((quals, qual))

        # Draw progress bar
        width = 50  # Number of characters
        prog = i / max(nrofchunks - 1, 1)
        bar_complete = int(prog * width)
        bar_noncomplete = width - bar_complete
        timedeltas.append(msc.timediff(time))  # Calculate running avg
        avgelapsed = np.mean(timedeltas)
        elapsed = np.sum(timedeltas)
        etc = startime + elapsed + avgelapsed * (nrofchunks - i)
        sys.stdout.flush()
        sys.stdout.write('\r{}{} |{:4.1f}% ETC: {}'.format(
            '█' * bar_complete, '-' * bar_noncomplete, prog * 100,
            etc.strftime("%a %X")))
        time = datetime.datetime.now()
    sys.stdout.write('\n')

    # Remove the first row which is full of random nrs.
    quals = quals[1:, :]

    max_inds = []
    spikenrs = np.array([spikearr.sum() for spikearr in run_spiketimes])

    for i in range(clusters.shape[0]):
        with warnings.catch_warnings():
            warnings.filterwarnings('ignore', '.*true_divide.*')
            stas[i] = stas[i] / spikenrs[i]
        # Find the pixel with largest absolute value
        max_i = np.squeeze(
            np.where(np.abs(stas[i]) == np.max(np.abs(stas[i]))))
        # If there are multiple pixels with largest value,
        # take the first one.
        if max_i.shape != (3, ):
            try:
                max_i = max_i[:, 0]
            # If max_i cannot be found just set it to zeros.
            except IndexError:
                max_i = np.array([0, 0, 0])

        max_inds.append(max_i)

    print(f'Completed. Total elapsed time: {msc.timediff(startime)}\n' +
          f'Finished on {datetime.datetime.now().strftime("%A %X")}')

    savepath = os.path.join(exp_dir, 'data_analysis', stimname)
    if not os.path.isdir(savepath):
        os.makedirs(savepath, exist_ok=True)
    savepath = os.path.join(savepath, savefname)

    keystosave = [
        'clusters', 'frametimings', 'mask', 'repeated_spiketimes',
        'run_spiketimes', 'frame_duration', 'max_inds', 'nblinks', 'stas',
        'stx_h', 'stx_w', 'total_run_frames', 'sx', 'sy', 'filter_length',
        'stimname', 'exp_name', 'spikenrs', 'clusterstoanalyze',
        'frametimingsfraction', 'cutoff', 'quals', 'nrofchunks', 'chunklength'
    ]
    datadict = {}

    for key in keystosave:
        datadict[key] = locals()[key]

    np.savez(savepath, **datadict)

    t = (np.arange(nrofchunks) * chunklength * frame_duration) / refresh_rate
    qmax = np.max(quals, axis=0)
    qualsn = quals / qmax[np.newaxis, :]

    ax = plt.subplot(111)
    ax.plot(t, qualsn, alpha=0.3)
    plt.ylabel('Z-score of center pixel (normalized)')
    plt.xlabel('Minutes of stimulus analyzed')
    plt.ylim([0, 1])
    plf.spineless(ax, 'tr')
    plt.title(f'Recording duration optimization\n{exp_name}\n {savefname}')
    plt.savefig(savepath + '.svg', format='svg')
    plt.close()
Example #7
def stripeflickeranalysis(exp_name, stim_nrs):
    exp_dir = iof.exp_dir_fixer(exp_name)

    if isinstance(stim_nrs, int):
        stim_nrs = [stim_nrs]

    for stim_nr in stim_nrs:
        stimname = iof.getstimname(exp_name, stim_nr)

        clusters, metadata = asc.read_spikesheet(exp_dir)

        parameters = asc.read_parameters(exp_dir, stim_nr)

        scr_width = metadata['screen_width']
        px_size = metadata['pixel_size(um)']

        stx_w = parameters['stixelwidth']
        stx_h = parameters['stixelheight']

        if (stx_h / stx_w) < 2:
            raise ValueError('Make sure the stimulus is stripeflicker.')

        sy = scr_width / stx_w
        if sy % 1 == 0:
            sy = int(sy)
        else:
            raise ValueError('sy is not an integer')

        nblinks = parameters['Nblinks']
        try:
            bw = parameters['blackwhite']
        except KeyError:
            bw = False

        try:
            seed = parameters['seed']
        except KeyError:
            seed = -10000

        if nblinks == 1:
            ft_on, ft_off = asc.readframetimes(exp_dir,
                                               stim_nr,
                                               returnoffsets=True)
            # Initialize empty array twice the size of one of them, assign
            # value from on or off to every other element.
            frametimings = np.empty(ft_on.shape[0] * 2, dtype=float)
            frametimings[::2] = ft_on
            frametimings[1::2] = ft_off
            # Set filter length so that temporal filter is ~600 ms.
            # The unit here is number of frames.
            filter_length = 40
        elif nblinks == 2:
            frametimings = asc.readframetimes(exp_dir, stim_nr)
            filter_length = 20
        else:
            raise ValueError('Unexpected value for nblinks.')

        # Omit everything that happens before the first 10 seconds
        cut_time = 10

        frame_duration = np.average(np.ediff1d(frametimings))
        total_frames = frametimings.shape[0]

        all_spiketimes = []
        # Store spike triggered averages in a list containing correct
        # shaped arrays
        stas = []

        for i in range(len(clusters[:, 0])):
            spiketimes = asc.read_raster(exp_dir, stim_nr, clusters[i, 0],
                                         clusters[i, 1])
            spikes = asc.binspikes(spiketimes, frametimings)
            all_spiketimes.append(spikes)
            stas.append(np.zeros((sy, filter_length)))

        if bw:
            randnrs, seed = randpy.ran1(seed, sy * total_frames)
            randnrs = [1 if i > .5 else -1 for i in randnrs]
        else:
            randnrs, seed = randpy.gasdev(seed, sy * total_frames)

        stimulus = np.reshape(randnrs, (sy, total_frames), order='F')
        del randnrs

        for k in range(filter_length, total_frames - filter_length + 1):
            stim_small = stimulus[:, k - filter_length + 1:k + 1][:, ::-1]
            for j in range(clusters.shape[0]):
                spikes = all_spiketimes[j]
                if spikes[k] != 0 and frametimings[k] > cut_time:
                    stas[j] += spikes[k] * stim_small

        max_inds = []
        spikenrs = np.array([spikearr.sum() for spikearr in all_spiketimes])

        quals = np.array([])

        for i in range(clusters.shape[0]):
            stas[i] = stas[i] / spikenrs[i]
            # Find the pixel with largest absolute value
            max_i = np.squeeze(
                np.where(np.abs(stas[i]) == np.max(np.abs(stas[i]))))
            # If there are multiple pixels with largest value,
            # take the first one.
            if max_i.shape != (2, ):
                try:
                    max_i = max_i[:, 0]
                # If max_i cannot be found just set it to zeros.
                except IndexError:
                    max_i = np.array([0, 0])

            max_inds.append(max_i)

            quals = np.append(quals, asc.staquality(stas[i]))

        savefname = str(stim_nr) + '_data'
        savepath = pjoin(exp_dir, 'data_analysis', stimname)

        exp_name = os.path.split(exp_dir)[-1]

        if not os.path.isdir(savepath):
            os.makedirs(savepath, exist_ok=True)
        savepath = os.path.join(savepath, savefname)

        keystosave = [
            'stas', 'max_inds', 'clusters', 'sy', 'frame_duration',
            'all_spiketimes', 'stimname', 'total_frames', 'stx_w', 'spikenrs',
            'bw', 'quals', 'nblinks', 'filter_length', 'exp_name'
        ]
        data_in_dict = {}
        for key in keystosave:
            data_in_dict[key] = locals()[key]

        np.savez(savepath, **data_in_dict)
        print(f'Analysis of {stimname} completed.')
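A hedged usage sketch (experiment name and stimulus numbers are hypothetical):
stripeflickeranalysis('20180124', [5, 6])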
Example #8
def OMSpatchesanalyzer(exp_name, stim_nrs):
    """
    Analyze and plot the responses to object motion patches stimulus.
    """

    exp_dir = iof.exp_dir_fixer(exp_name)

    exp_name = os.path.split(exp_dir)[-1]

    if isinstance(stim_nrs, int):
        stim_nrs = [stim_nrs]
    elif len(stim_nrs) == 0:
        return

    clusters, metadata = asc.read_spikesheet(exp_dir, cutoff=4)
    clusterids = plf.clusters_to_ids(clusters)
    all_omsi = np.empty((clusters.shape[0], len(stim_nrs)))
    stimnames = []
    for stim_index, stim_nr in enumerate(stim_nrs):
        stim_nr = str(stim_nr)

        stimname = iof.getstimname(exp_dir, stim_nr)
        stimnames.append(stimname)

        parameters = asc.read_parameters(exp_dir, stim_nr)

        refresh_rate = metadata['refresh_rate']

        nblinks = parameters.get('Nblinks', 1)
        seed = parameters.get('seed', -10000)
        stim_duration = parameters.get('stimFrames', 1400)
        # The duration in the parameters refers to the total duration of
        # both epochs; divide by two to get the duration of a single epoch.
        stim_duration = int(stim_duration / 2)
        prefr_duration = parameters.get('preFrames', 100)

        frametimings = asc.readframetimes(exp_dir, stim_nr)

        # ntrials is the number of trials containing both epochs
        # (local and global)
        ntrials = np.floor((frametimings.shape[0] / (stim_duration + 1))) / 2
        ntrials = ntrials.astype(int)
        frametimings_rs = frametimings[:ntrials * 2 * (stim_duration + 1)]
        frametimings_rs = frametimings_rs.reshape(
            (ntrials * 2, stim_duration + 1))
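        # Worked example with hypothetical numbers: stim_duration = 700 and
        # 8452 frametimes give ntrials = int(floor(8452 / 701) / 2) = 6, so
        # the first 6 * 2 * 701 = 8412 pulses are kept and reshaped into
        # 12 half-trials of 701 frames each.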

        ft_local = frametimings_rs[::2][:, :-1]
        ft_global = frametimings_rs[1::2][:, :-1]

        localspikes = np.empty((clusters.shape[0], ntrials, stim_duration))
        globalspikes = np.empty((clusters.shape[0], ntrials, stim_duration))

        for i, cluster in enumerate(clusters):
            spikes = asc.read_raster(exp_name, stim_nr, cluster[0], cluster[1])
            for j in range(ntrials):
                localspikes[i, j, :] = asc.binspikes(spikes, ft_local[j, :])
                globalspikes[i, j, :] = asc.binspikes(spikes, ft_global[j, :])

        response_local = localspikes.mean(axis=1)
        response_global = globalspikes.mean(axis=1)

        # Differential and coherent firing rates
        fr_d = response_local.mean(axis=1)
        fr_c = response_global.mean(axis=1)

        # Calculate the object motion sensitivity index (OMSI) as defined
        # in Kühn et al., 2016. There, the first second of each trial is
        # discarded; here it does not seem to differ much from the rest.
        omsi = (fr_d - fr_c) / (fr_d + fr_c)

        # Create a time array for plotting
        time = np.linspace(0,
                           stim_duration * 2 / refresh_rate,
                           num=stim_duration)

        savepath = os.path.join(exp_dir, 'data_analysis', stimname)
        if not os.path.isdir(savepath):
            os.makedirs(savepath, exist_ok=True)

        for i, cluster in enumerate(clusters):
            gs = gridspec.GridSpec(2, 1)
            ax1 = plt.subplot(gs[0])
            ax2 = plt.subplot(gs[1])

            rastermat = np.vstack(
                (localspikes[i, :, :], globalspikes[i, :, :]))
            ax1.matshow(rastermat, cmap='Greys')
            ax1.axhline(ntrials - 1, color='r', lw=.1)
            ax1.plot([0, 0], [ntrials, 0])
            ax1.plot([0, 0], [ntrials * 2, ntrials])
            ax1.set_xticks([])
            ax1.set_yticks([])
            plf.spineless(ax1)

            ax2.plot(time, response_local[i, :], label='Local')
            ax2.plot(time, response_global[i, :], label='Global')
            ax2.set_xlabel('Time [s]')
            ax2.set_ylabel('Average firing rate [au]')
            ax2.set_xlim([time.min(), time.max()])
            plf.spineless(ax2, 'tr')
            ax2.legend(fontsize='x-small')

            plt.suptitle(f'{exp_name}\n{stimname}\n'
                         f'{clusterids[i]} OMSI: {omsi[i]:4.2f}')
            plt.tight_layout()
            plt.savefig(os.path.join(savepath, clusterids[i] + '.svg'),
                        bbox_inches='tight')
            plt.close()
        keystosave = [
            'nblinks', 'refresh_rate', 'stim_duration', 'prefr_duration',
            'ntrials', 'response_local', 'response_global', 'fr_d', 'fr_c',
            'omsi', 'clusters'
        ]
        datadict = {}

        for key in keystosave:
            datadict[key] = locals()[key]

        npzfpath = os.path.join(savepath, str(stim_nr) + '_data')
        np.savez(npzfpath, **datadict)
        all_omsi[:, stim_index] = omsi
    print(f'Analysis of {stimname} completed.')
    # Draw the distribution of the OMSI for all OMSI stimuli
    # If there is only one OMS stimulus, draw it in the same folder
    # If there are multiple stimuli, save it in the data analysis folder
    if len(stim_nrs) == 1:
        pop_plot_savepath = os.path.join(savepath, 'omsi_population.svg')
    else:
        pop_plot_savepath = os.path.split(savepath)[0]
        pop_plot_savepath = os.path.join(pop_plot_savepath, 'all_omsi.svg')

    plt.figure(figsize=(5, 2 * len(stim_nrs)))
    ax2 = plt.subplot(111)
    for j, stim_nr in enumerate(stim_nrs):
        np.random.seed(j)
        ax2.scatter(all_omsi[:, j],
                    j + (np.random.random(omsi.shape) - .5) / 1.1)
    np.random.seed()
    ax2.set_yticks(np.arange(len(stim_nrs)))
    ax2.set_yticklabels(stimnames, fontsize='xx-small', rotation='45')
    ax2.set_xlabel('Object-motion sensitivity index')
    ax2.set_title(f'{exp_name}\nDistribution of OMSI')
    plf.spineless(ax2, 'tr')
    plt.savefig(pop_plot_savepath, bbox_inches='tight')
    plt.close()
Example #9
    # Make sure that the number of stimulus pixels are integers
    # Rounding down is also possible but might require
    # other considerations.
    if sx % 1 == 0 and sy % 1 == 0:
        sx, sy = int(sx), int(sy)
    else:
        raise ValueError('sx and sy must be integers')

    # If the frame period of the checkerflicker stimulus is ~16 ms (i.e.
    # Nblinks is set to 1), frame timings must be handled differently,
    # because the state of the pulse can only change when a new frame is
    # delivered. For this reason the offsets of the pulses denote a frame
    # change as well as the onsets.
    if nblinks == 1:
        ft_on, ft_off = asc.readframetimes(exp_dir, stimulusnr,
                                           returnoffsets=True)
        # Initialize empty array twice the size of one of them, assign
        # value from on or off to every other element.
        frametimings = np.empty(ft_on.shape[0]*2, dtype=float)
        frametimings[::2] = ft_on
        frametimings[1::2] = ft_off
        # Set filter length so that temporal filter is ~600 ms. The unit
        # here is number of frames.
        filter_length = 40
    elif nblinks == 2:
        frametimings = asc.readframetimes(exp_dir, stimulusnr)
        filter_length = 20
    elif nblinks == 4:
        frametimings = asc.readframetimes(exp_dir, stimulusnr)
        filter_length = 10
    else:
        raise ValueError('Unexpected value for nblinks.')
Example #10
def saccadegratingsanalyzer(exp_name, stim_nr):
    """
    Analyze and save responses to saccadegratings stimulus.
    """

    exp_dir = iof.exp_dir_fixer(exp_name)
    exp_name = os.path.split(exp_dir)[-1]
    stimname = iof.getstimname(exp_dir, stim_nr)
    clusters, metadata = asc.read_spikesheet(exp_dir)
    clusterids = plf.clusters_to_ids(clusters)

    refresh_rate = metadata['refresh_rate']

    parameters = asc.read_parameters(exp_name, stim_nr)
    if parameters['stimulus_type'] != 'saccadegrating':
        raise ValueError('Unexpected stimulus type: '
                         f'{parameters["stimulus_type"]}')
    fixfr = parameters.get('fixationframes', 80)
    sacfr = parameters.get('saccadeframes', 10)
    barwidth = parameters.get('barwidth', 40)
    averageshift = parameters.get('averageshift', 2)
    # The seed is hard-coded in the Stimulator
    seed = -10000

    ftimes = asc.readframetimes(exp_dir, stim_nr)
    ftimes.resize(int(ftimes.shape[0] / 2), 2)
    nfr = ftimes.size
    # Re-generate the stimulus.
    # The amplitude of the shift and the transition type (saccade or grey)
    # are determined from the output of ran1.
    randnrs = np.array(randpy.ran1(seed, nfr)[0])

    # Separate the amplitude and transitions into two arrays
    stimpos = (4 * randnrs[::2]).astype(int)
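    # e.g. a hypothetical ran1 draw of 0.62 maps to int(4 * 0.62) = 2,
    # one of the four grating positions 0-3.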

    # Transition variable, determines whether the grating is moving during
    # the transition or only a grey screen is presented.
    trans = np.array(randnrs[1::2] > 0.5)

    # Record before and after positions in a single array and remove
    # the first element, because there is no preceding position.
    stimposx = np.append(0, stimpos)[:-1]
    stimtr = np.stack((stimposx, stimpos), axis=1)[1:]
    trans = trans[:-1]

    saccadetr = stimtr[trans, :]
    greytr = stimtr[~trans, :]

    # Create a time vector with defined temporal bin size
    tstep = 0.01  # Bin size is defined here, unit is seconds
    trialduration = (fixfr + sacfr) / refresh_rate
    nrsteps = int(trialduration / tstep) + 1
    t = np.linspace(0, trialduration, num=nrsteps)
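    # e.g. with the default fixfr=80, sacfr=10 and a hypothetical 60 Hz
    # refresh rate: trialduration = 90 / 60 = 1.5 s, so t has 151 bins.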

    # Collect saccade beginning time for each trial
    trials = ftimes[1:, 0]
    sacftimes = trials[trans]
    greyftimes = trials[~trans]

    sacspikes = np.empty((clusters.shape[0], sacftimes.shape[0], t.shape[0]))
    greyspikes = np.empty((clusters.shape[0], greyftimes.shape[0], t.shape[0]))
    # Collect all the psth in one array. The order is
    # transition type, cluster index, start pos, target pos, time
    psth = np.zeros((2, clusters.shape[0], 4, 4, t.size))

    for i, (chid, clid, _) in enumerate(clusters):
        spiketimes = asc.read_raster(exp_dir, stim_nr, chid, clid)
        for j, _ in enumerate(sacftimes):
            sacspikes[i, j, :] = asc.binspikes(spiketimes, sacftimes[j] + t)
        for k, _ in enumerate(greyftimes):
            greyspikes[i, k, :] = asc.binspikes(spiketimes, greyftimes[k] + t)

    # Sort trials according to the transition type
    # nton[i][j] contains the indexes of trials where saccade was i to j
    nton_sac = [[[] for _ in range(4)] for _ in range(4)]
    for i, trial in enumerate(saccadetr):
        nton_sac[trial[0]][trial[1]].append(i)
    nton_grey = [[[] for _ in range(4)] for _ in range(4)]
    for i, trial in enumerate(greytr):
        nton_grey[trial[0]][trial[1]].append(i)

    savedir = os.path.join(exp_dir, 'data_analysis', stimname)
    os.makedirs(savedir, exist_ok=True)
    for i in range(clusters.shape[0]):
        fig, axes = plt.subplots(4,
                                 4,
                                 sharex=True,
                                 sharey=True,
                                 figsize=(8, 8))
        for j in range(4):
            for k in range(4):
                # Start from bottom left corner
                ax = axes[3 - j][k]
                # Average all transitions of one type
                psth_sac = sacspikes[i, nton_sac[j][k], :].mean(axis=0)
                psth_grey = greyspikes[i, nton_grey[j][k], :].mean(axis=0)
                # Convert to spikes per second
                psth_sac = psth_sac / tstep
                psth_grey = psth_grey / tstep
                psth[0, i, j, k, :] = psth_sac
                psth[1, i, j, k, :] = psth_grey
                ax.axvline(sacfr / refresh_rate * 1000,
                           color='red',
                           linestyle='dashed',
                           linewidth=.5)
                ax.plot(t * 1000, psth_sac, label='Saccadic trans.')
                ax.plot(t * 1000, psth_grey, label='Grey trans.')
                ax.set_yticks([])
                ax.set_xticks([])
                # Cosmetics
                plf.spineless(ax)
                if j == k:
                    ax.set_facecolor((1, 1, 0, 0.15))
                if j == 0:
                    ax.set_xlabel(f'{k}')
                    if k == 3:
                        ax.legend(fontsize='xx-small', loc=0)
                if k == 0:
                    ax.set_ylabel(f'{j}')

        # Add an encompassing label for starting and target positions
        ax0 = fig.add_axes([0.08, 0.08, .86, .86])
        plf.spineless(ax0)
        ax0.patch.set_alpha(0)
        ax0.set_xticks([])
        ax0.set_yticks([])
        ax0.set_ylabel('Start position')
        ax0.set_xlabel('Target position')
        plt.suptitle(f'{exp_name}\n{stimname}\n{clusterids[i]}')
        plt.savefig(os.path.join(savedir, f'{clusterids[i]}.svg'))
        plt.close()
    # Save results
    keystosave = [
        'fixfr', 'sacfr', 't', 'averageshift', 'barwidth', 'seed', 'trans',
        'saccadetr', 'greytr', 'nton_sac', 'nton_grey', 'stimname',
        'sacspikes', 'greyspikes', 'psth', 'nfr', 'parameters'
    ]
    data_in_dict = {}
    for key in keystosave:
        data_in_dict[key] = locals()[key]

    np.savez(os.path.join(savedir, str(stim_nr) + '_data'), **data_in_dict)
    print(f'Analysis of {stimname} completed.')