Example 1
    def check_run_indices(self):
        for key in self.key_source.fetch('KEY'):
            repro = 'SAM'
            basedir = BASEDIR + key['cell_id']
            spikefile = basedir + '/samallspikes1.dat'
            if os.path.isfile(spikefile):
                stimuli = load(basedir + '/stimuli.dat')
                traces = load_traces(basedir, stimuli)
                spikes = load(spikefile)
                spi_meta, spi_key, spi_data = spikes.selectall()

                globalefield = Runs.GlobalEFieldPeaksTroughs()
                localeodpeaks = Runs.LocalEODPeaksTroughs()
                localeod = Runs.LocalEOD()
                globaleod = Runs.GlobalEOD()
                globaleodpeaks = Runs.GlobalEODPeaksTroughs()
                spike_table = Runs.SpikeTimes()
                # v1trace = Runs.VoltageTraces()

                for run_idx, (spi_d,
                              spi_m) in enumerate(zip(spi_data, spi_meta)):
                    if run_idx != spi_m['index']:
                        print('Indices do not match for key', key)
                    else:
                        print('.', end='', flush=True)
Example 2
def process_wn(path_to_folder, fname, data_length):
    """
    Loads the white noise used for stimulation.
    :param path_to_folder: folder containing the experimental subfolders
    :param fname: white noise file name joined with the subfolder path
    :param data_length: number of samples in the spike train (the white noise is resampled to this length)
    :return: vector containing the white noise values, resampled to data_length samples if necessary
    """
    # wn_dur /= 1000 #    convert to seconds
    # wn_N_samples = wn_dur*samp_rate

    #   define the input data
    filenameWN = ppjoin(path_to_folder, fname)

    #   get the file, extract the three data subunits
    relacs_file = load(filenameWN)

    #   if relacs file is empty or too short due to aborted RePro presentation
    #   "try:" provides normal termination of the loop instead of error
    try:
        metasWN, _, datasWN = relacs_file.selectall()

    except:
        return None

    #   check if the number of samples matches the number of samples in the convolved spike train
    datas_whitenoise = datasWN[0][:, 1]
    if datas_whitenoise.shape[0] != data_length:
        #   resample the white noise onto a time base with exactly data_length points
        timepoints_new = np.linspace(datasWN[0][0, 0], datasWN[0][-1, 0], data_length)
        datas_whitenoise = np.interp(timepoints_new, datasWN[0][:, 0], datasWN[0][:, 1])

    return datas_whitenoise
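
A minimal standalone sketch of the resampling step above, with the Relacs file replaced by a synthetic two-column (time, value) array so the interpolation logic can be checked in isolation; all names and lengths here are hypothetical.

import numpy as np

#   synthetic stand-in for datasWN[0]: column 0 = time, column 1 = white-noise value
wn = np.column_stack([np.linspace(0.0, 1.0, 5000), np.random.randn(5000)])
data_length = 20000  # hypothetical length of the convolved spike train

if wn.shape[0] != data_length:
    #   resample onto a time base with exactly data_length points
    t_new = np.linspace(wn[0, 0], wn[-1, 0], data_length)
    wn_resampled = np.interp(t_new, wn[:, 0], wn[:, 1])
else:
    wn_resampled = wn[:, 1]

assert wn_resampled.shape[0] == data_length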
Example 3
def segment_extraction(fn1, fn2, ix, cur):
    """
    Extracts a fixed-length segment of the voltage trace around each spike.
    :param fn1: trace file name
    :param fn2: spikes file name
    :param ix: RePro index to select
    :param cur: stimulus current to select
    :return spike_mat: 2d matrix of spike shapes (one row per spike)
    :return metas: metadata of the selected RePro iterations
    """

    #   parse the Relacs file
    relacs_file1 = load(fn1)
    relacs_file2 = load(fn2)

    #   select the desired section of the Relacs file
    try:
        metas, _, spike_data = relacs_file2.select({
            "ReProIndex": int(ix),
            "I": cur
        })
        _, _, trace_data = relacs_file1.select({
            "ReProIndex": int(ix),
            "I": cur
        })
    except:
        return None

    shape_list = []

    for i in range(len(metas)):
        spikeses = spike_data[i][spike_data[i] > 0]
        #   TODO: also restrict spikes to the stimulus duration, e.g.:
        # spike_data[i][ 0 < spike_data[i] < np.float(metas[0]["Settings"]["Timing"]["duration"].split("ms")[0])]
        spike_wavelet = np.zeros([spikeses.shape[0], 100])
        for s in range(len(spikeses)):
            t_index = np.where(trace_data[i][:, 0] == spikeses[s])
            spike_wavelet[s, :] = trace_data[i][t_index[0][0] -
                                                20:t_index[0][0] + 80, 1]

        shape_list.append(spike_wavelet)

    return np.vstack(shape_list), metas
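
A self-contained sketch of the window extraction used above (20 samples before and 80 after each spike, 100 samples in total), run on synthetic data; the hard-coded window mirrors segment_extraction and its exact-match index lookup.

import numpy as np

#   synthetic trace: column 0 = time in ms, column 1 = voltage
time = np.arange(0.0, 1000.0, 0.05)
trace = np.column_stack([time, np.random.randn(time.size)])
spike_times = time[[2000, 5000, 9000]]   # hypothetical spike times taken from the trace time base

spike_wavelets = np.zeros([spike_times.size, 100])
for s, st in enumerate(spike_times):
    t_index = np.where(trace[:, 0] == st)[0][0]                  # exact match, as in segment_extraction
    spike_wavelets[s, :] = trace[t_index - 20:t_index + 80, 1]   # 20 samples before, 80 after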
Example 4
    def _make_tuples(self, key):
        filename = BASEDIR + key['cell_id'] + '/baseisih1.dat'
        if os.path.isfile(filename):
            fi = load(filename)

            for i, (fi_meta, fi_key, fi_data) in enumerate(zip(*fi.selectall())):

                fi_data = np.asarray(fi_data).T
                row = {'block_no': i, 'cell_id': key['cell_id']}
                for (name, _), dat in zip(fi_key, fi_data):
                    row[name.lower()] = dat

                self.insert1(row)
Example 5
    def _make_tuples(self, key):
        filename = BASEDIR + key['cell_id'] + '/baseisih1.dat'
        if os.path.isfile(filename):
            fi = load(filename)

            for i, (fi_meta, fi_key,
                    fi_data) in enumerate(zip(*fi.selectall())):

                fi_data = np.asarray(fi_data).T
                row = {'block_no': i, 'cell_id': key['cell_id']}
                for (name, _), dat in zip(fi_key, fi_data):
                    row[name.lower()] = dat

                self.insert1(row)
Example 6
def spont_act(path_to_folder, subfolder, info):
    """
    Calls various sub-functions to extract the data and plot the raster, return map and interval histogram.
    :param path_to_folder: folder containing experiments
    :param subfolder: folder containing cell experimental data
    :param info: experiment metadata
    :return fnames_spont: list of raster plot figure file names
    """
    #   define the input data
    filename = ppjoin(path_to_folder, subfolder, "saveevents-Spikes-1.dat")

    #   get the file, extract the three data subunits
    relacs_file = load(filename)

    #   if relacs file is empty or too short due to aborted RePro presentation
    #   "try:" provides normal termination of the loop instead of error
    try:
        metas, _, datas = relacs_file.selectall()

    except:
        return None

    #   define list for rug filenames
    fnames_spont = []

    #   for each iteration of the same RePro
    for i in range(0, len(metas)):

        #   print processed RePro iteration
        print("Spont activity ReProIx", i)

        #   sorts spikes
        aa = [metas[i]]
        #   conversion into milliseconds
        spikeDict, stim_amps = fi_spikes_sort(aa, datas[i][:, 0] * 1000)

        #   computes instantaneous spike frequencies based on spikes
        freqDict, timeDict = fi_instant_freq(
            spikeDict)  #, metas[counter[i]:counter[i+1]])
        freq_continuous, freq_continuous2, timeLine, freqSD_continuous = fi_inst_freq_continuous(
            spikeDict, freqDict, metas)

        #   plots
        fnames = sp_raster_plot(i, path_to_folder, subfolder, spikeDict, freq_continuous, freq_continuous2, timeDict,\
                    timeLine, metas[i], info, fileN = i)

        #   appends the list of figure file names
        fnames_spont.append(fnames)

    return fnames_spont
Example 7
def read_info(path_to_folder, subfolder):
    """
    Loads info.dat containing the experiment metadata and returns it.
    :param path_to_folder: path to the subfolders containing experimental data
    :param subfolder: subfolder containing experiment/cell data
    :return meta_info: loaded experiment metadata (including the recording location)
    """

    filename = ppjoin(path_to_folder, subfolder, "info.dat")

    meta_info = load(filename)
    # print(meta_info)
    return meta_info
Example 8
def add_baseline_isi(baselinefile, nix_file):
    fi = load(baselinefile)

    for i, (fi_meta, fi_key, fi_data) in enumerate(zip(*fi.selectall())):
        secname = 'Baseline-ISI-Histogram-%i' % (i, )
        block = nix_file.create_block(secname, 'nix.analysis')
        fi_data = np.asarray(fi_data).T
        for (name, unit), dat in zip(fi_key, fi_data):
            if unit == 'HZ': unit = 'Hz' # fix bug in relacs

            fi_curve_data = block.create_data_array(name, "nix.trace", nix.DataType.Double, dat.shape)
            if unit != '1':
                fi_curve_data.unit = unit
            fi_curve_data.label = name
            fi_curve_data.data[:] = dat.astype(float)
            fi_curve_data = None

        sec = nix_file.create_section(secname, "nix.metadata")
        block.metadata = sec
        insert_metadata(sec, fi_meta)
        block = None
Example 9
def add_baseline_isi(baselinefile, nix_file):
    fi = load(baselinefile)

    for i, (fi_meta, fi_key, fi_data) in enumerate(zip(*fi.selectall())):
        secname = 'Baseline-ISI-Histogram-%i' % (i, )
        block = nix_file.create_block(secname, 'nix.analysis')
        fi_data = np.asarray(fi_data).T
        for (name, unit), dat in zip(fi_key, fi_data):
            if unit == 'HZ': unit = 'Hz'  # fix bug in relacs

            fi_curve_data = block.create_data_array(name, "nix.trace",
                                                    nix.DataType.Double,
                                                    dat.shape)
            if unit != '1':
                fi_curve_data.unit = unit
            fi_curve_data.label = name
            fi_curve_data.data[:] = dat.astype(float)
            fi_curve_data = None

        sec = nix_file.create_section(secname, "nix.metadata")
        block.metadata = sec
        insert_metadata(sec, fi_meta)
        block = None
Example 10
    # overwrite with parameters
    for opt, arg in opts:
        if opt == '-h':
            print(helptxt)
            sys.exit()

    relacsdir, nix_filename = args
    if relacsdir[-1] != '/':
        relacsdir += '/'


    nix_file = nix.File.open(nix_filename, nix.FileMode.Overwrite)


    stimuli = load(relacsdir + 'stimuli.dat')
    spike_times = []
    recordings_block_name = [e.strip() for e in relacsdir.split('/') if e][-1]


    #------------ add traces -------------------

    nix_traces = add_traces(relacsdir, stimuli, nix_file, recordings_block_name)
    recording_block = [k for k in nix_file.blocks if k.name == recordings_block_name][0]

    add_info(nix_file, relacsdir, recording_block)

    nix_spiketimes = recording_block.create_data_array('spikes', 'nix.event.spiketimes', nix.DataType.Double, (0,))
    nix_spiketimes.append_set_dimension()

    #------------ add fi curves-------------------
Example 11
def resist_plot(ReProIx, wd, expfolder, norm=False):
    """
    Takes resistance traces from the selected RePro iterations and plots them on top of each other.
    :param ReProIx: list of RePro indexes to plot
    :param wd: location of the experimental folder
    :param expfolder: name of the experimental folder
    :param norm: if True, traces are aligned to zero to remove offset differences
    """

    #   define file name
    filename = "membraneresistance-trace.dat"

    #   load data
    relacs_file = load(filename)

    # #   four panel figure
    # FHandles = plt.figure(figsize=(10, 10))
    # axarr_orig = FHandles.add_axes([0.1, 0.7, 0.85, 0.25])
    # axarr = FHandles.add_axes([0.10, 0.375, 0.85, 0.25])
    # axarrR = FHandles.add_axes([0.60, 0.05, 0.35, 0.25])
    # axarrTau = FHandles.add_axes([0.10, 0.05, 0.35, 0.25])

    #   three panel figure
    FHandles = plt.figure(figsize=(10, 8))
    axarr = FHandles.add_axes([0.10, 0.55, 0.85, 0.4])
    axarrR = FHandles.add_axes([0.60, 0.07, 0.35, 0.4])
    axarrTau = FHandles.add_axes([0.10, 0.07, 0.35, 0.4])

    #   define the colormap
    cmap = [
        "Blue", "DarkRed", "DarkOrange", "LimeGreen", "Gold", "Plum",
        "LightSlateGray", "DodgerBlue", "Blue", "DarkRed", "DarkOrange",
        "LimeGreen", "Gold", "Plum", "LightSlateGray", "DodgerBlue", "Blue",
        "DarkRed", "DarkOrange", "LimeGreen", "Gold", "Plum", "LightSlateGray",
        "DodgerBlue", "Blue", "DarkRed", "DarkOrange", "LimeGreen", "Gold",
        "Plum", "LightSlateGray", "DodgerBlue", "Blue", "DarkRed",
        "DarkOrange", "LimeGreen", "Gold", "Plum", "LightSlateGray",
        "DodgerBlue", "Blue", "DarkRed", "DarkOrange", "LimeGreen", "Gold",
        "Plum", "LightSlateGray", "DodgerBlue"
    ]
    # cmapBlue = ["DarkTurquoise","DarkCyan", "CadetBlue", "DeepSkyBlue", "CornFlowerBlue", "DodgerBlue", "LightSkyBlue", "LightSteelBlue"]

    #   counter
    counter = 0

    #   define trace list
    V_trace_list = []

    #   define resistance list
    R_list = []

    #   define g list
    g_list = []

    #   define tau list
    tau_list = []

    for ix in ReProIx:

        #   if relacs file is empty or too short due to aborted RePro presentation
        #   "try:" provides normal termination of the loop instead of error
        try:
            metas, _, datas = relacs_file.select({"ReProIndex": ix})

        except:
            return None
        #   convert into np array
        trace = np.array(datas)

        #   extract stimulus duration
        durStim = float(
            metas[0]["Settings"]["Stimulus"]["duration"].split('m')[0])

        #   plot original, non-normalized trace
        # axarr_orig.plot(trace[0,:,0], trace[0,:,1], color=cmap[counter])

        #   normalize to the 0 mV
        if norm:
            trace = aling_to_zero(trace)

        #   calculate the resistance from voltage drop in the steady-state part
        R = computes_resistance(
            trace,
            float(metas[0]["Settings"]["Stimulus"]["amplitude"].split('n')[0]),
            float(metas[0]["Settings"]["Stimulus"]["duration"].split('m')[0]))
        R_list.append(R)

        #   fits exponential
        parameters = exp_decay(
            trace[0, (durStim > trace[0, :, 0]) & (0 < trace[0, :, 0]), 0],
            trace[0, (durStim > trace[0, :, 0]) & (0 < trace[0, :, 0]), 1], 0)

        fitVoltage = parameters[0]*np.exp(-parameters[1]*trace[0, (durStim > trace[0,:,0]) & (0<trace[0,:,0]), 0])\
                     +parameters[2]

        #   append tau_list
        tau_list.append(1 / parameters[1])

        #   print RePro iteration
        print("ReProIx", ix, "Iterations", len(metas))

        #   append trace list
        V_trace_list.append(trace[0, :, 1])

        #   draw within loop
        axarr.plot(trace[0, :, 0], trace[0, :, 1], color=cmap[counter])

        #   extracts the g & plots
        if "SyncPulse" in metas[0]["Status"].keys():
            g_list.append(float(metas[0]["Status"]["g"].split('n')[0]))
            #   draws R against g
            axarrR.plot(float(metas[0]["Status"]["g"].split('n')[0]),
                        R,
                        'o',
                        color=cmap[counter])
            axarrTau.plot(float(metas[0]["Status"]["g"].split('n')[0]),
                          1 / parameters[1],
                          'o',
                          color=cmap[counter])
            axarr.plot(trace[0,
                             (durStim > trace[0, :, 0]) & (0 < trace[0, :, 0]),
                             0],
                       fitVoltage,
                       '--',
                       color='k')
            axarr.text(0.05,
                       0.05 + 0.05 * counter,
                       " ".join(['g = ', metas[0]["Status"]["g"]]),
                       color=cmap[counter],
                       transform=axarr.transAxes,
                       fontsize=10)

        #   counter increase
        counter = counter + 1

    #   draws plot, flips and transforms data holders
    # time=trace[0,:,0]
    # V_trace_list = np.array(V_trace_list)
    # V_trace_list = V_trace_list.T
    # axarr.plot(time, V_trace_list)
    # axarr.fill_between(trace[0,:,0], trace[0,:,1]+trace[0,:,2], trace[0,:,1]-trace[0,:,2], color=cmapBlue[counter], alpha=0.2)

    #   resistance
    print(R_list)

    #   writes x labels
    axarr.set_xlabel('Time [ms]')

    #   writes y labels
    axarr.set_ylabel('Relative voltage [mV]')

    #   grid
    axarr.grid(True)

    #   plot comments and other annotations
    axarr.text(0.25,
               0.90,
               " ".join(['Apteronotus leptorhynchus', expfolder]),
               transform=axarr.transAxes,
               fontsize=10)
    axarr.text(0.65,
               0.15,
               " ".join([
                   'Stimulus amplitude:',
                   metas[0]["Settings"]["Stimulus"]["amplitude"]
               ]),
               transform=axarr.transAxes,
               fontsize=10)
    axarr.text(0.65,
               0.05,
               " ".join([
                   'Stimulus duration:',
                   metas[0]["Settings"]["Stimulus"]["duration"]
               ]),
               transform=axarr.transAxes,
               fontsize=10)
    # axarrR.text(0.25, 0.15, " ".join(['Resistance:', comments4["Rss"]]), transform=axarrR.transAxes, fontsize = 10, color = 'r')

    #   draws R against g
    if "SyncPulse" in metas[0]["Status"].keys():
        # axarrR.plot(g_list, R_list, 'o')
        axarrR.set_ylabel('Membrane resistance [MOhm]')
        axarrR.set_xlabel('g leak [nS]')
        axarrTau.set_ylabel('Tau [ms]')
        axarrTau.set_xlabel('g leak [nS]')

    #   sets lims (g_list is only filled for recordings that carry a SyncPulse entry)
    if g_list:
        axarrR.set_xlim(min(g_list) * 1.1, max(g_list) * 1.1)
        axarrTau.set_xlim(min(g_list) * 1.1, max(g_list) * 1.1)
    axarrR.set_ylim(0, max(R_list) * 1.1)
    axarrTau.set_ylim(-1, max(tau_list) * 1.1)

    #   Save figures
    FHandles.savefig(ppjoin(".".join([expfolder, "resistance", 'png'])),
                     transparent=True)
    FHandles.savefig(ppjoin(".".join([expfolder, "resistance", 'svg'])),
                     transparent=True)
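
exp_decay is not included in this excerpt; the sketch below shows one plausible implementation, assuming scipy.optimize.curve_fit and the parameter order (amplitude, rate, offset) implied by the fitVoltage reconstruction above. The helper name is hypothetical.

import numpy as np
from scipy.optimize import curve_fit

def exp_decay_sketch(t, v, offset_guess):
    """Fit v(t) ~ A * exp(-lam * t) + C and return (A, lam, C)."""
    def model(t, A, lam, C):
        return A * np.exp(-lam * t) + C
    #   rough initial guesses: full drop as amplitude, one decay over the window, given offset
    p0 = [v[0] - offset_guess, 1.0 / max(t[-1] - t[0], 1e-9), offset_guess]
    params, _ = curve_fit(model, t, v, p0=p0)
    return params   # the membrane tau used in the plots above is 1 / lam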
Example 12
def wht_noise(path_to_folder, subfolder, info):
    """
    Main engine, opens the spike files.

    :param path_to_folder:  folder with recordings
    :param subfolder:       folder containing the experiment/cell
    :param info:            experimental data from info.dat
    :return fnames:         list of figure names to be used in the html build

    """

    #   define sample rate
    FS = 20000

    #   define the Gauss kernel width (= 1SD)
    sigma = 0.001  # seconds, from Berman & Maler 1998

    #   define the input data
    filename = ppjoin(path_to_folder, subfolder, "stimulus-whitenoise-spikes.dat")

    #   get the file, extract the three data subunits
    relacs_file = load(filename)

    #   extract RePro indices
    try:
        ReProIx = relacs_file.fields[('ReProIndex',)]

    except:
        return None

    #   convert the set object into a list
    ReProIx = list(ReProIx)
    ReProIx.sort()

    #   define empty list containing figure names
    fnames = []

    for ix in ReProIx:

        #   if relacs file is empty or too short due to aborted RePro presentation
        #   "try:" provides normal termination of the loop instead of error
        try:
            metas, _, datas = relacs_file.select({"ReProIndex": ix})

        except:
            return None

        print("ReProIx", ix, "Iterations", len(metas))

        #   determine figure handles
        fig = figure_handles()

        #   one-sided spectra have nFFT // 2 + 1 frequency bins, hence the + 1
        nFFT = 2048
        FFT = nFFT // 2 + 1

        #   prepare empty variables
        coh = np.zeros([len(metas), FFT], )
        coh_short = np.zeros([len(metas), FFT], )
        P_csd = np.zeros([len(metas), FFT], dtype=complex)
        P_csd_short = np.zeros([len(metas), FFT], dtype=complex)
        P_psd = np.zeros([len(metas), FFT])
        P_psd_short = np.zeros([len(metas), FFT])
        H = np.zeros([len(metas), FFT],)# dtype=complex)
        H_short = np.zeros([len(metas), FFT],)# dtype=complex)
        MI = np.zeros([len(metas), FFT], )
        MI_short = np.zeros([len(metas), FFT], )
        #   number of stimulus iterations

        for i in range(0, len(metas)):

            color_spread = np.linspace(0.35,0.8, len(metas))
            cmap = [ cm.Greys(x) for x in color_spread ]

            #   extract meta infos
            wnFname = metas[i]["envelope"]
            wnDur = float(metas[i]["Settings"]["Waveform"]["duration"].split("m")[0])  # duration in miliseconds

            #   conversions
            spikes = np.array(datas[i])

            #   conversions from milliseconds to seconds
            wnDur = float(metas[i]["Settings"]["Waveform"]["duration"].split("m")[0]) / 1000
            spikes /= 1000

            print(spikes.shape)
            convolved_Train, _ = train_convolve(spikes, sigma, FS, wnDur)
            print(sum(convolved_Train)/FS)
            wNoise = process_wn(path_to_folder, wnFname, len(convolved_Train))

            #   compute coherence, mutual information, transfer and the power spectra and cross-spectra density
            freq, coh[i,:], coh_short[i,:], H[i,:], H_short[i,:], MI[i,:], MI_short[i,:], \
                P_csd[i,:], P_csd_short[i,:], P_psd[i,:], P_psd_short[i,:] \
                = cohere_transfere_MI (convolved_Train, wNoise, nFFT, FS)

        #   plot coherence, mutual information etc....
        plot_the_lot(fig, freq, coh, coh_short, MI, MI_short, H, H_short, metas, cmap, np.array(datas))

        avgCoh, avgCoh_short, avgH, avgH_short, mut_inf, mut_inf_short = compute_avgs(coh, coh_short, H, H_short, MI, MI_short)

        plot_the_lot(fig, freq, avgCoh, avgCoh_short, mut_inf, mut_inf_short, avgH, avgH_short, metas, cmap = [cm.Reds(0.6)],raster='empty', annotation=True)

        fig[2].text(0.05, 0.95, " ".join(['Species:', info["Subject"]["Species"]]), transform=fig[1].transAxes,
                fontsize=10)
        fig[2].text(0.05, 0.90, " ".join(['ELL Segment:', info["Cell"]["Location"]]), transform=fig[1].transAxes,
                fontsize=10)

        #   define file name
        filename = "".join([str(metas[i]["ReProIndex"]), '_', 'whitenoise'])
        fnames.append(filename)

        fig[0].savefig(ppjoin(path_to_folder, subfolder, ".".join([filename, 'png'])), transparent=True)
        fig[0].savefig(ppjoin(path_to_folder, subfolder, ".".join([filename, 'svg'])), transparent=True)

        plt.close()

    return fnames
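
The FFT = nFFT // 2 + 1 sizing used above is simply the number of bins in a one-sided spectrum of an nFFT-point segment; a quick NumPy check:

import numpy as np

nFFT = 2048
FS = 20000
#   one-sided spectra of an nFFT-point segment have nFFT // 2 + 1 frequency bins
freq_bins = np.fft.rfftfreq(nFFT, d=1.0 / FS)
assert freq_bins.size == nFFT // 2 + 1   # 1025 bins for nFFT = 2048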
Example 13
def resistance(path_to_folder, subfolder, info):
    """
    Plots the membrane resistance data; one figure is made for each RePro run.
    :param path_to_folder: path to the folder containing cell/experiment data subfolders
    :param subfolder: subfolder containing cell/experiment data
    :param info: experiment metadata from info.dat
    :return fnames: list of figure file names
    """

    #   define the input data
    filename4 = ppjoin(path_to_folder, subfolder,
                       "membraneresistance-trace.dat")
    filename5 = ppjoin(path_to_folder, subfolder,
                       "membraneresistance-expfit.dat")

    #   get the file, extract the three data subunits
    relacs_file4 = load(filename4)
    relacs_file5 = load(filename5)

    #   if relacs file is empty or too short due to aborted RePro presentation
    #   "try:" provides normal termination of the loop instead of error
    try:
        metas4, _, datas4 = relacs_file4.selectall()
        metas5, _, datas5 = relacs_file5.selectall()

    except:
        return None

    #   count the RePro iterations
    resist = np.array(datas4)
    resist_expfit = np.array(datas5)
    ResistIter = len(datas4)

    #   setup plot dimension, 8" width, each RePro repetition gets 3" height
    # TODO: test figure output with a higher dpi settings (220 for retina display, 300 for print)
    # FHandles = plt.figure(figsize=(8, 4*ResistIter))#, dpi=220)

    #   horizontal plot positioning, h size, v size of individual axis pair
    hist_bot, hist_spacer, trace_bot = 0.1, 0.05, 0.22
    hist_width, trace_width = 0.1, 0.75
    # hist_height, trace_height = 0.75/ResistIter, 0.75/ResistIter
    hist_height, trace_height = 0.75, 0.75

    #   empty list for filename output
    fnames = []
    for i in range(0, ResistIter):
        FHandles = plt.figure(figsize=(10, 3))  #, dpi=220)
        #   loads meta data for memresistance-trace and mem.resistance-expfit
        comments4 = metas4[i]
        comments5 = metas5[i]

        #   print RePro iterations
        print("Resistance ReProIx", str(comments4["ReProIndex"]))

        #   defines voltage distribution axis, number and position
        axarrL = FHandles.add_axes([hist_bot, 0.15, hist_width, hist_height])

        #   histogram calculation for the Vrest distribution
        hist, bins = np.histogram(resist[i][resist[i][:, 0] < 0, 1],
                                  50,
                                  density=True)
        bincenters = 0.5 * (bins[1:] + bins[:-1])
        axarrL.plot(-hist, bincenters, 'dodgerblue', linewidth=1)

        #   find a V_rest
        memPotCalc = hist_peak_search(-hist, bincenters)
        # print(memPotCalc)

        #   defines resistance axis, number and position
        axarrR = FHandles.add_axes(
            [trace_bot, 0.15, trace_width, trace_height])

        #   draws plot
        axarrR.plot(resist[i][:, 0], resist[i][:, 1], color='dodgerblue')
        axarrR.fill_between(resist[i][:, 0],
                            resist[i][:, 1] + resist[i][:, 2],
                            resist[i][:, 1] - resist[i][:, 2],
                            color='dodgerblue',
                            alpha=0.2)

        #   set limits on the right plot
        axarrR.set_xlim(min(resist[i][:, 0]), max(resist[i][:, 0]))
        axarrR.set_ylim(
            min(resist[i][:, 1]) - abs(min(resist[i][:, 1]) * 0.05),
            max(resist[i][:, 1]) + abs(max(resist[i][:, 1]) * 0.05))

        #   set limits on the left plot
        axarrL.set_ylim(axarrR.get_ylim())
        axarrL.set_xlim(-max(hist), min(hist))

        #   define (pre-)stimulus limits
        preStimLim = [
            min(resist[i][resist[i][:, 0] < 0, 0]),
            max(resist[i][resist[i][:, 0] < 0, 0])
        ]

        #   draw line marking resting potential obtained by Relacs
        memPotentialRlx = [
            float(comments4["Vrest"].split('m')[0]),
            float(comments4["Vrest"].split('m')[0])
        ]
        axarrR.plot(preStimLim, memPotentialRlx, color='r')
        axarrL.axhline(memPotentialRlx[0], color='r', linewidth=1)

        ##   draw line marking resting potential calculated from trace data
        # memPotCalc = [np.round(np.median(resist[i][resist[i][:,0]<0,1])), np.round(np.median(resist[i][resist[i][:,0]<0,1]))]
        if memPotCalc != []:
            axarrR.plot(preStimLim, [memPotCalc[0], memPotCalc[0]], color='m')
            axarrL.axhline(memPotCalc[0], color='gold', linewidth=1)
            axarrR.text(0.25,
                        0.75,
                        "".join(['V_rest:', str(memPotCalc[0])]),
                        transform=axarrR.transAxes,
                        fontsize=10,
                        color='gold')

        #   if V_rest fails:
        if memPotCalc == []:
            memPotCalc = [0, 0]
            memPotCalc[0] = np.median(resist[i][resist[i][:, 0] < 0, 1])
            axarrR.plot(preStimLim, [memPotCalc[0], memPotCalc[0]], color='m')
            axarrL.axhline(memPotCalc[0], color='gold', linewidth=1)
            axarrR.text(0.25,
                        0.75,
                        "".join(['V_rest:', str(memPotCalc[0])]),
                        transform=axarrR.transAxes,
                        fontsize=10,
                        color='gold')

        #   plot comments and other annotations
        axarrR.text(0.25,
                    0.95,
                    "".join(['Species:', info["Subject"]["Species"]]),
                    transform=axarrR.transAxes,
                    fontsize=10)
        axarrR.text(0.25,
                    0.90,
                    "".join(['ELL Segment:', info["Cell"]["Location"]]),
                    transform=axarrR.transAxes,
                    fontsize=10)
        axarrR.text(0.25,
                    0.85,
                    "".join(['RePro Ix:',
                             str(comments4["ReProIndex"])]),
                    transform=axarrR.transAxes,
                    fontsize=10)
        axarrR.text(0.25,
                    0.8,
                    "".join(['RePro time:',
                             str(comments4["ReProTime"])]),
                    transform=axarrR.transAxes,
                    fontsize=10)
        #axarrR.text(0.25, 0.75, "".join(['V_rest:', comments4["Vrest"]]), transform=axarrR.transAxes, fontsize=10, color='r')
        axarrR.text(0.25,
                    0.7,
                    "".join(['DC current:', comments4["Status"]["Current-1"]]),
                    transform=axarrR.transAxes,
                    fontsize=10)
        axarrR.text(0.25,
                    0.65,
                    "".join(
                        ['Amp. mode:', comments4["Status"]["AmplifierMode"]]),
                    transform=axarrR.transAxes,
                    fontsize=10)
        axarrR.text(0.25,
                    0.60,
                    "".join([
                        'Stimulus:',
                        comments4["Settings"]["Stimulus"]["amplitude"]
                    ]),
                    transform=axarrR.transAxes,
                    fontsize=10)
        axarrR.text(0.25,
                    0.20,
                    "".join(['TauM:', comments5["Taum"]]),
                    transform=axarrR.transAxes,
                    fontsize=10,
                    color='r')
        axarrR.text(0.25,
                    0.15,
                    "".join(['Resistance:', comments4["Rss"]]),
                    transform=axarrR.transAxes,
                    fontsize=10,
                    color='r')
        if "SyncPulse" in comments4["Status"].keys():
            axarrR.text(0.80,
                        0.40,
                        "".join(
                            ['DynClamp:', comments4["Status"]["SyncPulse"]]),
                        transform=axarrR.transAxes,
                        fontsize=10)
            axarrR.text(0.80,
                        0.35,
                        "".join(['g:', comments4["Status"]["g"]]),
                        transform=axarrR.transAxes,
                        fontsize=10)
            axarrR.text(0.80,
                        0.30,
                        "".join(['E:', comments4["Status"]["E"]]),
                        transform=axarrR.transAxes,
                        fontsize=10)
            axarrR.text(0.80,
                        0.25,
                        "".join(['gVgate:', comments4["Status"]["gvgate"]]),
                        transform=axarrR.transAxes,
                        fontsize=10)
            axarrR.text(0.80,
                        0.20,
                        "".join(['EVgate:', comments4["Status"]["Evgate"]]),
                        transform=axarrR.transAxes,
                        fontsize=10)
            axarrR.text(0.80,
                        0.15,
                        "".join(['VgateTau:',
                                 comments4["Status"]["vgatetau"]]),
                        transform=axarrR.transAxes,
                        fontsize=10)
            axarrR.text(0.80,
                        0.10,
                        "".join(
                            ['VgateMid:', comments4["Status"]["vgatevmid"]]),
                        transform=axarrR.transAxes,
                        fontsize=10)
            axarrR.text(
                0.80,
                0.05,
                "".join(['VgateSlope:', comments4["Status"]["vgateslope"]]),
                transform=axarrR.transAxes,
                fontsize=10)

        durStim = [
            float(comments4["Settings"]["Stimulus"]["duration"].split('m')[0]),
            float(comments4["Settings"]["Stimulus"]["duration"].split('m')[0])
        ]
        axarrR.axvline(0, color='silver')
        axarrR.axvline(durStim[0], color='silver')

        # #   plot Relacs fitted envelope
        # axarrR.plot(resist_expfit[i][:,0], resist_expfit[i][:,1], color='r')

        #   plot internally fit curve
        axarrR.plot(resist_expfit[i][:, 0], resist_expfit[i][:, 1], color='r')

        #   fits the exponential decay over the voltage response to the current pulse
        # if memPotCalc != []:
        parameters = exp_decay(
            resist[i][(durStim[0] > resist[i][:, 0]) & (0 < resist[i][:, 0]),
                      0],
            resist[i][(durStim[0] > resist[i][:, 0]) & (0 < resist[i][:, 0]),
                      1], memPotCalc[0])

        fitVoltage = parameters[0]*np.exp(-parameters[1]*resist[i][(durStim[0] > resist[i][:,0]) & (0 < resist[i][:,0]),0])\
                     +parameters[2]

        axarrR.plot(resist[i][(durStim[0] > resist[i][:, 0]) &
                              (0 < resist[i][:, 0]), 0],
                    fitVoltage,
                    color='gold')
        axarrR.text(0.25,
                    0.50,
                    "".join([
                        'TauM_computed:',
                        str(np.round(1 / parameters[1], 1)), 'ms'
                    ]),
                    transform=axarrR.transAxes,
                    fontsize=10,
                    color='gold')

        #   find a V_rest as a peakutils.baseline
        # V_base = baseline_search(resist[i][(durStim[0] > resist[i][:,0]) & (0 < resist[i][:,0]),1], 4)
        # axarrR.plot(resist[i][(durStim[0] > resist[i][:,0]) & (0 < resist[i][:,0]),0], V_base, color='blue')

        # #   writes titles
        axarrR.set_title(subfolder + ": " + "Membrane resistance", fontsize=12)

        #   writes x labels beside the bottom most plot
        axarrR.set_xlabel('Time [ms]')
        axarrL.set_xlabel('Distribution')

        #   writes y labels
        axarrL.set_ylabel('Voltage [mV]')
        axarrL.set_xticklabels([], visible=False)
        axarrR.set_yticklabels([], visible=False)

        #   define file name
        filename = "".join([str(comments4["ReProIndex"]), '_', 'Resistance'])
        fnames.append(filename)

        FHandles.savefig(ppjoin(path_to_folder, subfolder,
                                ".".join([filename, 'png'])),
                         transparent=True)
        FHandles.savefig(ppjoin(path_to_folder, subfolder,
                                ".".join([filename, 'svg'])),
                         transparent=True)
        plt.close()

    return fnames
Example 14
def noise_transfer(tests, wd, expfolder):
    """
    Plots the transfer and coherence curves into graphic files (*.png, *.svg).
    Requires:
    :param tests: dictionary containing experimental conditions as keys and lists of ReProIx as values
    :param wd: location of the experimental folder
    :param expfolder: name of the experimental folder

    Outputs:
            graphic files containing coherence, MI and transfer
    """

    #   define sample rate
    FS = 20000

    #   define the Gauss kernel width (= 1SD)
    sigma = 0.001  # seconds, from Berman & Maler 1998

    #   define the input data
    filename = ppjoin(wd, expfolder, "stimulus-whitenoise-spikes.dat")

    #   read in some experimental data
    (_, _, filenames) = next(walk(ppjoin(wd, expfolder)))
    if "info.dat" in filenames:
        exp_info = dict(read_info(wd, expfolder)[0])
        print(exp_info)
    else:
        exp_info = {"Cell": {"Location": "UNLABELED"}}

    #   load data
    relacs_file = load(filename)

    #   four panel figure
    FHandles = figure_handles()

    #   define colors
    color_spread = np.linspace(0.5, 0.9, len(tests))
    cmap = [cm.Greys(x) for x in color_spread]
    # cmap = [ cm.viridis(x) for x in color_spread ]

    #   color counter
    col_count = 0

    #   one-sided spectra have nFFT // 2 + 1 frequency bins, hence the + 1
    nFFT = 1024
    FFT = nFFT // 2 + 1

    #   define spike dict for the raster plot
    spike_dict = OrderedDict()

    for k, v in tests.items():
        #   get RePro indexes of the same experimental condition
        ReProIx = tests[k]

        #   define empty variables, containing traces of the same experiments, different RePros
        coh_repro = np.zeros([len(ReProIx), FFT])
        coh_repro_short = np.zeros([len(ReProIx), FFT])
        H_repro = np.zeros([len(ReProIx), FFT])
        H_repro_short = np.zeros([len(ReProIx), FFT])
        MI_repro = np.zeros([len(ReProIx), FFT])
        MI_repro_short = np.zeros([len(ReProIx), FFT])

        spike_list = []

        #   define/reset counter
        counter = 0

        #   iteration of same experimental condition
        for ix in ReProIx:

            try:
                metas, _, datas = relacs_file.select({"ReProIndex": ix})

            except:
                return None

            #   define empty variables
            coh = np.zeros([len(metas), FFT])
            coh_short = np.zeros([len(metas), FFT])
            P_csd = np.zeros([len(metas), FFT], dtype=complex)
            P_csd_short = np.zeros([len(metas), FFT], dtype=complex)
            P_psd = np.zeros([len(metas), FFT])
            P_psd_short = np.zeros([len(metas), FFT])
            H = np.zeros([len(metas), FFT])
            H_short = np.zeros([len(metas), FFT])
            MI = np.zeros([len(metas), FFT])
            MI_short = np.zeros([len(metas), FFT])

            meta_repros = []

            #   number of stimulus iterations
            for i in range(0, len(metas)):

                #   extract meta infos
                wnFname = metas[i]["envelope"]
                wnDur = float(
                    metas[i]["Settings"]["Waveform"]["duration"].split(
                        "m")[0])  # duration in milliseconds

                #   spikes
                spikes = np.array(datas[i])

                #   conversions from milliseconds to seconds
                wnDur /= 1000
                spikes /= 1000

                print(spikes.shape)
                convolved_Train, _ = train_convolve(spikes, sigma, FS, wnDur)
                print(sum(convolved_Train) / FS)
                wNoise = process_wn(wd, wnFname, len(convolved_Train))

                #   compute coherence, mutual information, transfer and the power spectra and cross-spectra density
                freq, coh[i,:], coh_short[i,:], H[i,:], H_short[i,:], MI[i,:], MI_short[i,:], \
                    P_csd[i,:], P_csd_short[i,:], P_psd[i,:], P_psd_short[i,:] \
                    = cohere_transfere_MI (convolved_Train, wNoise, nFFT, FS)

            #   compute averages over iterations of the *same* repro
            coh_repro[counter,:], coh_repro_short[counter,:], \
                H_repro[counter,:], H_repro_short[counter,:], \
                MI_repro[counter,:], MI_repro_short[counter,:] = compute_avgs(coh, coh_short, H, H_short, MI, MI_short)

            #   store one of the metas
            meta_repros.append(metas[0])
            #   store all the spikes from the same type of experiment
            spike_list.append(datas)
            counter = counter + 1
        #   plot the lot
        plot_the_lot(FHandles,
                     freq,
                     coh_repro,
                     coh_repro_short,
                     MI_repro,
                     MI_repro_short,
                     H_repro,
                     H_repro_short,
                     metas,
                     cmap=[cmap[col_count]],
                     raster='empty',
                     annotation=False,
                     comparison=True)

        #   compute the average of the different repro presentations (with same test conditions)
        avgCoh, avgCoh_short, avgH, avgH_short, avgMI, avgMI_short = compute_avgs(
            coh_repro, coh_repro_short, H_repro, H_repro_short, MI_repro,
            MI_repro_short)

        #   plot the lot
        plot_the_lot(FHandles,
                     freq,
                     avgCoh,
                     avgCoh_short,
                     avgMI,
                     avgMI_short,
                     avgH,
                     avgH_short,
                     meta_repros,
                     cmap=[cmap[col_count]],
                     raster='empty',
                     annotation=False,
                     comparison=True)

        #   provide gVgate values
        FHandles[1].text(0.5,
                         0.8 - 0.05 * col_count,
                         " ".join([
                             r'$g_{Vgate}$', ' = ',
                             meta_repros[0]["Status"]["gvgate"].split(".")[0],
                             'nS'
                         ]),
                         color=cmap[col_count],
                         transform=FHandles[1].transAxes,
                         fontsize=10)
        FHandles[5].text(
            0.5,
            0.8 - 0.05 * col_count,
            " ".join([
                r'$\tau_{Vgate}$', ' = ',
                meta_repros[0]["Status"]["vgatetau"].split(".")[0], 'ms'
            ]),
            color=cmap[col_count],
            transform=FHandles[5].transAxes,
            fontsize=10)

        #   update spike dictionary
        spike_dict[k] = spike_list

        #   update the color counter
        col_count += 1

    #   write FFT value
    FHandles[1].text(0.05,
                     0.90,
                     " ".join(['FFT = ', str(nFFT)]),
                     color='k',
                     transform=FHandles[1].transAxes,
                     fontsize=10)

    #   plot raster plot
    spike_iter_count = 0
    for i, k in enumerate(spike_dict):
        for j in range(len(spike_dict[k])):
            for gnj in range(len(spike_dict[k][j])):
                FHandles[7].plot(spike_dict[k][j][gnj],
                                 np.zeros(len(spike_dict[k][j][gnj])) +
                                 spike_iter_count,
                                 '|',
                                 color=cmap[i],
                                 ms=12)
                spike_iter_count += 1

    FHandles[7].set_title(
        ppjoin(".".join([
            exp_info["Cell"]["Location"], ':', expfolder, "dyn_noise_transfer",
            '_'.join([k for k, v in tests.items()]), '_'.join([
                str(x) for sublist in [v for k, v in tests.items()]
                for x in sublist
            ]), "fft",
            str(nFFT)
        ])))
    #   Save figures
    FHandles[0].savefig(ppjoin(".".join([
        expfolder, "coherence_transfer",
        '.'.join([k for k, v in tests.items()]), '.'.join([
            str(x) for sublist in [v for k, v in tests.items()]
            for x in sublist
        ]), "fft",
        str(nFFT), 'svg'
    ])),
                        transparent=True)
    FHandles[0].savefig(ppjoin(".".join([
        expfolder, "coherence_transfer",
        '.'.join([k for k, v in tests.items()]), ".".join([
            str(x) for sublist in [v for k, v in tests.items()]
            for x in sublist
        ]), "fft",
        str(nFFT), 'png'
    ])),
                        transparent=True)
    #   save figures into dedicated folder if necessary
    FHandles[0].savefig(ppjoin(
        '../overviewTransfer/', ".".join([
            "_".join([
                exp_info["Cell"]["Location"], expfolder, "coherence_transfer",
                "".join([k for k, v in tests.items()]), "_".join([
                    str(x) for sublist in [v for k, v in tests.items()]
                    for x in sublist
                ]), "fft",
                str(nFFT)
            ]), 'pdf'
        ])),
                        transparent=True)
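
train_convolve is not part of this excerpt; a plausible sketch of such a helper, assuming the usual approach of binning spike times at the sampling rate and convolving with a Gaussian kernel (sigma in seconds, as above). The function name is an assumption.

import numpy as np

def train_convolve_sketch(spike_times_s, sigma, fs, duration_s):
    """Bin spike times at 1/fs and convolve with a Gaussian kernel of width sigma (seconds)."""
    n = int(round(duration_s * fs))
    train = np.zeros(n)
    idx = np.clip((np.asarray(spike_times_s) * fs).astype(int), 0, n - 1)
    train[idx] = 1.0
    t = np.arange(-5.0 * sigma, 5.0 * sigma, 1.0 / fs)
    kernel = np.exp(-0.5 * (t / sigma) ** 2)
    kernel /= kernel.sum() / fs   # scale so the convolved train is a rate in spikes per second
    return np.convolve(train, kernel, mode='same'), train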
Example 15
    def _make_tuples(self, key):
        repro = 'BaselineActivity'
        basedir = BASEDIR + key['cell_id']
        spikefile = basedir + '/basespikes1.dat'
        if os.path.isfile(spikefile):
            stimuli = load(basedir + '/stimuli.dat')

            traces = load_traces(basedir, stimuli)
            spikes = load(spikefile)
            spi_meta, spi_key, spi_data = spikes.selectall()

            localeod = Baseline.LocalEODPeaksTroughs()
            spike_table = Baseline.SpikeTimes()

            for run_idx, (spi_d, spi_m) in enumerate(zip(spi_data, spi_meta)):
                print("\t%s repeat %i" % (repro, run_idx))

                # match index from stimspikes with run from stimuli.dat
                stim_m, stim_k, stim_d = stimuli.subkey_select(RePro=repro, Run=spi_m['index'])

                if len(stim_m) > 1:
                    raise KeyError('%s and index are not unique to identify stimuli.dat block.' % (repro,))
                else:
                    stim_k = stim_k[0]
                    stim_m = stim_m[0]
                    signal_column = \
                        [i for i, k in enumerate(stim_k) if k[:4] == ('stimulus', 'GlobalEField', 'signal', '-')][0]

                    valid = []

                    if stim_d == [[[0]]]:
                        print("\t\tEmpty stimuli data! Continuing ...")
                        continue

                    for d in stim_d[0]:
                        if not d[signal_column].startswith('FileStimulus-value'):
                            valid.append(d)
                        else:
                            print("\t\tExcluding a reset trial from stimuli.dat")
                    stim_d = valid

                if len(stim_d) > 1:
                    print(
                        """\t\t%s index %i has %i trials, but stimuli.dat has %i. Not including data.""" % (
                            spikefile, spi_m['index'], len(spi_d), len(stim_d)))
                    continue

                start_index, index = [(i, k[-1]) for i, k in enumerate(stim_k) if 'traces' in k and 'V-1' in k][0]
                sample_interval, time_unit = get_number_and_unit(
                    stim_m['analog input traces']['sample interval%i' % (index,)])

                # make sure that everything was sampled with the same interval
                sis = []
                for jj in range(1, 5):
                    si, tu = get_number_and_unit(stim_m['analog input traces']['sample interval%i' % (jj,)])
                    assert tu == 'ms', 'Time unit is not ms anymore!'
                    sis.append(si)
                assert len(np.unique(sis)) == 1, 'Different sampling intervals!'

                duration = ureg.parse_expression(spi_m['duration']).to(time_unit).magnitude

                start_idx, stop_idx = [], []
                # start_times, stop_times = [], []

                start_indices = [d[start_index] for d in stim_d]
                for begin_index, trial in zip(start_indices, spi_d):
                    start_idx.append(begin_index)
                    stop_idx.append(int(begin_index + duration / sample_interval))

                to_insert = dict(key)
                to_insert['repeat'] = spi_m['index']
                to_insert['eod'] = float(spi_m['EOD rate'][:-2])
                to_insert['duration'] = duration / 1000 if time_unit == 'ms' else duration
                to_insert['samplingrate'] = 1 / sample_interval * 1000 if time_unit == 'ms' else 1 / sample_interval

                self.insert1(to_insert)

                for trial_idx, (start, stop) in enumerate(zip(start_idx, stop_idx)):
                    if start > 0:
                        tmp = dict(key, repeat=spi_m['index'])
                        leod = traces['LocalEOD-1']['data'][start:stop]
                        _, tmp['peaks'], _, tmp['troughs'] = peakdet(leod)
                        localeod.insert1(tmp, replace=True)
                    else:
                        print("Negative indices in stimuli.dat. Skipping local peak extraction!")

                    spike_table.insert1(dict(key, times=spi_d, repeat=spi_m['index']), replace=True)
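
get_number_and_unit is not shown in this excerpt; a minimal sketch of such a parser for strings like '0.05ms', using only the standard library (name and behaviour are assumptions):

import re

def get_number_and_unit_sketch(value_string):
    """Split a string like '0.05ms' into (0.05, 'ms'); the unit is None if absent."""
    match = re.match(r"\s*([-+]?\d*\.?\d+(?:[eE][-+]?\d+)?)\s*(\S*)", str(value_string))
    if match is None:
        return None, None
    return float(match.group(1)), (match.group(2) or None)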
Example 16
def fi_spikes(path_to_folder, subfolder, info):
    """
    fi_spikes parses the information from ficurve-spikes.dat
    :param path_to_folder: location of the experimental folder
    :param subfolder: name of the experimental folder
    :param info: metas of experiment
    :return fnames_rug: raster plot figure names
    :return fnames_FI: file names of the FI plot

    Outputs: graphic files containing FI plots (rates, PSTH, ....) and raster plots

    """
    #   define the input data
    filename = ppjoin(path_to_folder, subfolder, "ficurve-spikes.dat")
    filename2 = ppjoin(path_to_folder, subfolder, "ficurve-data.dat")

    #   get the file, extract the three data subunits
    relacs_file = load(filename)
    relacs_file2 = load(filename2)

    #   if relacs file is empty or too short due to aborted RePro presentation
    #   "try:" provides normal termination of the loop instead of error
    try:
        #   load only metas
        metas2,_, _ = relacs_file2.selectall()
    except:
        return None

    ReProIxList = []
    for i in range(len(metas2)):
        ReProIxList.append(metas2[i]["ReProIndex"])

    #   define list for rug filenames
    fnames_rug = []
    fnames_FI = []

    #   for each iteration of the same RePro
    for i in range(len(ReProIxList)):

        #   print processed RePro iteration
        # print("FI Raster & Return ReProIx", ReProIxList[counter[i]])#metas[ReProIx[i]]["ReProIndex"])
        print("FI Raster & Return ReProIx", ReProIxList[i])

        #   select all iterations of the same repro
        metas, _, datas = relacs_file.select({"ReProIndex": ReProIxList[i]})

        #   sort spikes
        spikeDict, stim_amps = fi_spikes_sort(metas, datas)

        #   computes instantaneous spike frequencies based on spikes
        freqDict, timeDict = fi_instant_freq(spikeDict) #, metas[counter[i]:counter[i+1]])
        freq_continuous, freq_continuous2, timeLine, freqSD_continuous = fi_inst_freq_continuous(spikeDict, freqDict, metas)

        #   computes steady state, peak and rest spike frequency, based on above
        fnames2 = fi_curve2(i, path_to_folder, subfolder, timeLine, freq_continuous, freqSD_continuous, metas2, info)

        #   taus, instFreqFit, fitTimeLine = fi_tau_fit(freq_continuous2, metas)
        fnames = fi_rug_plot(i, path_to_folder, subfolder, spikeDict, freq_continuous, freq_continuous2, timeDict,\
                    timeLine, metas, info)

        fnames_rug.append(fnames)
        fnames_FI.append(fnames2)

    return fnames_rug,fnames_FI
Example 17
def fi_voltage_plot(ReProList, iterations, expfolder):

    #   defines relative sizes of individual raster plots
    rug_bot = 0.1
    rug_width = 0.4
    rug_height = 0.67 / (len(ReProList))
    rug_step = 1 / (len(ReProList))

    #   defines the color map
    cmapBlue = [
        "Blue", "DarkTurquoise", "CadetBlue", "DeepSkyBlue", "CornFlowerBlue",
        "DodgerBlue", "LightSkyBlue", "LightSteelBlue", "DarkCyan"
    ]

    cmap = [
        "Blue", "DarkRed", "DarkOrange", "LimeGreen", "Gold", "Plum",
        "LightSlateGray", "DodgerBlue", "Blue", "DarkRed", "DarkOrange",
        "LimeGreen", "Gold", "Plum", "LightSlateGray", "DodgerBlue", "Blue",
        "DarkRed", "DarkOrange", "LimeGreen", "Gold", "Plum", "LightSlateGray",
        "DodgerBlue", "Blue", "DarkRed", "DarkOrange", "LimeGreen", "Gold",
        "Plum", "LightSlateGray", "DodgerBlue", "Blue", "DarkRed",
        "DarkOrange", "LimeGreen", "Gold", "Plum", "LightSlateGray",
        "DodgerBlue", "Blue", "DarkRed", "DarkOrange", "LimeGreen", "Gold",
        "Plum", "LightSlateGray", "DodgerBlue"
    ]

    #   define the input data file name
    filename = "ficurve-traces.dat"

    #   get the file and load it
    relacs_file = load(filename)

    #   define the absolute size of the raster & return plot
    FHandles = plt.figure(figsize=(14, 3 * len(ReProList) + 2))

    #   counter
    counter = 0

    #   define OrderedDict for freq_continuous between repros.
    freq = OrderedDict()

    #   define g_list
    g_list = []
    tau_list = []

    for k, v in ReProList.items():
        metas, _, datas = relacs_file.select({"ReProIndex": int(k), "I": v})

        trace = datas[iterations[0]][:, 1]
        timeLine = datas[iterations[0]][:, 0]

        #   add axes
        axarr = FHandles.add_axes([
            rug_bot, counter * rug_step + (0.25 / (len(ReProList) + 1)),
            rug_width, rug_height
        ])
        axarr.plot(timeLine, trace, color=cmap[counter])

        axarr.set_xlabel('Time [ms]')
        axarr.set_ylabel('Voltage [mV]')

        if "SyncPulse" in metas[0]["Status"].keys():
            g_list.append(metas[0]["Status"]["gvgate"])
            tau_list.append(metas[0]["Status"]["vgatetau"])
            #   draws R against g
            axarr.text(0.02,
                       0.15,
                       " ".join(['gvgate = ', metas[0]["Status"]["gvgate"]]),
                       color=cmap[counter],
                       transform=axarr.transAxes,
                       fontsize=10)
            axarr.text(0.8,
                       0.15,
                       " ".join(['Vtau = ', metas[0]["Status"]["vgatetau"]]),
                       color=cmap[counter],
                       transform=axarr.transAxes,
                       fontsize=10)
        #   grid
        axarr.grid(True)

        axarr.set_xlim(0, 400)

        #   increase counter
        counter = counter + 1

    ##   prepare file names
    #   extract ReProIx, sort them and merge them
    keys = [i for i in ReProList]
    keys = sorted(list(map(int, keys)))
    keys = '.'.join(map(str, keys))
    #   extract current values
    current = list(ReProList.values())[0]
    #   join keys, currents and iterations
    name = '.'.join([keys, current, 'iter', str(iterations[0])])

    #   Save figures
    FHandles.savefig(ppjoin(".".join([expfolder, "dyn_FI_trace", name,
                                      'png'])),
                     transparent=True)
    FHandles.savefig(ppjoin(".".join([expfolder, "dyn_FI_trace", name,
                                      'svg'])),
                     transparent=True)
Example 18
def dyn_fi_freq(RePro, wd, expfolder):

    """
    dyn_fi_freq uses FI.fi_spikes_sort, FI.fi_instant_freq and FI.fi_inst_freq_continuous to extract the steady-state, peak and rest frequencies
    :param RePro: list of RePro indexes to be processed
    :param wd: working directory
    :param expfolder: folder containing the actual experiments
    :return ss: ordered dictionary keyed by ReProIndex; each value is a list with an nd array of steady-state frequencies paired with currents, plus the gvgate, vgatetau and g values
    :return peak: same structure, for the peak frequencies
    :return metas: metas of the last processed RePro
    :return rest: same structure, for the rest frequencies
    """

    #   define sample rate (instantaneous frequencies are computed with 1 ms resolution)
    dt = 1

    #   define top dictionaries
    ss = OrderedDict()
    peak = OrderedDict()
    rest = OrderedDict()

    for ix in RePro:

        #   define the input data
        filename = ppjoin(wd, expfolder, "ficurve-spikes.dat")

        #   get the file, extract the three data subunits
        relacs_file = load(filename)

        #   if relacs file is empty or too short due to aborted RePro presentation
        #   "try:" provides normal termination of the loop instead of error
        try:
            metas, _, datas = relacs_file.select({"ReProIndex": ix})
        except:
            return None

        #   extracts RePro indexes
        ReProIxList = []
        for i in range(0,len(metas)):
            # comments2 = metas[i]
            # extract the unique values only one time
            ReProIxList.append(metas[i]["ReProIndex"])

        # count occurrences of same ReProIx
        ReProIx = [len(list(group)) for key, group in groupby(ReProIxList)]

        ReProIx.insert(0, 0) # insert zero for easier job in indexing
        counter = np.cumsum(ReProIx)

        #   define list for rug filenames
        fnames_rug = []

        #   print processed RePro iteration
        print("FI Raster & Return ReProIx", ReProIxList[counter[0]])#metas[ReProIx[i]]["ReProIndex"])

        #   sorts spikes
        spikeDict, stim_amps = fi_spikes_sort(metas[counter[0]:counter[1]], datas[counter[0]:counter[1]])
        #   computes instantaneous spike frequencies based on spikes
        freqDict, timeDict = fi_instant_freq(spikeDict) #, metas[counter[i]:counter[i+1]])
        freq_continuous, freq_continuous2, timeLine, freqSD_continuous = fi_inst_freq_continuous(spikeDict, freqDict, metas)

        #   define empty nd arrays to contain steady states and peak freqs alongside with injected currents
        steady_state = np.ndarray(shape=(len(freq_continuous),3), dtype=float)
        peak_freq = np.ndarray(shape=(len(freq_continuous),3), dtype=float)
        rest_freq = np.ndarray(shape=(len(freq_continuous),3), dtype=float)

        #   extract duration of initial interval without stimulus and duration of stimulus
        durDelay = int(metas[0]["Settings"]["Timing"]["delay"].strip("ms"))
        durStimulus = int(metas[0]["Settings"]["Timing"]["duration"].strip("ms"))

        counter2 = 0

        for k, v in freq_continuous.items():
            #   TODO: automatically obtain test pulse length
            steady_state[counter2,0] = float(k.strip("nA"))
            steady_state[counter2,1] = np.average(freq_continuous[k][durStimulus+durDelay-150:durStimulus+durDelay])
            steady_state[counter2,2] = np.average(freqSD_continuous[k][durStimulus+durDelay-150:durStimulus+durDelay])

            peak_freq[counter2,0] = float(k.strip("nA"))
            # peak_freq[counter2,1] = np.max(freq_continuous[k][durDelay:durDelay+100])   #   computes peak freq in certain time window
            peak_freq[counter2,1] = np.max(freq_continuous[k][durDelay:durDelay+durStimulus])   #   takes out maximum frequency event
            peak_freq[counter2,2] = freqSD_continuous[k][durDelay - 1 + np.argmax(freq_continuous[k][durDelay:durDelay+durStimulus])]   #   SD at the time of the peak frequency

            rest_freq[counter2,0] = float(k.strip("nA"))
            rest_freq[counter2,1] = np.max(freq_continuous[k][0:durDelay])
            rest_freq[counter2,2] = np.average(freqSD_continuous[k][0:durDelay])

            counter2 = counter2+1

        ss[ix] = [steady_state, metas[0]["Status"]["gvgate"], metas[0]["Status"]["vgatetau"], metas[0]["Status"]["g"]]
        peak[ix] = [peak_freq, metas[0]["Status"]["gvgate"], metas[0]["Status"]["vgatetau"], metas[0]["Status"]["g"]]
        rest[ix] = [rest_freq, metas[0]["Status"]["gvgate"], metas[0]["Status"]["vgatetau"], metas[0]["Status"]["g"]]

    return ss, peak, metas, rest
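

#   A usage sketch, not part of the original snippet: the working-directory
#   handling mirrors the other examples, and the RePro index list [2, 5] is
#   purely illustrative.
if __name__ == "__main__":
    from os import getcwd
    import matplotlib.pyplot as plt

    wd = getcwd().split(getcwd().split("/")[-1])[0]
    expfolder = getcwd().split("/")[-1]

    ss, peak, metas, rest = dyn_fi_freq([2, 5], wd, expfolder)
    for ix, (freqs, gvgate, vgatetau, g) in ss.items():
        #   column 0: current step [nA], column 1: frequency [Hz], column 2: SD
        plt.errorbar(freqs[:, 0], freqs[:, 1], freqs[:, 2], label="RePro %s" % ix)
    plt.xlabel("Current [nA]")
    plt.ylabel("Steady-state frequency [Hz]")
    plt.legend()
    plt.show()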
Esempio n. 19
0
def add_spikes(stimuli, spikefile, spike_times, nix_file, nix_spiketimes):
    if 'stimspikes' in spikefile:
        repro = 'FileStimulus'
    elif 'samallspikes' in spikefile:
        repro = 'SAM'
    else:
        raise Exception('Cannot determine repro')

    print "Assuming RePro=%s" % (repro, )

    ureg = UnitRegistry()

    spikes = load(spikefile)
    spi_meta, spi_key, spi_data = spikes.selectall()

    for run_idx, (spi_d, spi_m) in enumerate(zip(spi_data, spi_meta)):
        print "\t%s run %i" % (repro, run_idx)

        if repro == 'FileStimulus':
            spi_m = add_stimulus_meta(spi_m)
        # match index from stimspikes with run from stimuli.dat
        stim_m, stim_k, stim_d = stimuli.subkey_select(RePro=repro,
                                                       Run=spi_m['index'])

        if len(stim_m) > 1:
            raise KeyError(
                '%s and index are not unique to identify stimuli.dat block.' %
                (repro, ))
        else:
            stim_k = stim_k[0]
            stim_m = stim_m[0]
            signal_column = [
                i for i, k in enumerate(stim_k)
                if k[:4] == ('stimulus', 'GlobalEField', 'signal', '-')
            ][0]

            valid = []

            if stim_d == [[[0]]]:
                print("\t\tEmpty stimuli data! Continuing ...")
                continue

            for d in stim_d[0]:
                if not d[signal_column].startswith('FileStimulus-value'):
                    valid.append(d)
                else:
                    print("\t\tExcluding a reset trial from stimuli.dat")
            stim_d = valid

        if len(stim_d) != len(spi_d):
            print(
                """\t\t%s index %i has %i trials, but stimuli.dat has %i. Trial was probably aborted. Not including data."""
                % (spikefile, spi_m['index'], len(spi_d), len(stim_d)))
            continue

        start_index, index = [(i, k[-1]) for i, k in enumerate(stim_k)
                              if 'traces' in k and 'V-1' in k][0]
        sample_interval, time_unit = get_number_and_unit(
            stim_m['analog input traces']['sample interval%i' % (index, )])

        if repro == 'FileStimulus':
            duration = ureg.parse_expression(
                spi_m['duration']).to(time_unit).magnitude
        elif repro == 'SAM':
            duration = ureg.parse_expression(
                spi_m['Settings']['Stimulus']['duration']).to(
                    time_unit).magnitude

        start_times = []

        start_indices = [d[start_index] for d in stim_d]
        for begin_index, trial in zip(start_indices, spi_d):
            start_time = begin_index * sample_interval
            start_times.append(start_time)
            spike_times.append(trial + start_time)

        start_times = np.asarray(start_times)
        durations = duration * np.ones(len(stim_d))
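        #   each run becomes a NIX MultiTag: `positions` stores the trial start
        #   times, `extents` the stimulus durations, and the tag references the
        #   spike-time array and the raw traces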

        tag_name = "%s-run-%i" % (repro, run_idx)
        positions = recording_block.create_data_array(tag_name + '_starts',
                                                      'nix.event.position',
                                                      nix.DataType.Double,
                                                      start_times.shape)
        positions.data.write_direct(start_times)
        positions.append_set_dimension()

        extents = recording_block.create_data_array(tag_name + '_extents',
                                                    'nix.event.extents',
                                                    nix.DataType.Double,
                                                    durations.shape)
        extents.data.write_direct(durations)
        extents.append_set_dimension()

        tag = recording_block.create_multi_tag(tag_name, 'nix.experiment_run',
                                               positions)
        tag.extents = extents
        tag.references.append(nix_spiketimes)

        for nt in nix_traces:
            tag.references.append(nt)

        sec = nix_file.create_section(tag_name, "nix.metadata")
        tag.metadata = sec

        insert_metadata(sec, stim_m)
        insert_metadata(sec, spi_m)
Esempio n. 20
0
    Extracts the resistance traces from the raw data, plots them, and computes the standard deviation of the signal
    over the 50 ms preceding stimulus delivery.
    Runs from the command line in the experiment subfolder: report_noise_traces.py
    The input argument is a dictionary whose keys are the RePro indexes to include in the same analysis; the stimulus
    iterations to exclude are passed as a list in the corresponding value. If none should be excluded, use [].
    example: report_noise_traces.py -d"{'2':[1,4,6], '4':[]}"
    """
    #   todo:   replace dict with ordered dict

    wd = getcwd().split(getcwd().split("/")[-1])[0]
    expfolder = getcwd().split("/")[-1]

    ReProList = command_interpreter(sys.argv[1:])

    filename = "membraneresistance-expfit.dat"
    relacs_file = load(filename)
    metas, _, _ = relacs_file.selectall()

    #   get all RePro executions
    ReProIxList = [str(metas[i]["ReProIndex"]) for i in range(len(metas))]

    #   convert metas into dict
    metas_dict = OrderedDict()
    for i in range(len(metas)):
        metas_dict[metas[i]["ReProIndex"]] = metas[i]

    #   read raw traces
    raw_data_dict = read_stimuli_position(wd, expfolder, 'MembraneResistance',
                                          2)

    #   replace the keys with the ReProIndices
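
#   A minimal sketch (an assumption, not the original command_interpreter helper)
#   of how the -d"{'2':[1,4,6], '4':[]}" argument could be turned into the
#   expected dictionary:
import ast


def parse_repro_dict(argv):
    """Return the dict passed after -d, e.g. {'2': [1, 4, 6], '4': []}."""
    for arg in argv:
        if arg.startswith("-d"):
            return ast.literal_eval(arg[2:])
    return {}


#   example: parse_repro_dict(sys.argv[1:]) called with -d"{'2':[1,4,6], '4':[]}"
#   yields {'2': [1, 4, 6], '4': []}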
Esempio n. 21
0
def FI_plot(ReProIx, wd, expfolder, norm=False):
    """"
    Plots the FI curve into the graphic file (*.png, *.svg)
        Requires:
    :param path_to_folder: location of the experimental folder
    :param subfolder: name of the experimental folder
        Outputs:
            graphic files containing FI plots (rates and PSTH) as .png and .svg
    """
    #   define the input data
    filename = ppjoin(wd, expfolder, "ficurve-data.dat")

    #   load data
    relacs_file = load(filename)

    #   four panel figure
    FHandles = plt.figure(figsize=(15, 10))
    axarrPeak = FHandles.add_axes([0.05, 0.55, 0.40, 0.40])
    axarrSteady = FHandles.add_axes([0.55, 0.55, 0.40, 0.40])
    axarrAvg = FHandles.add_axes([0.05, 0.05, 0.40, 0.40])
    axarrBase = FHandles.add_axes([0.55, 0.05, 0.40, 0.40])

    #   define the colormap
    cmapGray = [
        "Blue", "DarkRed", "DarkOrange", "LimeGreen", "Gold", "Plum",
        "LightSlateGray", "DodgerBlue", "Blue", "DarkRed", "DarkOrange",
        "LimeGreen", "Gold", "Plum", "LightSlateGray", "DodgerBlue", "Blue",
        "DarkRed", "DarkOrange", "LimeGreen", "Gold", "Plum", "LightSlateGray",
        "DodgerBlue", "Blue", "DarkRed", "DarkOrange", "LimeGreen", "Gold",
        "Plum", "LightSlateGray", "DodgerBlue", "Blue", "DarkRed",
        "DarkOrange", "LimeGreen", "Gold", "Plum", "LightSlateGray",
        "DodgerBlue", "Blue", "DarkRed", "DarkOrange", "LimeGreen", "Gold",
        "Plum", "LightSlateGray", "DodgerBlue"
    ]
    counter = 0

    for ix in ReProIx:

        #   if relacs file is empty or too short due to aborted RePro presentation
        #   "try:" provides normal termination of the loop instead of error
        try:
            metas, _, datas = relacs_file.select({"ReProIndex": ix})

        except:
            return None
        #   convert into np array

        #   extract the data for each FI curve
        Istep = datas[0][:, 0]
        FRavg = datas[0][:, 3]
        FRavgSD = datas[0][:, 4]
        FRbase = datas[0][:, 5]
        FRbaseSD = datas[0][:, 6]
        FRpeak = datas[0][:, 9]
        FRpeakSD = datas[0][:, 10]
        FRsteady = datas[0][:, 12]
        FRsteadySD = datas[0][:, 13]

        #   plot data for each FI curve
        axarrBase.plot(Istep, FRbase, color=cmapGray[counter])
        # axarrBase.fill_between(Istep, FRbase+FRbaseSD, FRbase-FRbaseSD, color=cmapGray[counter], alpha=0.2)
        axarrPeak.plot(Istep, FRpeak, color=cmapGray[counter])
        # axarrPeak.fill_between(Istep, FRpeak+FRpeakSD, FRpeak-FRpeakSD, color=cmapGray[counter], alpha=0.2)
        axarrSteady.plot(Istep, FRsteady, color=cmapGray[counter])
        # axarrSteady.fill_between(Istep, FRsteady+FRsteadySD, FRsteady-FRsteadySD, color=cmapGray, alpha=0.2)
        axarrAvg.plot(Istep, FRavg, color=cmapGray[counter])
        # axarrAvg.fill_between(Istep, FRavg+FRavgSD, FRavg-FRavgSD, color=cmapGray[counter], alpha=0.2)

        #   set limits
        axarrPeak.set_ylim(0, 250)
        axarrSteady.set_ylim(0, 200)
        axarrAvg.set_ylim(0, 200)
        axarrBase.set_ylim(0, 200)

        #   set limits
        axarrPeak.set_xlim(-0.8, 0)
        axarrSteady.set_xlim(-0.8, 0)
        axarrAvg.set_xlim(-0.8, 0)
        axarrBase.set_xlim(-0.8, 0)

        #   provide axis labels
        axarrBase.set_xlabel('Current [nA]')
        axarrBase.set_ylabel('Frequency [Hz]')
        axarrPeak.set_xlabel('Current [nA]')
        axarrPeak.set_ylabel('Frequency [Hz]')
        axarrAvg.set_xlabel('Current [nA]')
        axarrAvg.set_ylabel('Frequency [Hz]')
        axarrSteady.set_xlabel('Current [nA]')
        axarrSteady.set_ylabel('Frequency [Hz]')

        #   provide other labels
        axarrPeak.text(0.10,
                       0.90,
                       " ".join(['Apteronotus leptorhynchus', expfolder]),
                       transform=axarrPeak.transAxes,
                       fontsize=10)
        axarrPeak.text(0.10,
                       0.80,
                       " ".join(
                           ['DC current:', metas[0]["Status"]["Current-1"]]),
                       transform=axarrPeak.transAxes,
                       fontsize=10)
        axarrPeak.text(0.05,
                       0.70,
                       " ".join(['Peak current']),
                       transform=axarrPeak.transAxes,
                       fontsize=10)
        axarrAvg.text(0.05,
                      0.70,
                      " ".join(['Average current']),
                      transform=axarrAvg.transAxes,
                      fontsize=10)
        axarrSteady.text(0.05,
                         0.70,
                         " ".join(['Steady current']),
                         transform=axarrSteady.transAxes,
                         fontsize=10)
        axarrBase.text(0.05,
                       0.70,
                       " ".join(['Baseline current']),
                       transform=axarrBase.transAxes,
                       fontsize=10)

        #   provide gVgate values
        axarrBase.text(0.05,
                       0.05 + 0.05 * counter,
                       " ".join(['g = ', metas[0]["Status"]["gvgate"]]),
                       color=cmapGray[counter],
                       transform=axarrBase.transAxes,
                       fontsize=10)
        axarrPeak.text(0.05,
                       0.05 + 0.05 * counter,
                       " ".join(['g = ', metas[0]["Status"]["gvgate"]]),
                       color=cmapGray[counter],
                       transform=axarrPeak.transAxes,
                       fontsize=10)
        axarrSteady.text(0.05,
                         0.05 + 0.05 * counter,
                         " ".join(['g = ', metas[0]["Status"]["gvgate"]]),
                         color=cmapGray[counter],
                         transform=axarrSteady.transAxes,
                         fontsize=10)
        axarrAvg.text(0.05,
                      0.05 + 0.05 * counter,
                      " ".join(['g = ', metas[0]["Status"]["gvgate"]]),
                      color=cmapGray[counter],
                      transform=axarrAvg.transAxes,
                      fontsize=10)

        #   provide gvtau values
        axarrBase.text(0.55,
                       0.05 + 0.05 * counter,
                       " ".join(['g = ', metas[0]["Status"]["vgatetau"]]),
                       color=cmapGray[counter],
                       transform=axarrBase.transAxes,
                       fontsize=10)
        axarrPeak.text(0.55,
                       0.05 + 0.05 * counter,
                       " ".join(['g = ', metas[0]["Status"]["vgatetau"]]),
                       color=cmapGray[counter],
                       transform=axarrPeak.transAxes,
                       fontsize=10)
        axarrSteady.text(0.55,
                         0.05 + 0.05 * counter,
                         " ".join(['g = ', metas[0]["Status"]["vgatetau"]]),
                         color=cmapGray[counter],
                         transform=axarrSteady.transAxes,
                         fontsize=10)
        axarrAvg.text(0.55,
                      0.05 + 0.05 * counter,
                      " ".join(['g = ', metas[0]["Status"]["vgatetau"]]),
                      color=cmapGray[counter],
                      transform=axarrAvg.transAxes,
                      fontsize=10)

        #   counter increase
        counter = counter + 1

    #   Save figures
    FHandles.savefig(ppjoin(".".join([expfolder, "dyn_FI_curves", 'png'])),
                     transparent=True)
    FHandles.savefig(ppjoin(".".join([expfolder, "dyn_FI_curves", 'svg'])),
                     transparent=True)
Esempio n. 22
0
    def _make_tuples(self, key):
        repro = 'SAM'
        basedir = BASEDIR + key['cell_id']
        spikefile = basedir + '/samallspikes1.dat'
        if os.path.isfile(spikefile):
            stimuli = load(basedir + '/stimuli.dat')
            traces = load_traces(basedir, stimuli)
            spikes = load(spikefile)
            spi_meta, spi_key, spi_data = spikes.selectall()
            globalefield = Runs.GlobalEFieldPeaksTroughs()
            localeodpeaks = Runs.LocalEODPeaksTroughs()
            localeod = Runs.LocalEOD()
            globaleod = Runs.GlobalEOD()
            globaleodpeaks = Runs.GlobalEODPeaksTroughs()
            spike_table = Runs.SpikeTimes()
            # v1trace = Runs.VoltageTraces()

            for (spi_d, spi_m) in zip(spi_data, spi_meta):
                run_idx = spi_m['index']

                print("\t%s run %i" % (repro, run_idx))

                # match index from stimspikes with run from stimuli.dat
                try:
                    stim_m, stim_k, stim_d = stimuli.subkey_select(
                        RePro=repro, Run=spi_m['index'])
                except EmptyException:
                    warn('Empty stimuli for ' + repr(spi_m))
                    continue

                if len(stim_m) > 1:
                    raise KeyError(
                        '%s and index are not unique to identify stimuli.dat block.'
                        % (repro, ))
                else:
                    stim_k = stim_k[0]
                    stim_m = stim_m[0]
                    signal_column = \
                        [i for i, k in enumerate(stim_k) if k[:4] == ('stimulus', 'GlobalEField', 'signal', '-')][0]

                    valid = []

                    if stim_d == [[[0]]]:
                        print("\t\tEmpty stimuli data! Continuing ...")
                        continue

                    for d in stim_d[0]:
                        if not d[signal_column].startswith(
                                'FileStimulus-value'):
                            valid.append(d)
                        else:
                            print(
                                "\t\tExcluding a reset trial from stimuli.dat")
                    stim_d = valid

                if len(stim_d) != len(spi_d):
                    print(
                        """\t\t%s index %i has %i trials, but stimuli.dat has %i. Trial was probably aborted. Not including data."""
                        % (spikefile, spi_m['index'], len(spi_d), len(stim_d)))
                    continue

                start_index, index = [(i, k[-1]) for i, k in enumerate(stim_k)
                                      if 'traces' in k and 'V-1' in k][0]
                sample_interval, time_unit = get_number_and_unit(
                    stim_m['analog input traces']['sample interval%i' %
                                                  (index, )])

                # make sure that everything was sampled with the same interval
                sis = []
                for jj in range(1, 5):
                    si, tu = get_number_and_unit(
                        stim_m['analog input traces']['sample interval%i' %
                                                      (jj, )])
                    assert tu == 'ms', 'Time unit is not ms anymore!'
                    sis.append(si)
                assert len(
                    np.unique(sis)) == 1, 'Different sampling intervals!'

                duration = ureg.parse_expression(
                    spi_m['Settings']['Stimulus']['duration']).to(
                        time_unit).magnitude

                if 'ampl' in spi_m['Settings']['Stimulus']:
                    harmonics = np.array(
                        list(
                            map(
                                float, spi_m['Settings']['Stimulus']
                                ['ampl'].strip().split(','))))
                    if np.all(harmonics == 0):
                        nharmonics = 0
                    else:
                        nharmonics = len(harmonics)
                else:
                    nharmonics = 0

                start_idx, stop_idx = [], []
                # start_times, stop_times = [], []

                start_indices = [d[start_index] for d in stim_d]
                for begin_index, trial in zip(start_indices, spi_d):
                    # start_times.append(begin_index*sample_interval)
                    # stop_times.append(begin_index*sample_interval + duration)
                    start_idx.append(begin_index)
                    stop_idx.append(begin_index + duration / sample_interval)

                to_insert = dict(key)
                to_insert['run_id'] = spi_m['index']
                to_insert['delta_f'] = float(
                    spi_m['Settings']['Stimulus']['deltaf'][:-2])
                to_insert['contrast'] = float(
                    spi_m['Settings']['Stimulus']['contrast'][:-1])
                to_insert['eod'] = float(spi_m['EOD rate'][:-2])
                to_insert[
                    'duration'] = duration / 1000 if time_unit == 'ms' else duration
                to_insert['am'] = spi_m['Settings']['Stimulus']['am'] * 1
                to_insert[
                    'samplingrate'] = 1 / sample_interval * 1000 if time_unit == 'ms' else 1 / sample_interval
                fs = to_insert['samplingrate']
                to_insert['n_harmonics'] = nharmonics
                to_insert['repro'] = 'SAM'

                self.insert1(to_insert)
                for trial_idx, (start,
                                stop) in enumerate(zip(start_idx, stop_idx)):
                    tmp = dict(run_id=run_idx,
                               trial_id=trial_idx,
                               repro='SAM',
                               **key)
                    # tmp['membrane_potential'] = traces['V-1']['data'][start:stop]
                    # # v1trace.insert1(tmp, replace=True)
                    # del tmp['membrane_potential']
                    # ---
                    global_efield = traces['GlobalEFie']['data'][
                        start:stop].astype(np.float32)
                    if fs > 2 * LOWPASS_CUTOFF:
                        global_efield = butter_lowpass_filter(
                            global_efield,
                            highcut=LOWPASS_CUTOFF,
                            fs=fs,
                            order=5)
                    _, peaks, _, troughs = peakdet(global_efield)
                    globalefield.insert1(dict(tmp,
                                              peaks=peaks,
                                              troughs=troughs),
                                         ignore_extra_fields=True)

                    # ---
                    local_efield = traces['LocalEOD-1']['data'][
                        start:stop].astype(np.float32)
                    localeod.insert1(dict(tmp, local_efield=local_efield),
                                     ignore_extra_fields=True)
                    if fs > 2 * LOWPASS_CUTOFF:
                        local_efield = butter_lowpass_filter(
                            local_efield,
                            highcut=LOWPASS_CUTOFF,
                            fs=fs,
                            order=5)
                    _, peaks, _, troughs = peakdet(local_efield)
                    localeodpeaks.insert1(dict(tmp,
                                               peaks=peaks,
                                               troughs=troughs),
                                          ignore_extra_fields=True)

                    # ---
                    global_voltage = traces['EOD']['data'][start:stop].astype(
                        np.float32)
                    globaleod.insert1(dict(tmp, global_voltage=global_voltage),
                                      ignore_extra_fields=True)
                    if fs > 2 * LOWPASS_CUTOFF:
                        global_voltage = butter_lowpass_filter(
                            global_voltage,
                            highcut=LOWPASS_CUTOFF,
                            fs=fs,
                            order=5)
                    _, peaks, _, troughs = peakdet(global_voltage)
                    globaleodpeaks.insert1(dict(tmp,
                                                peaks=peaks,
                                                troughs=troughs),
                                           ignore_extra_fields=True)

                    # ---
                    tmp['times'] = spi_d[trial_idx]
                    spike_table.insert1(tmp, ignore_extra_fields=True)
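
#   butter_lowpass_filter is not defined in these snippets; as an assumption
#   matching the call signature used above, the helper could be sketched with
#   scipy like this:
from scipy.signal import butter, filtfilt


def butter_lowpass_filter(data, highcut, fs, order=5):
    """Zero-phase Butterworth low-pass filter of `data` sampled at `fs` Hz."""
    nyq = 0.5 * fs                                    # Nyquist frequency
    b, a = butter(order, highcut / nyq, btype='low')  # normalized cutoff
    return filtfilt(b, a, data)                       # forward-backward filtering avoids phase shift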
Esempio n. 23
0
    def _make_tuples(self, key):
        repro = 'SAM'
        basedir = BASEDIR + key['cell_id']
        spikefile = basedir + '/samallspikes1.dat'
        if os.path.isfile(spikefile):
            stimuli = load(basedir + '/stimuli.dat')
            traces = load_traces(basedir, stimuli)
            spikes = load(spikefile)
            spi_meta, spi_key, spi_data = spikes.selectall()

            globalefield = Runs.GlobalEField()
            localeod = Runs.LocalEOD()
            globaleod = Runs.GlobalEOD()
            spike_table = Runs.SpikeTimes()
            v1trace = Runs.VoltageTraces()

            for run_idx, (spi_d, spi_m) in enumerate(zip(spi_data, spi_meta)):
                print("\t%s run %i" % (repro, run_idx))

                # match index from stimspikes with run from stimuli.dat
                stim_m, stim_k, stim_d = stimuli.subkey_select(RePro=repro, Run=spi_m['index'])

                if len(stim_m) > 1:
                    raise KeyError('%s and index are not unique to identify stimuli.dat block.' % (repro,))
                else:
                    stim_k = stim_k[0]
                    stim_m = stim_m[0]
                    signal_column = \
                        [i for i, k in enumerate(stim_k) if k[:4] == ('stimulus', 'GlobalEField', 'signal', '-')][0]

                    valid = []

                    if stim_d == [[[0]]]:
                        print("\t\tEmpty stimuli data! Continuing ...")
                        continue

                    for d in stim_d[0]:
                        if not d[signal_column].startswith('FileStimulus-value'):
                            valid.append(d)
                        else:
                            print("\t\tExcluding a reset trial from stimuli.dat")
                    stim_d = valid

                if len(stim_d) != len(spi_d):
                    print(
                        """\t\t%s index %i has %i trials, but stimuli.dat has %i. Trial was probably aborted. Not including data.""" % (
                            spikefile, spi_m['index'], len(spi_d), len(stim_d)))
                    continue

                start_index, index = [(i, k[-1]) for i, k in enumerate(stim_k) if 'traces' in k and 'V-1' in k][0]
                sample_interval, time_unit = get_number_and_unit(
                    stim_m['analog input traces']['sample interval%i' % (index,)])

                # make sure that everything was sampled with the same interval
                sis = []
                for jj in range(1, 5):
                    si, tu = get_number_and_unit(stim_m['analog input traces']['sample interval%i' % (jj,)])
                    assert tu == 'ms', 'Time unit is not ms anymore!'
                    sis.append(si)
                assert len(np.unique(sis)) == 1, 'Different sampling intervals!'

                duration = ureg.parse_expression(spi_m['Settings']['Stimulus']['duration']).to(time_unit).magnitude

                if 'ampl' in spi_m['Settings']['Stimulus']:
                    nharmonics = len(list(map(float, spi_m['Settings']['Stimulus']['ampl'].strip().split(','))))
                else:
                    nharmonics = 0

                start_idx, stop_idx = [], []
                # start_times, stop_times = [], []

                start_indices = [d[start_index] for d in stim_d]
                for begin_index, trial in zip(start_indices, spi_d):
                    # start_times.append(begin_index*sample_interval)
                    # stop_times.append(begin_index*sample_interval + duration)
                    start_idx.append(begin_index)
                    stop_idx.append(begin_index + duration / sample_interval)

                to_insert = dict(key)
                to_insert['run_id'] = spi_m['index']
                to_insert['delta_f'] = float(spi_m['Settings']['Stimulus']['deltaf'][:-2])
                to_insert['contrast'] = float(spi_m['Settings']['Stimulus']['contrast'][:-1])
                to_insert['eod'] = float(spi_m['EOD rate'][:-2])
                to_insert['duration'] = duration / 1000 if time_unit == 'ms' else duration
                to_insert['am'] = spi_m['Settings']['Stimulus']['am'] * 1
                to_insert['samplingrate'] = 1 / sample_interval * 1000 if time_unit == 'ms' else 1 / sample_interval
                to_insert['n_harmonics'] = nharmonics
                to_insert['repro'] = 'SAM'

                self.insert1(to_insert)
                for trial_idx, (start, stop) in enumerate(zip(start_idx, stop_idx)):
                    tmp = dict(run_id=run_idx, trial_id=trial_idx, repro='SAM', **key)
                    tmp['membrane_potential'] = traces['V-1']['data'][start:stop]
                    v1trace.insert1(tmp, replace=True)
                    del tmp['membrane_potential']

                    tmp['global_efield'] = traces['GlobalEFie']['data'][start:stop]
                    globalefield.insert1(tmp, replace=True)
                    del tmp['global_efield']

                    tmp['local_efield'] = traces['LocalEOD-1']['data'][start:stop]
                    localeod.insert1(tmp, replace=True)
                    del tmp['local_efield']

                    tmp['global_voltage'] = traces['EOD']['data'][start:stop]
                    globaleod.insert1(tmp, replace=True)
                    del tmp['global_voltage']

                    tmp['times'] = spi_d[trial_idx]
                    spike_table.insert1(tmp, replace=True)
Esempio n. 24
0
def add_spikes(stimuli, spikefile, spike_times, nix_file, nix_spiketimes):
    if 'stimspikes' in spikefile:
        repro = 'FileStimulus'
    elif 'samallspikes' in spikefile:
        repro = 'SAM'
    else:
        raise Exception('Cannot determine repro')

    print "Assuming RePro=%s" % (repro, )


    ureg = UnitRegistry()

    spikes = load(spikefile)
    spi_meta, spi_key, spi_data = spikes.selectall()


    for run_idx, (spi_d, spi_m) in enumerate(zip(spi_data, spi_meta)):
        print "\t%s run %i" % (repro, run_idx)

        if repro == 'FileStimulus':
            spi_m = add_stimulus_meta(spi_m)
        # match index from stimspikes with run from stimuli.dat
        stim_m, stim_k, stim_d = stimuli.subkey_select(RePro=repro, Run=spi_m['index'])

        if len(stim_m) > 1:
            raise KeyError('%s and index are not unique to identify stimuli.dat block.' % (repro, ))
        else:
            stim_k = stim_k[0]
            stim_m = stim_m[0]
            signal_column = [i for i,k in enumerate(stim_k) if k[:4] == ('stimulus', 'GlobalEField', 'signal', '-')][0]

            valid = []

            if stim_d == [[[0]]]:
                print("\t\tEmpty stimuli data! Continuing ...")
                continue

            for d in stim_d[0]:
                if not d[signal_column].startswith('FileStimulus-value'):
                    valid.append(d)
                else:
                    print("\t\tExcluding a reset trial from stimuli.dat")
            stim_d = valid


        if len(stim_d) != len(spi_d):
            print("""\t\t%s index %i has %i trials, but stimuli.dat has %i. Trial was probably aborted. Not including data.""" % (spikefile, spi_m['index'], len(spi_d), len(stim_d)))
            continue



        start_index, index = [(i, k[-1]) for i,k in enumerate(stim_k) if 'traces' in k and 'V-1' in k][0]
        sample_interval, time_unit = get_number_and_unit(stim_m['analog input traces']['sample interval%i' % (index,)])

        if repro == 'FileStimulus':
            duration = ureg.parse_expression(spi_m['duration']).to(time_unit).magnitude
        elif repro == 'SAM':
            duration = ureg.parse_expression(spi_m['Settings']['Stimulus']['duration']).to(time_unit).magnitude

        start_times = []

        start_indices = [d[start_index] for d in stim_d]
        for begin_index, trial in zip(start_indices, spi_d):
            start_time = begin_index*sample_interval
            start_times.append(start_time)
            spike_times.append(trial+start_time)


        start_times = np.asarray(start_times)
        durations = duration * np.ones(len(stim_d))


        tag_name = "%s-run-%i" % (repro, run_idx)
        positions = recording_block.create_data_array(tag_name+'_starts','nix.event.position', nix.DataType.Double, start_times.shape)
        positions.data.write_direct(start_times)
        positions.append_set_dimension()

        extents = recording_block.create_data_array(tag_name+'_extents','nix.event.extents', nix.DataType.Double, durations.shape)
        extents.data.write_direct(durations)
        extents.append_set_dimension()

        tag = recording_block.create_multi_tag(tag_name, 'nix.experiment_run', positions)
        tag.extents = extents
        tag.references.append(nix_spiketimes)


        for nt in nix_traces:
            tag.references.append(nt)

        sec = nix_file.create_section(tag_name, "nix.metadata")
        tag.metadata = sec

        insert_metadata(sec, stim_m)
        insert_metadata(sec, spi_m)
Esempio n. 25
0
def fi_curve(path_to_folder, subfolder, info):

    """"
    Plots the FI curve as computed by Relacs into the graphic file (*.png, *.svg)
        Requires:
    :param path_to_folder: location of the experimental folder
    :param subfolder: name of the experimental folder
        Outputs:
            graphic files containing FI plots (rates and PSTH) as .png and .svg
    """
    #   define the input data
    filename1 = ppjoin(path_to_folder, subfolder, "ficurve-data.dat")
    filename2 = ppjoin(path_to_folder, subfolder, "ficurve-rates.dat")

    #   get the file, extract the three data subunits
    relacs_file1 = load(filename1)
    relacs_file2 = load(filename2)

    #   if relacs file is empty or too short due to aborted RePro presentation
    #   "try:" provides normal termination of the loop instead of error
    try:
        metas1, _, datas1 = relacs_file1.selectall()
        metas2, _, datas2 = relacs_file2.selectall()

    except:
        return None

    ReProIxList = []
    for i in range(0,len(metas2)):
        comments2 = metas2[i]
        # extract the unique values only one time
        ReProIxList.append(comments2["ReProIndex"])

    # count occurrences of the same ReProIx
    ReProIx = [len(list(group)) for key, group in groupby(ReProIxList)]
    ReProIx.insert(0,0) # insert a leading zero to simplify indexing
    # print(ReProIx)

    #   count the iterations
    FIiter = len(datas1)
    print(FIiter)

    #   empty list for filename output
    fnames = []
    counter = 0
    for i in range(0,FIiter):

        #   print processed RePro iteration
        print("FI Curve ReProIx", metas1[i]["ReProIndex"])

        # TODO: test figure output with a higher dpi settings (220 for retina display, 300 for print)
        FHandles = plt.figure(figsize=(10, 4))#, dpi=220)
        #   counter increase
        counter = counter + ReProIx[i]
        #   convert the loaded data structures into an appropriate format
        rates1 = np.array(datas1[i])
        rates2 = np.array(datas2[counter: counter + ReProIx[i+1]])
        datas1[i] = None
        datas2[i] = None
        comments1 = metas1[i]

        #   FI frequencies - select data
        #   select the data columns

        Istep = rates1[:, 0]
        FRavg = rates1[:, 3]
        FRavgSD = rates1[:, 4]
        FRbase = rates1[:, 5]
        FRbaseSD = rates1[:, 6]
        FRpeak = rates1[:, 9]
        FRpeakSD = rates1[:, 10]
        FRsteady = rates1[:, 12]
        FRsteadySD = rates1[:, 13]

        #   defines FI rates axis, number and position
        axarrL = FHandles.add_subplot(1, 2, 1)

        #   plots FI curves into the axis
        axarrL.plot(Istep, FRbase, color='b')
        axarrL.fill_between(Istep, FRbase+FRbaseSD, FRbase-FRbaseSD, color='b', alpha=0.2)
        axarrL.plot(Istep, FRpeak, color='r')
        axarrL.fill_between(Istep, FRpeak+FRpeakSD, FRpeak-FRpeakSD, color='r', alpha=0.2)
        axarrL.plot(Istep, FRsteady, color='g')
        axarrL.fill_between(Istep, FRsteady+FRsteadySD, FRsteady-FRsteadySD, color='g', alpha=0.2)
        axarrL.plot(Istep, FRavg, color='m')
        axarrL.fill_between(Istep, FRavg+FRavgSD, FRavg-FRavgSD, color='m', alpha=0.2)

        #   plot comments
        axarrL.text(0.05, 0.95, " ".join(['Species:', info["Subject"]["Species"]]), transform=axarrL.transAxes, fontsize = 10)
        axarrL.text(0.05, 0.90, " ".join(['ELL Segment:', info["Cell"]["Location"]]), transform=axarrL.transAxes, fontsize = 10)
        axarrL.text(0.05, 0.85, " ".join(['RePro Ix:', str(comments1["ReProIndex"])]), transform=axarrL.transAxes, fontsize=10)
        axarrL.text(0.05, 0.80, " ".join(['RePro time:', str(comments1["ReProTime"])]), transform=axarrL.transAxes, fontsize=10)
        axarrL.text(0.05, 0.75, " ".join(['V_rest:', str(np.round(np.average(rates1[:, 7]))), 'mV']), transform=axarrL.transAxes, fontsize=10)
        axarrL.text(0.05, 0.70, " ".join(['DC current:', str(rates1[0, 1]), 'nA']), transform=axarrL.transAxes, fontsize=10)
        axarrL.text(0.05, 0.65, " ".join(['Amp. mode:', comments1["Status"]["AmplifierMode"]]), transform=axarrL.transAxes, fontsize=10)

        if "SyncPulse" in comments1["Status"].keys():
            axarrL.text(0.40, 0.60, " ".join(['SyncPulse:', comments1["Status"]["SyncPulse"]]), transform=axarrL.transAxes, fontsize=10)
            axarrL.text(0.40, 0.40, " ".join(['g:', comments1["Status"]["g"]]), transform=axarrL.transAxes, fontsize=10)
            axarrL.text(0.40, 0.35, " ".join(['E:', comments1["Status"]["E"]]), transform=axarrL.transAxes, fontsize=10)
            axarrL.text(0.40, 0.30, " ".join(['gVgate:', comments1["Status"]["gvgate"]]), transform=axarrL.transAxes, fontsize=10)
            axarrL.text(0.40, 0.25, " ".join(['EVgate:', comments1["Status"]["Evgate"]]), transform=axarrL.transAxes, fontsize=10)
            axarrL.text(0.40, 0.20, " ".join(['VgateTau:', comments1["Status"]["vgatetau"]]), transform=axarrL.transAxes, fontsize=10)
            axarrL.text(0.40, 0.15, " ".join(['VgateMid:', comments1["Status"]["vgatevmid"]]), transform=axarrL.transAxes, fontsize=10)
            axarrL.text(0.40, 0.10, " ".join(['VgateSlope:', comments1["Status"]["vgateslope"]]), transform=axarrL.transAxes, fontsize=10)


        #   defines FI PSTH axis, number and position
        axarrR = FHandles.add_subplot(1, 2, 2)

        #   plots FI PSTH
        for j in range(0, len(rates2)):
            axarrR.plot(rates2[j][:, 0]/1000, rates2[j][:, 1])#, line_color=colors[j])

        #   sets y label on the left plot
        axarrL.set_ylabel('Frequency [Hz]')
        axarrR.set_yticklabels([], visible=False)

        #   sets y lim on both plots
        axarrL.set_ylim(axarrR.get_ylim())

        #   writes titles, only over the top figures
        axarrL.set_title(subfolder + ": " + "FI rates", fontsize=12)
        axarrR.set_title(subfolder + ": " + "FI PSTH", fontsize=12)

        #   writes labels beside the bottom most plot
        axarrL.set_xlabel('Current [nA]')
        axarrR.set_xlabel('time [s]')

        #   define file name
        filename = "".join([str(metas1[i]["ReProIndex"]),'_', 'FI_curve'])
        fnames.append(filename)

        FHandles.savefig(ppjoin(path_to_folder, subfolder, ".".join([filename, 'png'])), transparent=True)
        FHandles.savefig(ppjoin(path_to_folder, subfolder, ".".join([filename, 'svg'])), transparent=True)

        plt.close()

    return fnames
Esempio n. 26
0
if __name__ == "__main__":

    """
    Extracts the spikes from traces. One can use the raw trace or normal (txt) trace. Does not work (yet) with multiple
    iterations of the same stimulus
    Input: list with the repro indexes
    example: spike_extraction -l [2,5,9]
    """


    #   prepare files and metas et al.

    filename = "transferfunction-data.dat"
    filename2 = "transferfunction-traces.dat"
    relacs_file = load(filename)
    relacs_file2 = load(filename2)
    metas, _, _ = relacs_file.selectall()

    wd = getcwd().split(getcwd().split("/")[-1])[0]
    expfolder = getcwd().split("/")[-1]

    # prepare_file(expfolder, fn="stimulus-whitenoise-spikes.dat")

    ReProIxList = command_interpreter(sys.argv[1:])
    if ReProIxList:
        ReProIxList = ReProIxList['li']
    else:
        ReProIxList = [metas[i]["ReProIndex"] for i in range(len(metas))]

    #   voltage threshold
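    #   The snippet is truncated here; as a minimal sketch (an assumption, not
    #   the original code), spike times could be extracted from a voltage trace
    #   by detecting upward crossings of the threshold:
    import numpy as np

    def detect_spikes(voltage, threshold, dt):
        """Return spike times [s] for upward threshold crossings of `voltage` sampled every `dt` s."""
        above = voltage >= threshold
        crossing_idx = np.where(~above[:-1] & above[1:])[0] + 1
        return crossing_idx * dt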
Esempio n. 27
0
    def _make_tuples(self, key):
        print(key)
        repro = 'BaselineActivity'
        basedir = BASEDIR + key['cell_id']
        spikefile = basedir + '/basespikes1.dat'
        if os.path.isfile(spikefile):
            stimuli = load(basedir + '/stimuli.dat')

            traces = load_traces(basedir, stimuli)
            spikes = load(spikefile)
            spi_meta, spi_key, spi_data = spikes.selectall()

            localeod = Baseline.LocalEODPeaksTroughs()
            spike_table = Baseline.SpikeTimes()

            for run_idx, (spi_d, spi_m) in enumerate(zip(spi_data, spi_meta)):
                print("\t%s repeat %i" % (repro, run_idx))

                # match index from stimspikes with run from stimuli.dat
                stim_m, stim_k, stim_d = stimuli.subkey_select(
                    RePro=repro, Run=spi_m['index'])

                if len(stim_m) > 1:
                    raise KeyError(
                        '%s and index are not unique to identify stimuli.dat block.'
                        % (repro, ))
                else:
                    stim_k = stim_k[0]
                    stim_m = stim_m[0]
                    signal_column = \
                        [i for i, k in enumerate(stim_k) if k[:4] == ('stimulus', 'GlobalEField', 'signal', '-')][0]

                    valid = []

                    if stim_d == [[[0]]]:
                        print("\t\tEmpty stimuli data! Continuing ...")
                        continue

                    for d in stim_d[0]:
                        if not d[signal_column].startswith(
                                'FileStimulus-value'):
                            valid.append(d)
                        else:
                            print(
                                "\t\tExcluding a reset trial from stimuli.dat")
                    stim_d = valid

                if len(stim_d) > 1:
                    print(
                        """\t\t%s index %i has more than one trial. Not including data."""
                        % (spikefile, spi_m['index']))
                    continue

                start_index, index = [(i, k[-1]) for i, k in enumerate(stim_k)
                                      if 'traces' in k and 'V-1' in k][0]
                sample_interval, time_unit = get_number_and_unit(
                    stim_m['analog input traces']['sample interval%i' %
                                                  (index, )])

                # make sure that everything was sampled with the same interval
                sis = []
                for jj in range(1, 5):
                    si, tu = get_number_and_unit(
                        stim_m['analog input traces']['sample interval%i' %
                                                      (jj, )])
                    assert tu == 'ms', 'Time unit is not ms anymore!'
                    sis.append(si)
                assert len(
                    np.unique(sis)) == 1, 'Different sampling intervals!'

                duration = ureg.parse_expression(
                    spi_m['duration']).to(time_unit).magnitude

                start_idx, stop_idx = [], []
                # start_times, stop_times = [], []

                start_indices = [d[start_index] for d in stim_d]
                for begin_index, trial in zip(start_indices, spi_d):
                    start_idx.append(begin_index)
                    stop_idx.append(begin_index + duration / sample_interval)

                to_insert = dict(key)
                to_insert['repeat'] = spi_m['index']
                to_insert['eod'] = float(spi_m['EOD rate'][:-2])
                to_insert[
                    'duration'] = duration / 1000 if time_unit == 'ms' else duration
                to_insert[
                    'samplingrate'] = 1 / sample_interval * 1000 if time_unit == 'ms' else 1 / sample_interval

                self.insert1(to_insert)

                for trial_idx, (start,
                                stop) in enumerate(zip(start_idx, stop_idx)):
                    if start > 0:
                        tmp = dict(key, repeat=spi_m['index'])
                        leod = traces['LocalEOD-1']['data'][start:stop]
                        if to_insert['samplingrate'] > 2 * LOWPASS_CUTOFF:
                            print('\tLowpass filter to', LOWPASS_CUTOFF,
                                  'Hz for peak detection')
                            leod = butter_lowpass_filter(
                                leod,
                                highcut=LOWPASS_CUTOFF,
                                fs=to_insert['samplingrate'],
                                order=5)
                        _, tmp['peaks'], _, tmp['troughs'] = peakdet(leod)
                        localeod.insert1(tmp, replace=True)
                    else:
                        print(
                            "Negative indices in stimuli.dat. Skipping local peak extraction!"
                        )

                    spike_table.insert1(dict(key,
                                             times=spi_d,
                                             repeat=spi_m['index']),
                                        replace=True)
Esempio n. 28
0
        print(helptxt)
        sys.exit(2)

    # overwrite with parameters
    for opt, arg in opts:
        if opt == '-h':
            print(helptxt)
            sys.exit()

    relacsdir, nix_filename = args
    if relacsdir[-1] != '/':
        relacsdir += '/'

    nix_file = nix.File.open(nix_filename, nix.FileMode.Overwrite)

    stimuli = load(relacsdir + 'stimuli.dat')
    spike_times = []
    recordings_block_name = [e.strip() for e in relacsdir.split('/') if e][-1]

    #------------ add traces -------------------

    nix_traces = add_traces(relacsdir, stimuli, nix_file,
                            recordings_block_name)
    recording_block = [
        k for k in nix_file.blocks if k.name == recordings_block_name
    ][0]

    add_info(nix_file, relacsdir, recording_block)

    nix_spiketimes = recording_block.create_data_array('spikes',
                                                       'nix.event.spiketimes',
Esempio n. 29
0
def transferfunc(path_to_folder, subfolder, info):

    """
    Takes the file called "transferfunction-data.dat" and plots the values found there against frequency. The plotted quantities are gain (± SD), phase (± SD) and coherence (± SD).
    :param path_to_folder: folder where the experiment subfolders are stored
    :param subfolder: folder containing the experiment itself
    :param info: metadata such as the cell location (map) etc.
    :return fnames: list containing the figure file names
    """

    #   define the input data
    filename1 = ppjoin(path_to_folder, subfolder, "transferfunction-data.dat")
    filename2 = ppjoin(path_to_folder, subfolder, "transferfunction-traces.dat")

    #   get the file, extract the three data subunits
    relacs_file1 = load(filename1)
    relacs_file2 = load(filename2)

    #   if relacs file is empty or too short due to aborted RePro presentation
    #   "try:" provides normal termination of the loop instead of error
    try:
        metas1, _, datas1 = relacs_file1.selectall()
        metas2, _, _ = relacs_file2.selectall()
    except:
        return None


    ReProIxList = []
    for i in range(0,len(metas1)):
        comments1 = metas1[i]
        # extract the unique values only one time
        ReProIxList.append(comments1["ReProIndex"])

    # count occurrences of the same ReProIx
    ReProIx = [len(list(group)) for key, group in groupby(ReProIxList)]
    ReProIx.insert(0,0) # insert a leading zero to simplify indexing
    # print(ReProIx)

    #   count the iterations
    transfIter = len(datas1)
    print(transfIter)

    #   empty list for filename output
    fnames = []
    counter = 0
    for i in range(0,transfIter):

        # TODO: test figure output with a higher dpi settings (220 for retina display, 300 for print)
        FHandles = plt.figure(figsize=(10, 8))#, dpi=220)

        #   counter increase
        counter = counter + ReProIx[i]
        #   convert the loaded data structures into an appropriate format
        rates1 = np.array(datas1[i])
        datas1[i] = None
        comments1 = metas1[i]
        comments2 = metas2[i]

        #   Frequency limit (only the frequency band used in the experiment is shown)
        freqLim = float(comments2["Settings"]["Stimulus"]["fmax"].rstrip("Hz"))

        #   TF values - select data
        #   select the data columns
        freq = rates1[:, 0]
        freq = freq[freq<freqLim]
        gain = rates1[0:len(freq), 1]
        gainSD = rates1[0:len(freq), 2]
        phase = rates1[0:len(freq), 3]
        phaseSD = rates1[0:len(freq), 4]
        coh = rates1[0:len(freq), 5]
        cohSD = rates1[0:len(freq), 6]

        #   axes definitions
        axarr = FHandles.add_axes([0.1, 0.5, 0.8, 0.40])
        # axarr = FHandles.add_subplot(1, 1, 1)
        axarr2 = axarr.twinx()
        axarrR = FHandles.add_axes([0.1, 0.1, 0.8, 0.40])

        #   plots transfer curves into the axis
        axarr.plot(freq, gain, color='b', label = "gain")
        axarr.fill_between(freq, gain+gainSD, gain-gainSD, color='b', alpha=0.2)
        axarrR.plot(freq, phase, color='g', label = "phase")
        axarrR.fill_between(freq, phase+phaseSD, phase-phaseSD, color='g', alpha=0.2)
        axarr2.plot(freq, coh, color='r', label = "coherence")
        axarr2.fill_between(freq, coh+cohSD, coh-cohSD, color='r', alpha=0.2)

        #   plot comments
        axarr.text(0.25, 0.95, " ".join(['Species:', info["Subject"]["Species"]]), transform=axarr.transAxes, fontsize = 10)
        axarr.text(0.05, 0.90, " ".join(['ELL Segment:', info["Cell"]["Location"]]), transform=axarr.transAxes, fontsize = 10)
        axarr.text(0.05, 0.85, " ".join(['RePro Ix:', str(comments1["ReProIndex"])]), transform=axarr.transAxes, fontsize=10)
        axarr.text(0.05, 0.80, " ".join(['RePro time:', str(comments1["ReProTime"])]), transform=axarr.transAxes, fontsize=10)
        axarr.text(0.05, 0.75, " ".join(['Amp. mode:', comments1["Status"]["AmplifierMode"]]), transform=axarr.transAxes, fontsize=10)
        axarr.text(0.05, 0.70, " ".join(['Offset:', comments1["Settings"]["Stimulus"]["offset"]]), transform=axarr.transAxes, fontsize=10)
        axarr.text(0.05, 0.65, " ".join(['Amplitude:', comments1["Settings"]["Stimulus"]["amplitude"]]), transform=axarr.transAxes, fontsize=10)
        if "SyncPulse" in comments1["Status"].keys():
            axarr.text(0.05, 0.60, " ".join(['DynClamp:', comments1["Status"]["SyncPulse"]]), transform=axarr.transAxes, fontsize=10)
            axarr.text(0.40, 0.40, " ".join(['g:', comments1["Status"]["g"]]), transform=axarr.transAxes, fontsize=10)
            axarr.text(0.40, 0.35, " ".join(['E:', comments1["Status"]["E"]]), transform=axarr.transAxes, fontsize=10)
            axarr.text(0.40, 0.30, " ".join(['gVgate:', comments1["Status"]["gvgate"]]), transform=axarr.transAxes, fontsize=10)
            axarr.text(0.40, 0.25, " ".join(['EVgate:', comments1["Status"]["Evgate"]]), transform=axarr.transAxes, fontsize=10)
            axarr.text(0.40, 0.20, " ".join(['VgateTau:', comments1["Status"]["vgatetau"]]), transform=axarr.transAxes, fontsize=10)
            axarr.text(0.40, 0.15, " ".join(['VgateMid:', comments1["Status"]["vgatevmid"]]), transform=axarr.transAxes, fontsize=10)
            axarr.text(0.40, 0.10, " ".join(['VgateSlope:', comments1["Status"]["vgateslope"]]), transform=axarr.transAxes, fontsize=10)



        #   sets y label on the left plot
        axarr.set_ylabel('gain (mV/nA)', color = "b")
        axarr2.set_ylabel('coherence', color = "r")
        axarrR.set_ylabel('phase', color = "g")

        #   set x limit
        axarr.set_xlim(0, freqLim)

        #   writes titles, only over the top figures
        axarr.set_title(subfolder + ": " + "transfer function", fontsize=12)

        #   writes labels beside the bottom most plot
        axarrR.set_xlabel('Frequency [Hz]')
        axarr.set_xticklabels([], visible=False)
        axarr2.set_xticklabels([], visible=False)

        #   prepares the legend
        axarr.legend(loc=0)
        axarr2.legend(loc=4)

        #   define file name
        filename = "".join([str(comments1["ReProIndex"]),'_', 'transferfunc'])
        fnames.append(filename)

        #   set color for the y axis
        for tl in axarr2.get_yticklabels():
            tl.set_color("r")
        for tl in axarr.get_yticklabels():
            tl.set_color("b")


        FHandles.savefig(ppjoin(path_to_folder, subfolder, ".".join([filename, 'png'])), transparent=True)
        FHandles.savefig(ppjoin(path_to_folder, subfolder, ".".join([filename, 'svg'])), transparent=True)

        plt.close()

    return fnames
Esempio n. 30
0
def vi_curve(path_to_folder, subfolder, info):
    """"
    Plots the VI curve into the graphic file (*.png, *.svg)
    :param path_to_folder: location of the experimental folder
    :param subfolder: name of the experimental folder

    :return fnames: dict with VI figure filenames
    """
    #   define the input data
    filename1 = ppjoin(path_to_folder, subfolder, "vicurve-data.dat")
    filename2 = ppjoin(path_to_folder, subfolder, "vicurve-trace.dat")

    #   get the file, extract the three data subunits
    relacs_file1 = load(filename1)
    relacs_file2 = load(filename2)

    #   if relacs file is empty or too short due to aborted RePro presentation
    #   "try:" provides normal termination of the loop instead of error
    try:
        metas1, _, datas1 = relacs_file1.selectall(
        )  #   get data from vicurve-data
        metas2, _, datas2 = relacs_file2.selectall(
        )  #   get data from vicurve-trace

    except:
        return None

    ReProIxList = []
    for i in range(0, len(metas2)):
        comments2 = metas2[i]
        # extract the unique values only one time
        ReProIxList.append(comments2["ReProIndex"])

    # count occurrences of the same ReProIx
    ReProIx = [len(list(group)) for key, group in groupby(ReProIxList)]
    ReProIx.insert(0, 0)  # insert a leading zero to simplify indexing
    # print(ReProIx)

    #   count the iterations
    vi = np.array(datas1)
    VIiter = len(datas1)
    print(VIiter)

    #   setup plot dimension, 8" width, each RePro repetition gets 3" height
    # TODO: test figure output with a higher dpi settings (220 for retina display, 300 for print)
    # FHandles = plt.figure(figsize=(6, 3*VIiter))#, dpi=220)

    #   empty list for filename output
    fnames = []

    #   color scheme
    cmap = [
        "Blue", "DarkRed", "DarkOrange", "LimeGreen", "Gold", "Plum",
        "LightSlateGray", "Dodgerblue", "Violet", "Silver", "Black", "Green",
        "Blue", "DarkRed", "DarkOrange", "LimeGreen", "Gold", "Plum",
        "LightSlateGray", "Dodgerblue", "Violet", "Silver", "Black", "Green"
    ]

    #   prepares the counter
    counter = np.cumsum(ReProIx)
    #   loop over VI RePro iterations
    for i in range(0, len(counter) - 1):
        FHandles = plt.figure(figsize=(12, 3))  #, dpi=220)
        print(i)
        comments_vi = metas1[i]

        #   counter increase
        #  counter = counter + ReProIx[i]
        #   convert the loaded data structures into an appropriate format

        #   sorts traces
        traceDict, stim_amps = vi_traces_sort(
            metas2[counter[i]:counter[i + 1]],
            datas2[counter[i]:counter[i + 1]])

        #   defines VI rates axis, number and position
        axarr = FHandles.add_subplot(1, 3, 1)

        #   draws plot
        # TODO: fix the errorbar color
        axarr.errorbar(vi[i][:, 0],
                       vi[i][:, 7],
                       vi[i][:, 8],
                       color='red',
                       fmt='-o')  #   peak
        axarr.errorbar(vi[i][:, 0],
                       vi[i][:, 3],
                       vi[i][:, 4],
                       color='cyan',
                       fmt='-x')  #   rest
        axarr.errorbar(vi[i][:, 0],
                       vi[i][:, 5],
                       vi[i][:, 6],
                       color='yellow',
                       fmt='-+')  #   steady state
        axarr.errorbar(vi[i][:, 0],
                       vi[i][:, 10],
                       vi[i][:, 11],
                       color='green',
                       fmt='-')  #   onset peak

        #   plot comments
        axarr.text(0.05,
                   0.95,
                   " ".join(['Species:', info["Subject"]["Species"]]),
                   transform=axarr.transAxes,
                   fontsize=10)
        axarr.text(0.05,
                   0.90,
                   " ".join(['ELL Segment:', info["Cell"]["Location"]]),
                   transform=axarr.transAxes,
                   fontsize=10)
        axarr.text(0.05,
                   0.85,
                   " ".join(['RePro Ix:',
                             str(comments_vi["ReProIndex"])]),
                   transform=axarr.transAxes,
                   fontsize=10)
        axarr.text(0.05,
                   0.80,
                   " ".join(['RePro time:',
                             str(comments_vi["ReProTime"])]),
                   transform=axarr.transAxes,
                   fontsize=10)
        axarr.text(
            0.05,
            0.75,
            " ".join(['V_rest:',
                      str(np.round(np.average(vi[i][:, 3]))), 'mV']),
            transform=axarr.transAxes,
            fontsize=10)
        axarr.text(0.05,
                   0.70,
                   " ".join(['DC current:',
                             str(vi[i][0, 1]), 'nA']),
                   transform=axarr.transAxes,
                   fontsize=10)
        axarr.text(0.05,
                   0.65,
                   " ".join(
                       ['Amp. mode:', comments_vi["Status"]["AmplifierMode"]]),
                   transform=axarr.transAxes,
                   fontsize=10)
        if "SyncPulse" in comments_vi["Status"].keys():
            axarr.text(0.05,
                       0.60,
                       " ".join(
                           ['DynClamp:', comments_vi["Status"]["SyncPulse"]]),
                       transform=axarr.transAxes,
                       fontsize=10)
            axarr.text(0.40,
                       0.40,
                       " ".join(['g:', comments_vi["Status"]["g"]]),
                       transform=axarr.transAxes,
                       fontsize=10)
            axarr.text(0.40,
                       0.35,
                       " ".join(['E:', comments_vi["Status"]["E"]]),
                       transform=axarr.transAxes,
                       fontsize=10)
            axarr.text(0.40,
                       0.30,
                       " ".join(['gVgate:', comments_vi["Status"]["gvgate"]]),
                       transform=axarr.transAxes,
                       fontsize=10)
            axarr.text(0.40,
                       0.25,
                       " ".join(['EVgate:', comments_vi["Status"]["Evgate"]]),
                       transform=axarr.transAxes,
                       fontsize=10)
            axarr.text(0.40,
                       0.20,
                       " ".join(
                           ['VgateTau:', comments_vi["Status"]["vgatetau"]]),
                       transform=axarr.transAxes,
                       fontsize=10)
            axarr.text(0.40,
                       0.15,
                       " ".join(
                           ['VgateMid:', comments_vi["Status"]["vgatevmid"]]),
                       transform=axarr.transAxes,
                       fontsize=10)
            axarr.text(
                0.40,
                0.10,
                " ".join(['VgateSlope:', comments_vi["Status"]["vgateslope"]]),
                transform=axarr.transAxes,
                fontsize=10)

        #   add traces subplot
        axarrM = FHandles.add_subplot(1, 3, 2)

        #   add resistance subplot
        axarrR = FHandles.add_subplot(1, 3, 3)

        #   compute the resistance
        resist = abs((vi[i][:, 5] - vi[i][:, 3]) / vi[i][:, 0])

        #   draws traces subplot and resistance subplot
        for j, k in enumerate(traceDict):
            axarrM.plot(traceDict[k][0][:, 0],
                        traceDict[k][0][:, 1],
                        color=cmap[j])
            axarrM.fill_between(traceDict[k][0][:, 0],
                                traceDict[k][0][:, 1] - traceDict[k][0][:, 2],
                                traceDict[k][0][:, 1] + traceDict[k][0][:, 2],
                                color=cmap[j],
                                alpha=0.2)
            #   plot the resistance
            axarrR.plot(vi[i][j, 0], resist[j], color=cmap[j], marker='o')

        #   writes titles, only over the top figures
        # if i == 0:
        axarr.set_title(subfolder + ": " + "VI curve", fontsize=12)

        #   turns off x axis labels for all except the bottom plot
        # if i < VIiter-1:
        # axarr.set_xticklabels([], visible=False)

        #   writes x labels beside the bottommost plot
        # if i == VIiter-1:
        axarr.set_xlabel('Current [nA]')
        axarrM.set_xlabel("time [ms]")
        axarrR.set_xlabel('Current [nA]')

        #   writes y labels
        axarr.set_ylabel('Voltage [mV]')
        axarrR.set_ylabel("Resistance [MOhm]")
        axarrM.set_yticklabels([], visible=False)

        #   define file name
        filename = "".join([str(comments_vi["ReProIndex"]), '_', 'VI_curve'])
        fnames.append(filename)

        FHandles.savefig(ppjoin(path_to_folder, subfolder,
                                ".".join([filename, 'png'])),
                         transparent=True)
        FHandles.savefig(ppjoin(path_to_folder, subfolder,
                                ".".join([filename, 'svg'])),
                         transparent=True)
        plt.close()

    return fnames
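
The slicing above relies on a small bookkeeping idiom: run lengths of consecutive ReProIndex values (itertools.groupby), a leading zero, and np.cumsum give the boundaries that cut the flat metas2/datas2 lists into one block per RePro repetition. A minimal, self-contained sketch of just that step, with made-up metadata dictionaries standing in for the Relacs records:

import numpy as np
from itertools import groupby

#   hypothetical stand-ins for the loaded Relacs metadata and traces
metas2 = [{"ReProIndex": 5}, {"ReProIndex": 5}, {"ReProIndex": 5},
          {"ReProIndex": 7}, {"ReProIndex": 7}, {"ReProIndex": 9}]
datas2 = ["trace_a", "trace_b", "trace_c", "trace_d", "trace_e", "trace_f"]

#   run lengths of consecutive identical ReProIndex values
repro_ix = [m["ReProIndex"] for m in metas2]
run_lengths = [len(list(group)) for _, group in groupby(repro_ix)]  # [3, 2, 1]

#   leading zero + cumulative sum -> slice boundaries into the flat lists
run_lengths.insert(0, 0)
counter = np.cumsum(run_lengths)  # [0, 3, 5, 6]

for i in range(len(counter) - 1):
    block = datas2[counter[i]:counter[i + 1]]
    print(repro_ix[counter[i]], block)
#   5 ['trace_a', 'trace_b', 'trace_c']
#   7 ['trace_d', 'trace_e']
#   9 ['trace_f']

Note that groupby only merges consecutive duplicates, so the counting is correct as long as the file stores the traces in presentation order, which is what the loop over counter in the function above assumes.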
Example n. 31
0
def fi_raster_return(ReProList_Unordered, info):

    #   Order!
    ReProList = OrderedDict(
        sorted(ReProList_Unordered.items(), key=lambda x: x[0], reverse=False))

    #   defines relative sizes of individual raster plots
    rug_bot = 0.1
    # rug_width  = 0.48
    rug_width = 0.22
    rug_height = 0.67 / (len(ReProList) + 1)
    rug_step = 1 / (len(ReProList) + 1)

    #   defines relative size of individual return plots
    return_bot = 0.67
    return_width = 0.11
    return_height = 0.67 / (len(ReProList) + 1)
    return_step = 1 / (len(ReProList) + 1)

    #   defines relative size of individual histograms
    hist_bot = 0.85
    hist_width = 0.11
    hist_height = 0.67 / (len(ReProList) + 1)
    hist_step = 1 / (len(ReProList) + 1)

    #   defines the color map
    cmapBlue = [
        "Blue", "DarkTurquoise", "CadetBlue", "DeepSkyBlue", "CornFlowerBlue",
        "DodgerBlue", "LightSkyBlue", "LightSteelBlue", "DarkCyan"
    ]

    cmap = [
        "DarkOrange", "Blue", "LimeGreen", "Gold", "Plum", "DarkRed",
        "LightSlateGray", "DodgerBlue", "Blue", "DarkRed", "DarkOrange",
        "LimeGreen", "Gold", "Plum", "LightSlateGray", "DodgerBlue", "Blue",
        "DarkRed", "DarkOrange", "LimeGreen", "Gold", "Plum", "LightSlateGray",
        "DodgerBlue", "Blue", "DarkRed", "DarkOrange", "LimeGreen", "Gold",
        "Plum", "LightSlateGray", "DodgerBlue", "Blue", "DarkRed",
        "DarkOrange", "LimeGreen", "Gold", "Plum", "LightSlateGray",
        "DodgerBlue", "Blue", "DarkRed", "DarkOrange", "LimeGreen", "Gold",
        "Plum", "LightSlateGray", "DodgerBlue"
    ]

    #   define and design grey color map
    color_spread = np.linspace(0.35, 0.9, len(ReProList))
    cmapGrey = [cm.Greys(x) for x in color_spread]

    #   define the input data file name
    filename = "ficurve-spikes.dat"

    #   get the file and load it
    relacs_file = load(filename)

    #   define the absolute size of the raster & return plot
    FHandles = plt.figure(figsize=(18, 3 * len(ReProList) + 3))
    # FHandles = plt.figure(figsize=(18, 3*len(ReProList) + 3))

    #   counter
    counter = 0

    #   define OrderedDict for freq_continuous between repros.
    freq = OrderedDict()

    #   define g_list
    g_list = []
    tau_list = []

    for k, v in ReProList.items():
        metas, _, datas = relacs_file.select({"ReProIndex": int(k), "I": v})

        #   sorts spikes
        spikeDict, stim_amps = fi_spikes_sort(metas, datas)

        #   computes instantaneous spike frequencies based on spikes
        freqDict, timeDict = fi_instant_freq(
            spikeDict)  #, metas[counter[i]:counter[i+1]])
        freq_continuous, _, timeLine, freqSD_continuous = fi_inst_freq_continuous(
            spikeDict, freqDict, metas)

        #   add axes
        axarr = FHandles.add_axes([
            rug_bot, counter * rug_step + (0.25 / (len(ReProList) + 1)),
            rug_width, rug_height
        ])
        axarr2 = axarr.twinx()
        #   plot
        axarr2.plot(timeLine, freq_continuous[v], color=cmapGrey[counter])
        for j in range(len(spikeDict[v])):
            axarr.plot(spikeDict[v][j],
                       np.ones(len(spikeDict[v][j])) + j,
                       '|',
                       color=cmap[j],
                       linewidth=2,
                       mew=1)

        axarr.set_xlabel('Time [ms]')
        axarr.set_ylabel('Iter')
        axarr2.set_ylabel('Frequency [Hz]', color=cmap[counter])

        #   insert computed freq into OrderedDict
        freq[k] = freq_continuous[v]

        if "SyncPulse" in metas[0]["Status"].keys():
            g_list.append(metas[0]["Status"]["gvgate"].split(".")[0])
            tau_list.append(metas[0]["Status"]["vgatetau"].split(".")[0])
            #   annotate the dynamic-clamp conductance and time constant
            axarr.text(0.02,
                       0.8,
                       " ".join([
                           r'$g_{Vgate}$', ' = ',
                           metas[0]["Status"]["gvgate"].split('.')[0], ' nS'
                       ]),
                       color=cmapGrey[counter],
                       transform=axarr.transAxes,
                       fontsize=12)
            axarr.text(0.7,
                       0.8,
                       " ".join([
                           r'$\tau_{Vgate}$', ' = ',
                           metas[0]["Status"]["vgatetau"].split('.')[0], ' ms'
                       ]),
                       color=cmapGrey[counter],
                       transform=axarr.transAxes,
                       fontsize=12)
        #   grid
        axarr.grid(True)

        #   set y lim
        axarr.set_ylim(0, len(spikeDict[v]) + 1)
        axarr.set_xlim(-100, 500)

        #   drawing the second plot, containing time intervals
        axarrM = FHandles.add_axes([
            return_bot, counter * hist_step + (0.25 / (len(ReProList) + 1)),
            return_width, return_height
        ])

        allIntervalsZero = np.array([0])
        for gnj in range(len(timeDict[v])):
            allIntervals = np.append(allIntervalsZero, timeDict[v][gnj])

            for m in range(len(allIntervals) - 1):
                # axarrM.plot(allIntervals[m],allIntervals[m+1], "o", color= cmap[gnj], ms=5)
                #  axarrM.plot(allIntervals[m],allIntervals[m+1]-allIntervals[m], "o", color= cmap[gnj], ms=5)
                axarrM.plot(allIntervals[m],
                            allIntervals[m + 1] - allIntervals[m],
                            "o",
                            color=cmapGrey[counter],
                            ms=5)

        # axarrR.set_xlim(0, np.percentile(allIntervals,97))
        # axarrR.set_ylim(0, np.percentile(allIntervals,97))

        axarrM.set_xlim(0, 12)
        axarrM.set_ylim(-6, 6)
        # axarrM.set_xlim(0, np.percentile(allIntervals,97))
        # axarrM.set_ylim(0, np.percentile(allIntervals,97))

        axarrM.set_xlabel('n ISI [ms]')
        axarrM.set_ylabel('n+1 ISI - n ISI [ms]')

        #   drawing the third plot, containing time interval distribution
        axarrR = FHandles.add_axes([
            hist_bot, counter * hist_step + (0.25 / (len(ReProList) + 1)),
            hist_width, hist_height
        ])

        #   put all interspike intervals into a single numpy array
        allIntervals = []

        for znj in range(len(timeDict[v])):
            IntervalsN = np.append(allIntervalsZero, timeDict[v][znj])
            # for ml in range((IntervalsN).shape[0]-1):
            #     allIntervals.append((IntervalsN[ml+1]-IntervalsN[ml]))

            IntervalsN1 = np.append(timeDict[v][znj], allIntervalsZero)
            allIntervals.extend((IntervalsN1 - IntervalsN)[1:-2])

        allIntervals = np.array(allIntervals)
        # allIntervals = allIntervals[1:-1]
        #   histogram calculation for the ISI distribution
        if allIntervals.shape[0] > 1:
            # allIntervals = allIntervals[allIntervals<np.percentile(allIntervals,95)]
            if allIntervals.shape[0] > 1:
                axarrR.hist(allIntervals,
                            bins=20,
                            density=False,  # 'normed' was removed in newer matplotlib
                            color=cmapGrey[counter],
                            histtype='stepfilled',
                            orientation='horizontal')

        #   add labels
        axarrR.set_ylabel('n+1 ISI - n ISI [ms]')
        axarrR.set_xlabel('Count')

        #   set x lim
        # axarrR.set_xticks([0, 5, 10, 15])
        # axarrR.set_xticklabels([0, 5, 10, 15])
        # axarrR.set_xlim(0, np.percentile(allIntervals,97))
        axarrR.set_ylim(-6, 6)

        #   increase counter
        counter = counter + 1

    #   add top plot to compare instantaneous frequencies
    axarr = FHandles.add_axes([
        rug_bot, counter * rug_step + (0.25 / (len(ReProList) + 1)), rug_width,
        rug_height
    ])
    for l, w in enumerate(freq):
        axarr.plot(timeLine, freq[w], color=cmapGrey[l])

    #   writes x labels
    axarr.set_xlabel('Time [ms]')

    #   writes y labels
    axarr.set_ylabel('Frequency [Hz]')

    #   grid
    # axarr.grid(b=True)

    axarr.set_xlim([-50, 550])

    #   plot comments and other annotations
    axarr.text(0.25,
               0.90,
               " ".join(['Apteronotus leptorhynchus', expfolder]),
               transform=axarr.transAxes,
               fontsize=10)
    axarr.text(0.55,
               0.75,
               " ".join(['Stimulus amplitude:', metas[0]["I"]]),
               transform=axarr.transAxes,
               fontsize=10)

    #   extracts the g & tau values and annotates them
    if "SyncPulse" in metas[0]["Status"].keys():
        #   one label per RePro repetition, in the matching grey shade
        for i in range(len(g_list)):
            axarr.text(0.75,
                       0.60 - 0.1 * i,
                       " ".join([r'$\tau_{Vgate}$', ' = ', tau_list[i],
                                 ' ms']),
                       color=cmapGrey[i],
                       transform=axarr.transAxes,
                       fontsize=10)
            axarr.text(0.02,
                       0.60 - 0.1 * i,
                       " ".join([r'$g_{Vgate}$', ' = ', g_list[i], ' nS']),
                       color=cmapGrey[i],
                       transform=axarr.transAxes,
                       fontsize=10)

    #   extract ReProIx, sort them and merge them
    keys = [i for i in ReProList]
    keys = sorted(list(map(int, keys)))
    keys = '.'.join(map(str, keys))
    #   extract current values
    current = list(ReProList.values())[0]
    #   join keys, currents and iterations
    name = '_'.join([keys, current])

    #   write title on the figure
    FHandles.suptitle("".join(
        [info["Cell"]["Location"], ':', expfolder, "FI_freq_dyn", name]),
                      fontsize=12)

    #   Save figures
    FHandles.savefig(ppjoin(".".join([expfolder, "FI_freq", name, 'png'])),
                     transparent=True)
    FHandles.savefig(ppjoin(".".join([expfolder, "FI_freq", name, 'svg'])),
                     transparent=True)
    #   dump .pdf to selected folder
    FHandles.savefig(ppjoin(
        '../overviewFI/', ".".join([
            "_".join(
                [info["Cell"]["Location"], expfolder, "FI_freq_dyn", name]),
            'pdf'
        ])),
                     transparent=True)
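
The middle and right panels of each row are built from the per-trial interval lists in timeDict: x = n-th ISI, y = (n+1)-th ISI minus n-th ISI, plus a histogram of the pooled y values. The helper below is not part of the original script; it is a compact re-expression of that computation, assuming (as the plotting loop suggests) that timeDict[v] holds one array of interspike intervals in milliseconds per trial:

import numpy as np

def isi_return_map(isi_trials):
    """
    Pools (n-th ISI, (n+1)-th ISI - n-th ISI) pairs over trials, i.e. the
    coordinates drawn in the return-map panel; a histogram of the second
    output reproduces the ISI-difference distribution panel.
    isi_trials: list of 1-D arrays of interspike intervals in ms, one per trial.
    """
    xs, ys = [], []
    for isis in isi_trials:
        isis = np.asarray(isis, dtype=float)
        if isis.size < 2:
            continue  # at least two intervals are needed for one pair
        xs.append(isis[:-1])      # n-th interval
        ys.append(np.diff(isis))  # (n+1)-th minus n-th interval
    if not xs:
        return np.array([]), np.array([])
    return np.concatenate(xs), np.concatenate(ys)

#   tiny usage example with made-up intervals (ms)
trials = [np.array([4.1, 4.3, 3.9, 8.0]), np.array([5.0, 4.8])]
isi_n, isi_diff = isi_return_map(trials)
#   scatter of (isi_n, isi_diff) corresponds to the axarrM panel,
#   np.histogram(isi_diff, bins=20) to the axarrR panel

Unlike the original loop, this sketch does not prepend an artificial zero interval, so only differences between genuine consecutive intervals appear in the output.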