# Example 1
def pop_act_time_course(DATA, Model, t0=400., smoothing=10,
                            FIGSIZE=(4,2),
                            YTICKS=[0, 15, 30], XTICKS=[0, 500, 1000]):
    """
    Plot the mean +/- std time course (across the trials in DATA) of the
    smoothed population rates of the three populations: recurrent
    excitation, recurrent inhibition and disinhibition.

    Parameters
    ----------
    DATA : list of dict
        One simulation result per trial, each providing the
        'POP_ACT_RecExc', 'POP_ACT_RecInh' and 'POP_ACT_DsInh' rate arrays.
    Model : dict
        Needs 'T1', 'T2', 'DT1', 'DT2' and 'dt' entries.
    t0 : float
        Only times t > t0 (evaluated before the time-axis shift) are shown.
    smoothing : float
        Gaussian smoothing width, in the same time unit as Model['dt'].

    Returns
    -------
    matplotlib Figure
    """
    tstop = Model['T2']+2.*Model['DT2']
    t = np.arange(int(tstop/Model['dt']))*Model['dt']
    ismooth = int(smoothing/Model['dt'])  # smoothing width in time steps
    MEAN_FE, MEAN_FI, MEAN_FD = [np.zeros((len(DATA), len(t))) for i in range(3)]

    for n, data in enumerate(DATA):
        MEAN_FE[n, :] = gaussian_smoothing(data['POP_ACT_RecExc'], ismooth)
        MEAN_FI[n, :] = gaussian_smoothing(data['POP_ACT_RecInh'], ismooth)
        MEAN_FD[n, :] = gaussian_smoothing(data['POP_ACT_DsInh'], ismooth)

    # BUGFIX: removed the dead `faff = waveform(t, Model)` call -- its
    # result was never used anywhere in this function.
    cond = (t>t0)  # selection mask, deliberately computed on the un-shifted axis
    t -= (Model['T1']-Model['DT1'])  # re-reference the time axis (presumably to stimulus onset -- confirm)

    fig3, ax = plt.subplots(1, figsize=FIGSIZE)

    for vec, label, color in zip([MEAN_FE, MEAN_FI, MEAN_FD],
                                 ['$\\nu_e$', '$\\nu_i$', '$\\nu_d$'],
                                 [Green, Red, Purple]):
        plt.plot(t[cond], vec.mean(axis=0)[cond], color=color, label=label, lw=3)
        plt.fill_between(t[cond],
                         vec.mean(axis=0)[cond]-vec.std(axis=0)[cond],
                         vec.mean(axis=0)[cond]+vec.std(axis=0)[cond],
                         color=color, alpha=.5)

    ax.legend(frameon=False)
    set_plot(ax, yticks=YTICKS, xticks=XTICKS,
             ylabel='rate (Hz)', xlabel='time (ms)')
    return fig3
# Example 2
def compute_relationship_smooth_and_plot(cbins,
                                         pop_hist,
                                         num_hist,
                                         ax,
                                         Nsmooth=10):
    """
    Compute, per bin, the count-weighted mean and standard deviation of the
    per-bin samples, resample them on a finer grid (nearest-bin lookup),
    Gaussian-smooth both curves and draw mean +/- std on `ax`.
    """
    # weighted mean / std per bin, keeping only bins with more than 1 sample
    x, y, sy = [], [], []
    for center, samples, counts in zip(cbins, pop_hist, num_hist):
        if len(samples) > 1:
            v, w = np.array(samples), np.array(counts)
            wmean = np.sum(v * w / np.sum(w))
            x.append(center)
            y.append(wmean)
            sy.append(np.sqrt(np.sum((v - wmean)**2 * w / np.sum(w))))

    # nearest-neighbour resampling on a denser abscissa
    npts = int(Nsmooth / 2 * len(x))
    xx = np.linspace(np.min(x), np.max(x), npts)
    yy, ss = np.zeros(npts), np.zeros(npts)
    xarr = np.array(x)
    for i, xv in enumerate(xx):
        i0 = np.argmin((xv - xarr)**2)  # index of the closest coarse bin
        yy[i], ss[i] = y[i0], sy[i0]

    yy = gaussian_smoothing(yy, Nsmooth)
    ss = gaussian_smoothing(ss, Nsmooth)
    ax.plot(xx, yy, 'k-', lw=1.5)
    ax.fill_between(xx, yy + ss, yy - ss, color='k', alpha=.5, lw=0)
# Example 3
def conductance_time_course(DATA, Model, t0=400., smoothing=10,
                            FIGSIZE=(4,2),
                            YTICKS=[0, 4, 8], XTICKS=[-400, 0, 400, 800]):
    """
    Plot the time course (mean +/- std across trials) of the excitatory and
    inhibitory synaptic conductances onto the recurrent-excitatory cells,
    normalized by the leak conductance Model['RecExc_Gl'].

    BUGFIX: the original code did `DATA[n].copy()` (a *shallow* copy) and
    then wrote the smoothed traces back into the nested
    'GSYNe_RecExc'/'GSYNi_RecExc' containers, silently mutating the
    caller's DATA. Smoothed traces are now built in fresh arrays and the
    input is left untouched.

    Returns
    -------
    (matplotlib Figure, matplotlib Axes)
    """
    fig2, ax = plt.subplots(1, figsize=FIGSIZE)

    tstop = Model['T2']+2.*Model['DT2']
    t = np.arange(int(tstop/Model['dt']))*Model['dt']
    ismooth = int(smoothing/Model['dt'])  # smoothing width in time steps
    MEAN_GE, STD_GE = np.zeros((len(DATA), len(t))), np.zeros((len(DATA), len(t)))
    MEAN_GI, STD_GI = np.zeros((len(DATA), len(t))), np.zeros((len(DATA), len(t)))

    for n, data in enumerate(DATA):
        # smooth every recorded conductance trace (one per recorded cell)
        # WITHOUT writing back into `data`
        ge = np.array([gaussian_smoothing(gsyn, ismooth)
                       for gsyn in data['GSYNe_RecExc']])
        gi = np.array([gaussian_smoothing(gsyn, ismooth)
                       for gsyn in data['GSYNi_RecExc']])
        MEAN_GE[n, :] = ge.mean(axis=0)/Model['RecExc_Gl']
        STD_GE[n, :] = ge.std(axis=0)/Model['RecExc_Gl']
        MEAN_GI[n, :] = gi.mean(axis=0)/Model['RecExc_Gl']
        STD_GI[n, :] = gi.std(axis=0)/Model['RecExc_Gl']

    cond = (t>t0)  # time selection, computed before the axis shift
    t -= (Model['T1']-Model['DT1'])  # re-reference the time axis
    ax.plot(t[cond], MEAN_GE.mean(axis=0)[cond], color=Green, lw=2, label=r'$G_{e}$')
    ax.plot(t[cond], MEAN_GI.mean(axis=0)[cond], color=Red, lw=2, label=r'$G_{i}$')
    ax.fill_between(t[cond],
                    MEAN_GE.mean(axis=0)[cond]-STD_GE.mean(axis=0)[cond],
                    MEAN_GE.mean(axis=0)[cond]+STD_GE.mean(axis=0)[cond],
                    alpha=.6, color=Green)
    ax.fill_between(t[cond],
                    MEAN_GI.mean(axis=0)[cond]-STD_GI.mean(axis=0)[cond],
                    MEAN_GI.mean(axis=0)[cond]+STD_GI.mean(axis=0)[cond],
                    alpha=.6, color=Red)
    ax.legend(frameon=False)
    set_plot(ax, yticks=YTICKS, xticks=XTICKS,
             ylabel='$G_{syn}$/$g_L$', xlabel='time (ms)')
    return fig2, ax
def spike_train_distance(Pattern1, Pattern2, data, tau=5e-3):
    """
    A similarity coef for spike trains
    """
    # binarize both spike patterns on the simulation time grid, then
    # Gaussian-smooth with width tau to obtain continuous rate estimates
    dt, duration = data['dt'], data['stim_duration']
    npts = len(np.arange(int(duration / dt)))
    smoothed = []
    for pattern in (Pattern1, Pattern2):
        train = np.zeros(npts)
        for tspike in pattern:
            if 0 <= tspike < duration:
                train[int(tspike / dt)] = 1
        smoothed.append(gaussian_smoothing(train, int(tau / dt)))
    train1, train2 = smoothed

    # corrcoef is undefined for a constant signal, hence the std guard
    if (np.std(train1) > 0) and (np.std(train2) > 0):
        return np.corrcoef(train1, train2)[0, 1]
    return -1
# Example 5
def compare_two_regimes(Model, smooth=20):
    """
    Plot the afferent-rate -> output-rate relationship for the two regimes
    stored in 'sas.zip' and 'bs.zip' on a single axis.

    BUGFIX (two NameErrors in the original):
    - `ax` was never created; a figure/axis pair is now made here.
    - the output list is `F_out` but the append used `Fout`.

    Returns
    -------
    matplotlib Figure (new; the original implicitly returned None)
    """
    fig, ax = plt.subplots(1, figsize=(4, 2.5))

    for f, color in zip(['sas.zip', 'bs.zip'], [Blue, Orange]):
        Model['filename'] = f
        F_aff, DATA = get_scan(Model)
        F_out = []
        for i, data in enumerate(DATA):
            cond = data['t'] > 350  # discard the initial transient
            F_out.append(
                gaussian_smoothing(data['POP_ACT_RecExc'],
                                   int(smooth / data['dt']))[cond].max())
        ax.plot(F_aff, F_out, color=color)

    set_plot(ax,
             xlabel=' $\delta \\nu_a $ (Hz)',
             ylabel=' $\\delta \nu_e$ (Hz)')
    return fig
# Example 6
def analyze_scan(Model, smooth=10, filename=None):
    """
    For every afferent-rate level of a scan, plot the across-seed mean +/-
    std of the smoothed recurrent-excitatory rate (fig1) and the afferent
    waveform itself (fig2).
    """
    F_aff, seeds, Model, DATA = get_scan(Model, filename=filename)

    fig1, ax1 = plt.subplots(1, figsize=(4, 2.5))
    plt.subplots_adjust(left=.3, bottom=.4)
    fig2, ax2 = plt.subplots(1, figsize=(4, 2.5))
    plt.subplots_adjust(left=.3, bottom=.4)

    n_seeds = len(seeds)
    for i in range(len(F_aff)):
        color = cm.copper(i / len(F_aff))
        # collect the smoothed excitatory-rate trace of every seed
        traces = np.array([
            gaussian_smoothing(DATA[i * n_seeds + j]['POP_ACT_RecExc'],
                               int(smooth / DATA[i * n_seeds + j]['dt']))
            for j in range(n_seeds)])
        mean, std = traces.mean(axis=0), traces.std(axis=0)
        # `data` of the last seed supplies the common time axis and waveform
        data = DATA[i * n_seeds + n_seeds - 1]
        cond = data['t'] > 350  # discard the initial transient
        tt = data['t'][cond]
        ax1.plot(tt, mean[cond], color=color, lw=2)
        ax1.fill_between(tt,
                         mean[cond] - std[cond],
                         mean[cond] + std[cond],
                         color=color, alpha=.5)
        ax2.plot(tt, data['faff'][cond], color=color)
    set_plot(ax1, xlabel='time (ms)', ylabel='rate (Hz)')
    set_plot(ax2, xlabel='time (ms)', ylabel='rate (Hz)')
    return fig1, fig2
# Example 7
def _plot_baseline_referenced_band(ax, t, mean, std, baseline, color):
    # One trace drawn as mean +/- std, re-referenced to a baseline level.
    ax.plot(t, mean - baseline, color=color, lw=2)
    ax.fill_between(t,
                    mean - std - baseline,
                    mean + std - baseline,
                    color=color, alpha=.5)


def make_single_resp_fig(F_aff,
                         seeds,
                         Model,
                         DATA_SA,
                         DATA_BA,
                         smoothing=30,
                         t0=430,
                         XTICKS=np.arange(4) * 250,
                         tstart_vis=350,
                         tend_vis=700,
                         t_after_stim_for_bsl=1400.):
    """
    Build the single-response figures for the two regimes: afferent input
    (fig1/ax1), SA response (fig2/ax2) and BA response (fig3/ax3), each as
    across-seed mean +/- std re-referenced to a late-time baseline.

    Refactor: the identical mean/std/baseline plotting for the SA and BA
    conditions is factored into `_plot_baseline_referenced_band`.

    Returns
    -------
    (fig1, fig2, fig3)
    """
    fig1, ax1 = plt.subplots(1, figsize=(4, 2.5))
    plt.subplots_adjust(left=.3, bottom=.4)
    fig2, ax2 = plt.subplots(1, figsize=(4, 2.5))
    plt.subplots_adjust(left=.3, bottom=.4)
    fig3, ax3 = plt.subplots(1, figsize=(4, 2.5))
    plt.subplots_adjust(left=.3, bottom=.4)

    ismooth = int(smoothing / Model['dt'])  # smoothing width in time steps
    for i in range(len(F_aff)):
        color = cm.viridis(i / len(F_aff))
        SA_TRACES, BA_TRACES = [], []
        for j in range(len(seeds)):
            SA_TRACES.append(gaussian_smoothing(
                DATA_SA[i * len(seeds) + j]['POP_ACT_RecExc'], ismooth))
            BA_TRACES.append(gaussian_smoothing(
                DATA_BA[i * len(seeds) + j]['POP_ACT_RecExc'], ismooth))
        SA_mean = np.array(SA_TRACES).mean(axis=0)
        SA_std = np.array(SA_TRACES).std(axis=0)
        BA_mean = np.array(BA_TRACES).mean(axis=0)
        BA_std = np.array(BA_TRACES).std(axis=0)
        # time axis / waveform taken from the last SA seed of this level
        data = DATA_SA[i * len(seeds) + len(seeds) - 1]
        # visualization window and baseline (post-stimulus) window
        cond = (data['t'] > tstart_vis) & (data['t'] < tend_vis)
        cond2 = data['t'] > t_after_stim_for_bsl
        tt = data['t'][cond] - t0
        _plot_baseline_referenced_band(ax2, tt, SA_mean[cond], SA_std[cond],
                                       SA_mean[cond2].mean(), color)
        _plot_baseline_referenced_band(ax3, tt, BA_mean[cond], BA_std[cond],
                                       BA_mean[cond2].mean(), color)
        ax1.plot(tt,
                 data['faff'][cond] - data['faff'][cond2].mean(),
                 color=color,
                 lw=3)
    set_plot(ax1,
             xlabel='time (ms)',
             ylabel='$\delta \\nu_a$  (Hz)',
             xticks=XTICKS)
    set_plot(ax2,
             xlabel='time (ms)',
             ylabel='$\delta \\nu_e$ (Hz)',
             xticks=XTICKS)
    set_plot(ax3,
             xlabel='time (ms)',
             ylabel='$\delta \\nu_e$ (Hz)',
             xticks=XTICKS)
    return fig1, fig2, fig3
# Example 8
    # NOTE(review): this fragment starts mid-block -- `parser` (argparse) is
    # presumably created in code above this excerpt; confirm before reuse.
    parser.add_argument("-a", "--analyze", help="perform analysis of params space",
                        action="store_true")
    parser.add_argument("-rm", "--run_multiple_seeds", help="run with multiple seeds",
                        action="store_true")
    parser.add_argument("--debug", help="debug", action="store_true")
    
    # the parsed CLI namespace doubles as the model-parameter dict
    args = parser.parse_args()
    Model = vars(args)

    if args.analyze:
        # analysis mode: post-process an existing scan and show the figures
        analyze_sim(Model)
        ntwk.show()
    elif args.run_multiple_seeds:
        # scan mode: run the simulation over multiple seeds, save to file
        Model['filename'] = 'sparse_vs_balanced/data/time_varying_input.h5'
        run_scan(Model)
    elif args.debug:
        # debug mode: reload a saved scan and inspect the Vm histograms
        Model, seeds, DATA = get_scan({}, filename='sparse_vs_balanced/data/time_varying_input.zip')
        hist_of_Vm_pre_post(DATA, Model, nbin=30)
        # one_Vm_fig(DATA[0], Model)
        ntwk.show()
    else:
        # default: single debug simulation, plot the three smoothed
        # population rates (exc., inh., disinhibitory) in Hz
        Model['filename'] = 'data/time_varying_input_debug.h5'
        NTWK = run_sim(Model)
        Nue, Nui, Nud = NTWK['POP_ACT'][0].rate/ntwk.Hz, NTWK['POP_ACT'][1].rate/ntwk.Hz,\
                        NTWK['POP_ACT'][2].rate/ntwk.Hz
        # smoothing window: 20 ms at dt = 0.1 ms -> 200 time steps
        ntwk.plot(NTWK['POP_ACT'][0].t/ntwk.ms, gaussian_smoothing(Nue,int(20./0.1)))
        ntwk.plot(NTWK['POP_ACT'][0].t/ntwk.ms, gaussian_smoothing(Nui,int(20./0.1)))
        ntwk.plot(NTWK['POP_ACT'][0].t/ntwk.ms, gaussian_smoothing(Nud,int(20./0.1)))
        ntwk.show()


if __name__ == '__main__':

    # script entry point: run the 3-population network model and plot the
    # smoothed excitatory population rate
    from data_analysis.processing.signanalysis import gaussian_smoothing
    # import the model defined in root directory
    sys.path.append(str(pathlib.Path(__file__).resolve().parents[2]))
    from model import *

    # NOTE(review): `parser` is presumably provided by the star-import from
    # `model` above -- confirm
    args = parser.parse_args()
    Model = vars(args)
    if Model['p_AffExc_DsInh'] == 0.:
        # the disinhibitory population needs a non-zero afferent
        # connectivity probability to be driven at all
        print(
            '---------------------------------------------------------------------------'
        )
        print(
            'to run the 3 pop model, you need to set an afferent connectivity proba !'
        )
        print(
            'e.g run: \n                python running_3pop_model.py --p_AffExc_DsInh 0.1'
        )
        print(
            '---------------------------------------------------------------------------'
        )
    else:
        NTWK = run_3pop_ntwk_model(Model, tstop=400)
        # excitatory population rate in Hz
        Nue = NTWK['POP_ACT'][0].rate / ntwk.Hz
        # smoothing window: 20 ms at dt = 0.1 ms -> 200 time steps
        ntwk.plot(NTWK['POP_ACT'][0].t / ntwk.ms,
                  gaussian_smoothing(Nue, int(20. / 0.1)))
        ntwk.show()
# Example 10
def load_data(
        fn,
        args,
        fraction_extent_of_data=(0., 1.),  # for cross-validation
        verbose=False,
        with_spiking_activity=True,
        chosen_window_only=True,
        full_processing=False,
        with_Vm_low_freq=False):
    """
    Load one recording (Vm + extracellular channel) and optionally run the
    full pLFP / Network-State-Index processing pipeline on it.

    Parameters
    ----------
    fn : str
        Path to the recording ('.abf'; a matching '.json' sidecar holds the
        analysis properties).
    args : namespace
        Analysis parameters (subsampling_period, f0, w0, wavelet_number,
        percentile_for_p0, T0, delta_band, Tstate, alpha, T_sliding_mean,
        MUA_band, MUA_smoothing, spike_threshold).
    fraction_extent_of_data : 2-sequence of float
        Fractional [start, end] extent of the recording to keep
        (cross-validation splits). BUGFIX: default changed from a mutable
        list to a tuple; it is only read, so behavior is unchanged.
    chosen_window_only : bool
        If True, restrict loading to the [t0, t1] window from the sidecar.
    full_processing : bool
        If True, additionally compute pLFP, NSI, delta/gamma power and
        (if with_spiking_activity) MUA, spike times and firing rate.

    Returns
    -------
    dict with (subsampled) traces and derived quantities.
    """
    if verbose:
        print('analyzing :', fn)
    # the analysis properties live in a json sidecar next to the recording
    with open(fn.replace(s1, s2).replace('abf', 'json')) as f:
        props = json.load(f)

    if chosen_window_only:
        # BUGFIX: np.float was removed in NumPy 1.24 -- use builtin float
        t0, t1 = float(props['t0']), float(props['t1'])
    else:
        t0, t1 = 0, np.inf

    raw_data = load_file(fn, zoom=[t0, t1])

    data_full = {
        't': raw_data[0] - raw_data[0][0],  # time axis re-referenced to 0
        'Vm': raw_data[1][1],
        'Extra': raw_data[1][0]
    }
    # keep only the requested fraction of the recording (cross-validation)
    cond = (np.arange(len(data_full['t']))>=int(len(data_full['t'])*fraction_extent_of_data[0])) &\
           (np.arange(len(data_full['t']))<=int(len(data_full['t'])*fraction_extent_of_data[1]))
    data = {
        't': data_full['t'][cond],
        'Vm': data_full['Vm'][cond],
        'Extra': data_full['Extra'][cond],
        'name': fn.split(os.path.sep)[-1],
        'filename': fn
    }

    data['dt'] = data['t'][1] - data['t'][0]

    if 'offset' in props:
        # manual Vm offset correction from the sidecar
        data['Vm'] += float(props['offset'])

    # subsampled copies of all traces for the slow analyses
    isubsampling = int(args.subsampling_period / data['dt'])
    data['sbsmpl_Vm'] = data['Vm'][::isubsampling][:-1]
    data['sbsmpl_Extra'] = data['Extra'][::isubsampling][:-1]
    data['sbsmpl_t'] = data['t'][::isubsampling][:-1]
    data['sbsmpl_dt'] = data['dt'] * isubsampling

    if full_processing:
        # compute the pLFP
        if verbose:
            print('processing LFP')
        functions.preprocess_LFP(data,
                                 freqs=np.linspace(args.f0 / args.w0,
                                                   args.f0 * args.w0,
                                                   args.wavelet_number),
                                 new_dt=args.subsampling_period,
                                 percentile_for_p0=args.percentile_for_p0,
                                 smoothing=args.T0)
        # guard against an off-by-one between pLFP and subsampled time axis
        data['pLFP'] = data['pLFP'][:min(
            [len(data['pLFP']), len(data['sbsmpl_t'])])]

        # compute the Network State Index
        if verbose:
            print('computing NSI')
        functions.compute_Network_State_Index(
            data,
            freqs=np.linspace(args.delta_band[0], args.delta_band[1],
                              8),  # freqs in delta band
            Tstate=args.Tstate,
            Var_criteria=data['p0'],  # HERE TAKING NOISE AS CRITERIA !!!
            alpha=args.alpha,
            T_sliding_mean=args.T_sliding_mean,
            with_Vm_low_freq=with_Vm_low_freq)

        # extract delta and gamma power from LFP
        if verbose:
            print('computing delta and gamma')
        functions.compute_delta_and_gamma(data, args)

        # MUA from extracellular signal
        if with_spiking_activity:
            if verbose:
                print('computing MUA, spikes and FR')
            # MUA = smoothed rectified band-passed extracellular signal
            data['MUA'] = gaussian_smoothing(\
                            np.abs(butter_bandpass_filter(data['Extra'],\
                                     args.MUA_band[0], args.MUA_band[1], 1./data['dt'], order=5)),\
                                           int(args.MUA_smoothing/data['dt']))

            data['sbsmpl_MUA'] = 1e-3 * data['MUA'][::int(
                args.subsampling_period / data['dt'])][:-1]  # in uV
            # Spike times from Vm (upward threshold crossings)
            data['tspikes'] = data['t'][np.argwhere(
                (data['Vm'][:-1] <= args.spike_threshold)
                & (data['Vm'][1:] > args.spike_threshold)).flatten()]
            Vpeaks = []
            for tt in data['tspikes']:
                Vpeaks.append(
                    np.max(data['Vm'][(data['t'] > tt - 5e-3)
                                      & (data['t'] < tt + 5e-3)])
                )  # max in Vm surrouding spike time
            data['Vpeak_spikes'] = np.array(Vpeaks)
            # firing rate on the subsampled grid
            data['sbsmpl_FR'] = np.histogram(
                data['tspikes'], bins=data['sbsmpl_t'])[0] / data['sbsmpl_dt']

    return data