Example No. 1
# Assumed imports for the snippets below (avalanche, plot and fitting are
# project-local analysis modules; the rest are standard packages):
import os
import pickle

import h5py
import numpy as np
import matplotlib.pyplot as plt
import powerlaw

import avalanche
import fitting
import plot


def spk_plot_pS(filepath,
                deltaT,
                datapath,
                sampling_rate=25000,
                str_leg='Data'):
    """Plots the avalanche size distribution for a single hdf5 dataset, and a single deltaT
	
	Args:
	    filepath (str): Path to the .hdf5 dataset
	    deltaT (int): Binsize to use, *in timesteps*
	    datatype (str): 'coarse', 'sub' or 'both' (compares both sampling types) 
	    str_leg (str, optional): plot legend
	    threshold (int, optional): Threshold in standard deviations of the signal (for coarse) 
	    bw_filter (bool, optional): Toggles butterworth filtering (for coarse)
	    timesteps (None, optional): Length of the dataset in timesteps
	    channels (None, optional): Number of channels in the dataset
	"""

    #Loads spiking data
    file = h5py.File(filepath, 'r')
    data_spk = file[datapath][:]
    file.close()

    #Bins data
    data_binned = avalanche.bin_data(data=data_spk, binsize=deltaT)

    #Gets S and plots it
    S = avalanche.get_S(data_binned)
    plot.pS(S, label=str_leg)
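
A minimal usage sketch; the file path and the in-file dataset path below are hypothetical:

# Usage sketch -- hypothetical file path and in-file dataset path:
spk_plot_pS('data/spikes_r00.hdf5',
            deltaT=4,
            datapath='data/spikes',
            str_leg='Spikes')
plt.legend()
plt.show()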
Example No. 2
def sim_plot_pS(filepath,
                deltaT,
                datatype,
                str_leg='Data',
                threshold=3,
                bw_filter=True,
                timesteps=None,
                channels=None,
                save_fig=None,
                show_error=True,
                color='k',
                lineType='-',
                zorder=2):
    """Plots the avalanche size distribution for a single hdf5 dataset, and a single deltaT. If [filepath] is a list, averages over datasets.
	
	Args:
	    filepath (str): Path to the .hdf5 dataset
	    deltaT (int): Binsize to use, *in timesteps*
		datatype (str): 'coarse', 'sub'
	    str_leg (str, optional): plot legend
	    threshold (int, optional): Threshold in standard deviations of the signal (for coarse) 
	    bw_filter (bool, optional): Toggles butterworth filtering (for coarse)
	    timesteps (None, optional): Length of the dataset in timesteps
	    channels (None, optional): Number of channels in the dataset
	    color (str, optional): plot color
	"""

    if not isinstance(filepath, list):
        filepath = [filepath]

    S_list = []
    for filepath_file in filepath:
        #Loads and thresholds data
        data_th = avalanche.analyze_sim_raw(filepath_file, threshold, datatype,
                                            bw_filter, timesteps, channels)

        #Bins data
        data_binned = avalanche.bin_data(data=data_th, binsize=deltaT)

        #Gets S and appends it to the list
        S_list.append(avalanche.get_S(data_binned))
    plot.pS_mean(S_list,
                 label=str_leg,
                 lineType=lineType,
                 color=color,
                 show_error=show_error,
                 zorder=zorder)

    #Sets up figure
    fig = plt.gcf()
    ax = plt.gca()
    ax.set_xlim([1, 500])
    ax.set_ylim([1e-6, 1])

    if save_fig is not None:
        #Saves figure pdf and pickles the figure object
        fig.savefig(save_fig + '.pdf', bbox_inches="tight")
        with open(save_fig + '.pkl', 'wb') as file:
            pickle.dump(fig, file)
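
A usage sketch averaging over repetitions; the repetition files and the save prefix are hypothetical, and saving assumes the fig/ directory already exists:

# Usage sketch -- hypothetical repetition files and save prefix:
sim_plot_pS(['data/sim_r00.hdf5', 'data/sim_r01.hdf5'],
            deltaT=2,
            datatype='coarse',
            str_leg='coarse, b = 2',
            save_fig='fig/pS_coarse_b2')  # assumes fig/ exists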
Example No. 3
def save_plot(data_dir, filename, threshold, datatype, reps, binsize,
              bw_filter):
    """Plots the mean avalanche size distribution over [reps] repetitions for each binsize in [binsize], against a p(S) ~ S^-1.5 guide line, and saves the figure under [data_dir]/plot_[filtered/unfiltered]/.

    Args:
        data_dir (str): Location of the hdf5 datasets
        filename (str): dataset name format
        threshold (float): Threshold in standard deviations of the signal
        datatype (str): 'coarse' or 'sub'
        reps (int): Number of repetitions in the dataset
        binsize (list): List of binsizes, in timesteps
        bw_filter (bool): Toggles butterworth filtering
    """

    #Save location
    if bw_filter:
        fig_dir = data_dir + 'plot_filtered/'
    else:
        fig_dir = data_dir + 'plot_unfiltered/'

    #Plots results
    plt.figure()
    plt.title(filename)
    plt.xlabel('S')
    plt.ylabel('p(S)')
    plt.yscale('log')
    plt.xscale('log')
    plt.xlim(1, 1e3)
    plt.ylim(1e-5, 1)
    X = np.arange(5, 64)
    Y = np.power(X, -1.5)
    Y = 5 * Y / np.sum(Y)
    plt.plot(X, Y, linestyle='--', color='black', label=r'$p(S) \sim S^{-1.5}$')

    #Runs analysis for all binsizes
    for bs in binsize:
        print(filename + ', running for b = {}'.format(bs))

        S_list = []
        for rep in range(reps):

            #Creates filepath
            filepath = data_dir + filename + '_r{:02d}.hdf5'.format(rep)

            #Analyzes rep
            data_thresholded = avalanche.analyze_sim_raw(filepath=filepath,
                                                         threshold=threshold,
                                                         datatype=datatype,
                                                         bw_filter=bw_filter)
            data_binned = avalanche.bin_data(data=data_thresholded, binsize=bs)

            S_list.append(avalanche.get_S(data_binned))

        plot.pS_mean(S_list, label='bs = {}'.format(bs))

    #Creates save dir
    if not os.path.exists(fig_dir):
        os.makedirs(fig_dir)

    #Saves fig (the legend collects the per-binsize labels set above)
    plt.legend()
    str_fig = fig_dir + datatype + '_pS_' + filename + '_th{}'.format(
        threshold) + '.png'
    plt.savefig(str_fig)
    plt.close()
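
A usage sketch; the directory, filename pattern and binsize list are hypothetical:

# Usage sketch -- expects files like data/sim/N50_r00.hdf5 ... N50_r09.hdf5
# (all names below are hypothetical):
save_plot(data_dir='data/sim/',
          filename='N50',
          threshold=3,
          datatype='coarse',
          reps=10,
          binsize=[1, 2, 4, 8],
          bw_filter=True)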
Example No. 4
def save_threshold(data_dir,
                   filename,
                   threshold,
                   reps,
                   bw_filter,
                   timesteps=None):
    """Thresholds datasets in [data_dir], numbered in files ending with '_rep%2d.hdf5'. Results are saved to [data_dir]/thresholded_[filtered/unfiltered]/filename.hdf5, where each columnn corresponds to a repetition.
	
	Args:
	    data_dir (str): Location to search for hdf5 datasets
	    filename (str): dataset name format
	    threshold (float): Threshold to use
	    reps (int): Number of repetitions in the dataset
	    bw_filter (bool): Whether to bandpass the coarse signal with a butterworth 4th order filter
	    timesteps (None, optional): Timeseries length to use, in timesteps.
	"""

    #Definitions
    if bw_filter:
        dir_threshold = 'thresholded_filtered/'
    else:
        dir_threshold = 'thresholded_unfiltered/'
    data_save = data_dir + dir_threshold
    str_savefile = data_save + filename + '_th{:0.1f}.hdf5'.format(threshold)
    datatypes = ['coarse', 'sub']

    #Gets timesteps
    if timesteps is None:
        filepath_0 = data_dir + filename + '_r00.hdf5'
        file_raw = h5py.File(filepath_0, 'r')
        timesteps = file_raw['data/activity'].shape[0]
        file_raw.close()

    #Creates path to .h5 save file
    if not os.path.exists(data_save):
        os.makedirs(data_save)
    file = h5py.File(str_savefile, 'w')  #Overwrites old file

    #Saves thresholded data ('coarse' and 'sub')
    for datatype in datatypes:

        #Creates .h5 dataset
        file.create_dataset(datatype,
                            shape=(reps, timesteps),
                            dtype=int,
                            chunks=(1, timesteps),
                            compression=1,
                            maxshape=(None, timesteps))
        file[datatype].attrs['threshold'] = threshold

        for rep in range(reps):

            #Creates filepath
            filepath = data_dir + filename + '_r{:02d}.hdf5'.format(rep)

            #Analyzes rep
            data_thresholded = avalanche.analyze_sim_raw(filepath=filepath,
                                                         threshold=threshold,
                                                         datatype=datatype,
                                                         bw_filter=bw_filter)
            file[datatype][rep, :] = np.int32(data_thresholded)

    #Copies population activity data
    file.create_dataset('activity',
                        shape=(reps, timesteps),
                        dtype=float,
                        chunks=(1, timesteps),
                        compression=1,
                        maxshape=(None, timesteps))
    for rep in range(reps):
        filepath = data_dir + filename + '_r{:02d}.hdf5'.format(rep)
        file_raw = h5py.File(filepath, 'r')
        file['activity'][rep, :] = file_raw['data/activity'][:]
        file_raw.close()

    #Flushes and closes file
    file.close()
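
A usage sketch; the directory and filename pattern are hypothetical:

# Usage sketch -- hypothetical directory and filename pattern; writes
# data/sim/thresholded_filtered/N50_th3.0.hdf5 with 'coarse', 'sub' and
# 'activity' datasets of shape (reps, timesteps):
save_threshold(data_dir='data/sim/',
               filename='N50',
               threshold=3.0,
               reps=10,
               bw_filter=True)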
Example No. 5
def sim_plot_deltaT(filepath,
                    deltaT,
                    datatype,
                    threshold=3,
                    S_fit_max=50,
                    bw_filter=True,
                    timesteps=None,
                    channels=None,
                    save_fig=None):
    """Plots p(S), m_av and fits p(S)~S^-alpha, for a list of binsizes. If [filepath] is a list, averages over datasets.
	
	Args:
	    filepath (str): Path to the .hdf5 dataset
	    deltaT (int): Vector of binsizes (in timesteps)
	    datatype (str): 'coarse' or 'sub'
	    threshold (int, optional): Threshold in standard deviations of the signal (for coarse) 
	    S_fit_max (int, optional): Limit on the power law fit range (default 50)
	    bw_filter (bool, optional): Whether to use a Butterworth filter to bandpass the signal (for coarse)
	    timesteps (None, optional): Number of timesteps to use (default extracts from dataset)
	    channels (None, optional): Number of electrode channels to use (default extracts from dataset)   
	    save_fig (str, optional): Saves the figure under fig/[save_fig].png
	"""

    if not isinstance(filepath, list):
        filepath = [filepath]

    #Parameters
    timescale_ms = 2  #length of a timestep, in ms
    fig_dir = 'fig/'

    #Converts to numpy arrays to avoid list shenanigans
    deltaT = np.array(deltaT)
    nbins = deltaT.size
    nreps = len(filepath)

    S_list = []
    deltaT_ms = np.array(timescale_ms * deltaT)
    alpha_exp = np.zeros((nreps, nbins))
    m_av = np.zeros((nreps, nbins))

    #Runs analysis for each dataset
    for j in range(nreps):

        S_list_rep = []

        #Loads and thresholds data
        data_th = avalanche.analyze_sim_raw(filepath[j], threshold, datatype,
                                            bw_filter, timesteps, channels)

        #Bins data for each deltaT and calculates observables
        for i in range(nbins):

            data_binned = avalanche.bin_data(data=data_th, binsize=deltaT[i])

            #Obtains avalanche list S
            S = avalanche.get_S(data_binned)

            #Saves S to list
            S_list_rep.append(S)

            #Calculates alpha_exp
            fit = powerlaw.Fit(S,
                               discrete=True,
                               estimate_discrete=False,
                               xmin=1,
                               xmax=S_fit_max)
            alpha_exp[j, i] = fit.alpha

            #Calculates m_av
            m_av[j, i] = fitting.m_avalanche(data_binned)

        #Appends S list from each repetition to pS
        S_list.append(S_list_rep)

    #Sets up subplots
    fig = plt.figure(constrained_layout=True)
    gs = fig.add_gridspec(2, 3)
    ax_alpha = fig.add_subplot(gs[0, 2])
    ax_mav = fig.add_subplot(gs[1, 2])
    ax_ps = fig.add_subplot(gs[0:2, 0:2])

    #Plots pS_mean for each binsize
    for k in range(nbins):

        #Rebuild S_list for each binsize
        S_bin = []
        for j in range(nreps):
            S_bin.append(S_list[j][k])

        #Gets largest avalanche from the list (+1 for zero_index)
        S_max = int(max([Si.max() for Si in S_bin]) + 1)

        #Obtains pS
        pS = np.zeros((nreps, S_max))

        for i in range(nreps):
            for j in range(S_max):
                pS[i, j] = np.sum(S_bin[i] == j)
            pS[i, :] = pS[i, :] / np.sum(pS[i, :])

        #Obtains mean and STD
        pS_mean = np.mean(pS, axis=0)
        pS_std = np.std(pS, axis=0)
        pS_up = pS_mean + pS_std / 2
        pS_dw = pS_mean - pS_std / 2

        #Plots pS_mean on the main axis
        str_leg = r'$\Delta$t = {:d} ms'.format(timescale_ms * deltaT[k])
        ax_ps.fill_between(range(S_max), pS_up, pS_dw, alpha=0.25, lw=0)
        ax_ps.plot(range(S_max), pS_mean, label=str_leg)

    #Plots alpha_exp
    alpha_exp_mean = np.mean(alpha_exp, axis=0)
    alpha_exp_std = np.std(alpha_exp, axis=0)
    ax_alpha.errorbar(deltaT_ms,
                      alpha_exp_mean,
                      yerr=alpha_exp_std / 2,
                      fmt='o-',
                      color='k',
                      fillstyle='full')

    #Plots m_av
    m_av_mean = np.mean(m_av, axis=0)
    m_av_std = np.std(m_av, axis=0)
    ax_mav.errorbar(deltaT_ms,
                    m_av_mean,
                    yerr=m_av_std / 2,
                    fmt='o-',
                    color='k',
                    fillstyle='full')

    #Beautifies plots
    ax_ps.set_xlabel('S')
    ax_ps.set_ylabel('p(S)')
    ax_ps.set_xlim(1, 1e3)
    ax_ps.set_yscale('log')
    ax_ps.set_xscale('log')
    ax_ps.legend()
    ax_ps.set_title(datatype + '-sampled')

    ax_alpha.set_xlabel(r'$\Delta$t (ms)')
    ax_alpha.set_ylabel(r'$\alpha$')
    ax_alpha.set_xscale('log')
    ax_alpha.set_xticks([1, 10])
    ax_alpha.set_xlim([1, 64])
    ax_alpha.plot([1, 100], [1.5, 1.5], '--')

    ax_mav.set_xlabel(r'$\Delta$t (ms)')
    ax_mav.set_ylabel(r'$m_{av}$')
    ax_mav.set_xscale('log')
    ax_mav.set_xticks([1, 10])
    ax_mav.set_xlim([1, 64])
    ax_mav.plot([1, 100], [1, 1], '--')


    if save_fig is not None:
        fig_path = fig_dir + '/'.join(save_fig.split('/')[:-1]) + '/'
        if not os.path.exists(fig_path):
            os.makedirs(fig_path)
        ax_ps.set_title(save_fig)
        ax_ps.set_ylabel(datatype + '-sampled p(S)')
        str_save = fig_dir + save_fig + '_' + datatype + '.png'
        fig.savefig(str_save, bbox_inches="tight")
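
A usage sketch; the repetition files and the save prefix are hypothetical:

# Usage sketch -- hypothetical repetition files and save prefix; the
# figure is written to fig/sim/pS_deltaT_coarse.png:
sim_plot_deltaT(['data/sim_r00.hdf5', 'data/sim_r01.hdf5'],
                deltaT=[1, 2, 4, 8, 16, 32],
                datatype='coarse',
                save_fig='sim/pS_deltaT')
plt.show()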