def main(dir, bin_size, bin_bound):
    """Batch PSTH analysis over every unit found under *dir*.

    Recursively collects Axona recordings, computes a peri-stimulus time
    histogram (PSTH) per unit against its session's stimulation (STM)
    events, saves one plot per unit under ``<dir>/psth_results`` and
    writes all collected rows to ``<dir>/psth_results/psth.csv``.

    Args:
        dir: Root directory scanned recursively for Axona files.
        bin_size: Bin width forwarded to ``NEvent.psth``.
        bin_bound: Time bounds forwarded to ``NEvent.psth``.
    """
    container = NDataContainer(load_on_fly=True)
    container.add_axona_files_from_dir(dir, recursive=True)
    container.setup()
    print(container.string_repr(True))
    event = NEvent()
    last_stm_name = None
    dict_list = []

    for i in range(len(container)):
        # The row is created outside the try block so the except handler
        # always sees a bound, current-iteration dict.
        result_dict = OrderedDict()
        try:
            data_index, _ = container._index_to_data_pos(i)
            stm_name = container.get_file_dict("STM")[data_index][0]
            data = container[i]

            # Identify the unit: directory, base name, tetrode and unit no.
            spike_file = data.spike.get_filename()
            spike_dir = os.path.dirname(spike_file)
            spike_name = os.path.basename(spike_file)
            spike_name_only, spike_ext = os.path.splitext(spike_name)
            spike_ext = spike_ext[1:]  # drop the leading "."
            result_dict["Dir"] = spike_dir
            result_dict["Name"] = spike_name_only
            result_dict["Tet"] = int(spike_ext)
            result_dict["Unit"] = data.spike.get_unit_no()

            # Only reload the stimulation events when the STM file changes
            # between consecutive units (units are grouped per session).
            if last_stm_name != stm_name:
                event.load(stm_name, 'Axona')
                last_stm_name = stm_name

            graph_data = event.psth(data.spike, bins=bin_size, bound=bin_bound)
            result_dict["Num_Spikes"] = data.get_unit_spikes_count()

            # One CSV column per PSTH bin, keyed by the bin's left edge.
            for (b, v) in zip(graph_data["all_bins"][:-1], graph_data["psth"]):
                result_dict[str(b)] = v
            dict_list.append(result_dict)

            # Plot the PSTH for this unit.
            name = (spike_name_only + "_" + spike_ext + "_" +
                    str(result_dict["Unit"]) + ".png")
            plot_name = os.path.join(dir, "psth_results", name)
            make_dir_if_not_exists(plot_name)
            plot_psth(graph_data, plot_name)
            print("Saved psth to {}".format(plot_name))

        except Exception as e:
            log_exception(e, "During stimulation batch at {}".format(i))
            # BUG FIX: if the failure happened after the row was already
            # appended (e.g. during plotting), the old code appended the
            # same dict a second time, duplicating the row in the CSV.
            if not dict_list or dict_list[-1] is not result_dict:
                dict_list.append(result_dict)

    fname = os.path.join(dir, "psth_results", "psth.csv")
    save_dicts_to_csv(fname, dict_list)
    print("Saved results to {}".format(fname))
# Esempio n. 2 (Example 2) -- scraped-page separator, commented out so the file stays valid Python
def main(dir):
    """Save spike-phase versus position scatter plots, split by heading.

    For every unit under *dir*, spikes are partitioned into "north"
    (heading in [45, 135)) and "south" (heading in [225, 315]) groups,
    and a phase-vs-position scatter for each group is written under
    ``<dir>/plots/phase``.
    """
    save_dir = os.path.join(dir, "plots", "phase")
    container = NDataContainer(load_on_fly=True)
    container.add_axona_files_from_dir(dir)
    container.setup()
    spike_names = container.get_file_dict()["Spike"]

    for idx, ndata in enumerate(container):
        name = spike_names[container._index_to_data_pos(idx)[0]]
        results = ndata.phase_at_spikes(should_filter=True)
        positions = results["positions"]
        phases = results["phases"]
        good_place = results["good_place"]
        directions = results["directions"]
        # Index sets selecting spikes fired while heading each way.
        co_ords = {
            "north": np.nonzero((45 <= directions) & (directions < 135)),
            "south": np.nonzero((225 <= directions) & (directions <= 315)),
        }
        # Skip units with no phase data or without a good place field.
        if phases.size == 0 or not good_place:
            continue
        for direction in ("north", "south"):
            selector = co_ords[direction]
            dim_pos = positions[1][selector]
            directional_phases = phases[selector]
            fig, ax = plt.subplots()
            ax.scatter(dim_pos, directional_phases)
            parts = os.path.basename(name[0]).split(".")
            end_name = (parts[0] + "_unit" + str(ndata.get_unit_no()) +
                        "_" + direction + ".png")
            out_name = os.path.join(save_dir, end_name)
            make_dir_if_not_exists(out_name)
            fig.savefig(out_name)
            plt.close(fig)
def main(dir):
    """Produce place-cell summary PDFs for all recordings under *dir*."""
    recordings = NDataContainer(load_on_fly=True)
    recordings.add_axona_files_from_dir(dir, recursive=True)
    recordings.setup()
    print(recordings.string_repr(True))
    nca.place_cell_summary(
        recordings,
        dpi=200,
        out_dirname="nc_spat_plots",
        output_format="pdf",
        burst_thresh=6,
        isi_bin_length=1,
        filter_place_cells=False)
# Esempio n. 4 (Example 4) -- scraped-page separator, commented out so the file stays valid Python
def main(dir):
    """Produce place-cell summary plots (default options) for *dir*."""
    data_set = NDataContainer(load_on_fly=True)
    data_set.add_axona_files_from_dir(dir, recursive=True)
    data_set.setup()
    print(data_set.string_repr(True))
    nca.place_cell_summary(data_set, dpi=200, out_dirname="nc_spat_plots")
# Esempio n. 5 (Example 5) -- scraped-page separator, commented out so the file stays valid Python
def main(in_dir, tetrode):
    """Directional ("objects") analysis for one tetrode.

    Builds a container over recordings matching ``.*objects.*``, computes
    per-unit directional rate statistics, distances under rotation and
    shuffle permutations (as raw rates and as ranks), saves one heatmap
    per unit, and writes everything to ``<in_dir>/nc_results/<tet>_obj.csv``.

    Args:
        in_dir: Directory to scan for Axona files.
        tetrode: Tetrode number to restrict the analysis to.
    """
    container = NDataContainer(load_on_fly=True)
    regex = ".*objects.*"
    container.add_axona_files_from_dir(
        in_dir, True, False, tetrode_list=[tetrode], re_filter=regex)
    container.setup()
    out_dir = os.path.join(in_dir, "nc_results")
    make_dir_if_not_exists(out_dir)
    out_dict = OrderedDict()
    base_list = ["NW", "NE", "SW", "SE"]
    # Header row: one column per (quadrant, aggregation) pair.
    headers = []
    for ap in ["Spikes", "Rate", "Norm_Spikes", "Norm_Rate"]:
        headers += [b + "_" + ap for b in base_list]
    out_dict["File"] = headers
    out_vec = OrderedDict()
    out_vec, out_dict = calculate_directional_stats(
        container, out_vec, out_dict)

    def _summary_headers(measure):
        # Header row for one measure ("Rate" or "Rank"): distance stats,
        # then the quadrant values under each transform.
        row = [
            "Rot_Dist", "Rot_U_Dist", "Shuf_Dist", "Shuf_U_Dist",
            "Shuf_UR_Dist"]
        for prefix in ["", "Rot ", "Undo Rot ", "Shuf ", "Undo Shuf ",
                       "Undo ShufR "]:
            row += [prefix + measure + b for b in base_list]
        return row

    out_dict["Summary Stats Rate"] = _summary_headers("Rate")
    for key, vec in out_vec.items():
        res, p_vecs, pu_vecs, ur = distance_between(
            vec, key=key, measure=euc_dist)
        # The six 2x2 quadrant blocks, in CSV/heatmap column order.
        segments = [
            p_vecs[0], p_vecs[1], pu_vecs[1], p_vecs[2], pu_vecs[2], ur]
        out_dict["Rate Unit " + str(key)] = np.concatenate([res] + segments)
        fig, ax = plt.subplots()
        heat_arr = np.zeros(shape=(2, 12))
        for seg_idx, seg in enumerate(segments):
            heat_arr[:, 2 * seg_idx:2 * seg_idx + 2] = seg.reshape(2, 2)
        sns.heatmap(
            heat_arr, ax=ax, annot=True, square=True, center=0.25,
            cmap="Blues")
        ax.invert_yaxis()
        ax.set_ylim(2, 0)
        ax.set_xlim(0, 12)
        # Red separators between the six 2-column blocks.
        ax.vlines(list(range(2, 12, 2)), 2, 0, colors="r")
        fig.savefig(
            os.path.join(out_dir, str(key) + "_heatmap.png"))
        # BUG FIX: figures were never closed, leaking one per unit.
        plt.close(fig)

    out_dict["Summary Stats Rank"] = _summary_headers("Rank")
    out_vec = to_rank(out_vec)
    for key, vec in out_vec.items():
        res, p_vecs, pu_vecs, ur = distance_between(
            vec, key=key, measure=euc_dist)
        out_dict["Rank Unit " + str(key)] = np.concatenate(
            [res, p_vecs[0], p_vecs[1], pu_vecs[1], p_vecs[2], pu_vecs[2], ur])
    print("Saving results to", os.path.join(
        out_dir, str(tetrode) + "_obj.csv"))
    save_mixed_dict_to_csv(out_dict, out_dir, str(tetrode) + "_obj.csv")
def main(args, config):
    """Run the configured batch analyses over a directory of recordings.

    Reads all settings from *config* (a loaded ``configparser`` object),
    builds an ``NDataContainer`` over ``Setup/in_dir``, then dispatches to
    the analyses enabled in ``analysis_flags``:
      index 0 - place cell summary plots
      index 1 - cell classification statistics CSV
      index 2 - (modifier) whether the stats step should also plot
      index 3 - PCA based clustering

    Args:
        args: Parsed CLI arguments (unused in this function body).
        config: ConfigParser with Setup / Plot / Output / Params sections.
    """
    # Unpack out the cfg file into easier names
    in_dir = config.get("Setup", "in_dir")
    regex_filter = config.get("Setup", "regex_filter")
    regex_filter = None if regex_filter == "None" else regex_filter
    analysis_flags = json.loads(config.get("Setup", "analysis_flags"))
    tetrode_list = json.loads(config.get("Setup", "tetrode_list"))
    should_filter = config.getboolean("Setup", "should_filter")
    seaborn_style = config.getboolean("Plot", "seaborn_style")
    plot_order = json.loads(config.get("Plot", "plot_order"))
    fixed_color = config.get("Plot", "path_color")
    fixed_color = None if fixed_color == "None" else fixed_color
    # BUG FIX: the old code called len(fixed_color) unconditionally, which
    # raised TypeError whenever path_color was "None".
    # NOTE(review): multi-character values are assumed to be JSON (e.g. an
    # RGB list); a bare name like "red" would fail json.loads -- confirm
    # the config contract.
    if fixed_color is not None and len(fixed_color) > 1:
        fixed_color = json.loads(fixed_color)
    s_color = config.getboolean("Plot", "should_color")
    plot_outname = config.get("Plot", "output_dirname")
    dot_size = config.get("Plot", "dot_size")
    dot_size = None if dot_size == "None" else int(dot_size)
    summary_dpi = int(config.get("Plot", "summary_dpi"))
    hd_predict = config.getboolean("Plot", "hd_predict")
    output_format = config.get("Output", "output_format")
    save_bin_data = config.getboolean("Output", "save_bin_data")
    output_spaces = config.getboolean("Output", "output_spaces")
    opt_end = config.get("Output", "optional_end")
    max_units = int(config.get("Setup", "max_units"))
    isi_bound = int(config.get("Params", "isi_bound"))
    isi_bin_length = int(config.get("Params", "isi_bin_length"))

    setup_logging(in_dir)

    # Select a non-interactive backend when rendering straight to PDF.
    if output_format == "pdf":
        matplotlib.use("pdf")

    if seaborn_style:
        sns.set(palette="colorblind")
    else:
        sns.set_style("ticks", {
            'axes.spines.right': False,
            'axes.spines.top': False
        })

    # Automatic extraction of files from starting dir onwards
    container = NDataContainer(load_on_fly=True)
    out_name = container.add_axona_files_from_dir(in_dir,
                                                  tetrode_list=tetrode_list,
                                                  recursive=True,
                                                  re_filter=regex_filter,
                                                  verbose=False,
                                                  unit_cutoff=(0, max_units))
    container.setup()
    # BUG FIX: was "len(container) is 0" -- identity comparison against an
    # int literal only works via CPython small-int caching and is a
    # SyntaxWarning on Python 3.8+.
    if len(container) == 0:
        print(
            "Unable to find any files matching regex {}".format(regex_filter))
        # raise SystemExit is equivalent to exit(-1) but does not depend on
        # the site module installing the exit() helper.
        raise SystemExit(-1)

    # Show summary of place
    if analysis_flags[0]:
        place_cell_summary(container,
                           dpi=summary_dpi,
                           out_dirname=plot_outname,
                           filter_place_cells=should_filter,
                           filter_low_freq=should_filter,
                           opt_end=opt_end,
                           base_dir=in_dir,
                           output_format=output_format,
                           isi_bound=isi_bound,
                           isi_bin_length=isi_bin_length,
                           output=plot_order,
                           save_data=save_bin_data,
                           fixed_color=fixed_color,
                           point_size=dot_size,
                           color_isi=s_color,
                           burst_thresh=6,
                           hd_predict=hd_predict)
        plt.close("all")

    # Do numerical analysis of bursting
    should_plot = analysis_flags[2]
    if analysis_flags[1]:
        import re
        # NOTE(review): remove_extension presumably keeps the trailing dot,
        # since only "csv" (no dot) is appended -- confirm against helper.
        out_name = remove_extension(out_name) + "csv"
        out_name = re.sub(r"file_list_", r"cell_stats_", out_name)
        print("Computing cell stats to save to {}".format(out_name))
        cell_classification_stats(in_dir,
                                  container,
                                  out_name,
                                  should_plot=should_plot,
                                  opt_end=opt_end,
                                  output_spaces=output_spaces)

    # Do PCA based analysis
    if analysis_flags[3]:
        print("Computing pca clustering")
        pca_clustering(container, in_dir, opt_end=opt_end, s_color=s_color)
                    # NOTE(review): fragment -- the enclosing function, the
                    # loop over (r_type, idx) and the `if` matching the
                    # `else:` below are outside this chunk; do not restyle.
                    for u in units:
                        if u in t_units[str(t)]:
                            t_units[str(t)][u].append((r_type, idx))
                else:
                    # First time tetrode t is seen: record each unit with the
                    # (recording type, container index) that produced it.
                    t_units[str(t)] = OrderedDict()
                    for u in units:
                        t_units[str(t)][u] = [(r_type, idx)]

            # One CSV row per (tetrode, unit): "key__tetrode__unit" followed
            # by 17 "Propensity to burst" values (NaN for absent sessions).
            for tetrode, units in t_units.items():
                for unit, idxs in units.items():
                    f.write(key + "__" + str(tetrode) + "__" + str(unit) + ",")
                    burst_arr = np.full(17, np.nan)
                    for i in idxs:
                        unit_o_idx = container.get_units(i[1]).index(unit)
                        data = container.get_data_at(i[1], unit_o_idx)
                        data.burst()
                        p_burst = data.get_results()["Propensity to burst"]
                        # i[0] (r_type) is apparently 1-based -- hence the -1
                        # when slotting into the row array; TODO confirm.
                        burst_arr[i[0] - 1] = p_burst
                    o_str = ""
                    for b in burst_arr:
                        o_str = o_str + "{},".format(b)
                    # Drop the trailing comma before terminating the row.
                    f.write(o_str[:-1] + "\n")

if __name__ == "__main__":
    # Ad-hoc entry point: run the time-resolved check on one hard-coded
    # muscimol recording session.
    from neurochat.nc_datacontainer import NDataContainer

    in_dir = r"D:\SubRet_recordings_imaging\muscimol_data\CanCSR7_muscimol\2_03082018"
    recordings = NDataContainer(load_on_fly=True)
    recordings.add_axona_files_from_dir(in_dir, recursive=True)
    time_resolved_check(in_dir, recordings)