Example #1
def test_clusters_metrics():
    np.random.seed(54)
    rec_length = 1000
    frs = np.array([3, 100, 80, 40])  # firing rates
    cid = [0, 1, 3, 4]  # cluster ids; cluster 2 is deliberately left out so one cluster has no spikes
    t, a, c = multiple_spike_trains(firing_rates=frs,
                                    rec_len_secs=rec_length,
                                    cluster_ids=cid)
    d = np.sin(2 * np.pi * c / rec_length * t) * 100  # sinusoidal depth drift; the cluster id drives the frequency

    def _assertions(dfm, idf, target_cid):
        # dfm: qc dataframe, idf: boolean mask of clusters present in dfm, target_cid: expected cluster ids
        assert np.allclose(dfm['amp_median'][idf] / np.exp(5.5) * 1e6,
                           1,
                           rtol=1.1)
        assert np.allclose(dfm['amp_std_dB'][idf] / 20 * np.log10(np.exp(0.5)),
                           1,
                           rtol=1.1)
        assert np.allclose(dfm['drift'][idf],
                           np.array(cid) * 100 * 4 * 3.6,
                           rtol=1.1)
        assert np.allclose(dfm['firing_rate'][idf], frs, rtol=1.1)
        assert np.allclose(dfm['cluster_id'], target_cid)

    # check with missing clusters
    dfm = quick_unit_metrics(c,
                             t,
                             a,
                             d,
                             cluster_ids=np.arange(5),
                             tbounds=[100, 900])
    idf, _ = ismember(np.arange(5), cid)
    _assertions(dfm, idf, np.arange(5))
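The names np, multiple_spike_trains, quick_unit_metrics and ismember are not defined in the snippet itself; a minimal import preamble, assuming the ibllib/brainbox ecosystem (exact module paths may differ between versions), could look like:

import numpy as np
from brainbox.metrics.single_units import quick_unit_metrics  # assumed module path for the QC metrics
from iblutil.numerical import ismember  # older releases expose this as brainbox.numerical.ismember
# multiple_spike_trains is assumed to be a synthetic spike-train generator from the same test utilities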
Example #2
def run_metrics(probe_path):
    """
    Launch phy given an eid and probe name.
    TODO calculate metrics and save as .tsvs to include in GUI when launching?
    """
    # This is a first draft, no error handling and a draft dataset list.
    probe_path = Path(probe_path)
    if probe_path.name == 'pykilosort':
        ses_path = probe_path.parent.parent.parent
        probe_name = probe_path.parent.name
    else:
        ses_path = probe_path.parent.parent
        probe_name = probe_path.name
    ephys_file_dir = ses_path.joinpath('raw_ephys_data', probe_name)
    raw_files = glob.glob(os.path.join(ephys_file_dir, '*ap.*bin'))
    raw_file = [raw_files[0]] if raw_files else None
    cluster_metrics_path = probe_path.joinpath('clusters_metrics.ibl.pqt')
    if not cluster_metrics_path.exists():
        print('computing metrics, this may take a bit of time')
        spikes = one.alf.io.load_object(probe_path, 'spikes',
                                        attribute=['depths', 'times', 'amps', 'clusters'])
        clusters = one.alf.io.load_object(probe_path, 'clusters', attribute=['channels'])
        r = quick_unit_metrics(spikes.clusters, spikes.times, spikes.amps, spikes.depths)
        r = pd.DataFrame(r)
        r.to_parquet(cluster_metrics_path)
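run_metrics only needs the path to a sorted probe folder. A hedged usage sketch with the imports the snippet relies on (package layout assumed from the ONE/ibllib ecosystem, and the probe path below is purely hypothetical):

import glob
import os
from pathlib import Path

import pandas as pd
import one.alf.io  # ALF object loader from the ONE-api package
from brainbox.metrics.single_units import quick_unit_metrics  # assumed module path

# hypothetical probe folder produced by a spike sorter
run_metrics('/data/subject/2021-01-01/001/alf/probe00')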