Example #1
def test_export_to_phy():

    repo = 'https://gin.g-node.org/NeuralEnsemble/ephy_testing_data'
    remote_path = 'mearec/mearec_test_10s.h5'
    local_path = download_dataset(repo=repo,
                                  remote_path=remote_path,
                                  local_folder=None)
    recording = se.MEArecRecordingExtractor(local_path)
    sorting = se.MEArecSortingExtractor(local_path)

    waveform_folder = Path('waveforms')
    output_folder = Path('phy_output')

    for f in (waveform_folder, output_folder):
        if f.is_dir():
            shutil.rmtree(f)

    waveform_extractor = extract_waveforms(recording, sorting, waveform_folder)

    export_to_phy(recording,
                  sorting,
                  output_folder,
                  waveform_extractor,
                  compute_pc_features=True,
                  compute_amplitudes=True,
                  max_channels_per_template=8,
                  n_jobs=1,
                  chunk_size=10000,
                  progress_bar=True)
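
For context, an excerpt like this relies on a handful of imports that the listing does not show. A minimal sketch, assuming the usual SpikeInterface module layout (the exact module paths are an assumption and may differ between versions):

# Assumed imports for the snippet above (module locations not taken from the excerpt)
import shutil
from pathlib import Path

import spikeinterface.extractors as se
from spikeinterface import download_dataset, extract_waveforms
from spikeinterface.exporters import export_to_phy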
Example #2
def test_get_unit_amplitudes():
    repo = 'https://gin.g-node.org/NeuralEnsemble/ephy_testing_data'
    remote_path = 'mearec/mearec_test_10s.h5'
    local_path = download_dataset(repo=repo,
                                  remote_path=remote_path,
                                  local_folder=None)
    recording = se.MEArecRecordingExtractor(local_path)
    sorting = se.MEArecSortingExtractor(local_path)

    we = extract_waveforms(recording,
                           sorting,
                           'mearec_waveforms',
                           ms_before=1.,
                           ms_after=2.,
                           max_spikes_per_unit=500,
                           n_jobs=1,
                           chunk_size=30000,
                           load_if_exists=True)

    amplitudes = get_unit_amplitudes(we,
                                     peak_sign='neg',
                                     outputs='concatenated',
                                     chunk_size=10000,
                                     n_jobs=1)
    # print(amplitudes)

    amplitudes = get_unit_amplitudes(we,
                                     peak_sign='neg',
                                     outputs='by_units',
                                     chunk_size=10000,
                                     n_jobs=1)
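
The two calls above differ only in the layout of the result. A rough sketch of how each layout might be consumed, assuming 'concatenated' yields one array of amplitudes per segment and 'by_units' a per-segment dict keyed by unit id (both assumptions about the return structure):

    # Hypothetical inspection of both output layouts (shapes and keys assumed)
    amps_concat = get_unit_amplitudes(we, peak_sign='neg', outputs='concatenated')
    print(amps_concat[0].shape)                    # all spike amplitudes of segment 0

    amps_by_unit = get_unit_amplitudes(we, peak_sign='neg', outputs='by_units')
    for unit_id, amps in amps_by_unit[0].items():  # per-unit amplitudes for segment 0
        print(unit_id, amps.size)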
Example #3
def load_data(filepath):
    '''
    Parameters
    ----------
    filepath : str
        Path to a MEArec file.

    Returns
    -------
    recording : RecordingExtractor
        The MEArec recording (loaded as-is, not preprocessed).
    sorting_GT : SortingExtractor
        The ground-truth sorting.
    '''
    recording = se.MEArecRecordingExtractor(filepath)
    sorting_GT = se.MEArecSortingExtractor(filepath)

    # recording info
    fs = recording.get_sampling_frequency()
    channel_ids = recording.get_channel_ids()
    channel_loc = recording.get_channel_locations()
    num_frames = recording.get_num_frames()
    duration = recording.frame_to_time(num_frames)
    print(f'Sampling frequency: {fs}')
    print(f'Channel ids: {channel_ids}')
    print(f'Channel locations: {channel_loc}')
    print(f'Number of frames: {num_frames}')
    print(f'Recording duration: {duration}')
    # sorting_GT info
    unit_ids = sorting_GT.get_unit_ids()
    print(f'Unit ids: {unit_ids}')
    return recording, sorting_GT
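
A hypothetical call to load_data, reusing the MEArec test file that the other examples download (the local path is an assumption):

# Hypothetical usage of load_data (file path assumed)
recording, sorting_GT = load_data('mearec/mearec_test_10s.h5')
print(recording.get_num_channels(), len(sorting_GT.get_unit_ids()))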
Example #4
def test_get_unit_colors():
    local_path = download_dataset(remote_path='mearec/mearec_test_10s.h5')
    rec = se.MEArecRecordingExtractor(local_path)
    sorting = se.MEArecSortingExtractor(local_path)

    colors = get_unit_colors(sorting)
    print(colors)
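
get_unit_colors presumably returns a dict mapping each unit id to a matplotlib-compatible color; a minimal sketch for inspecting it (the dict structure is an assumption):

    # Iterate the assumed {unit_id: color} mapping
    for unit_id, color in colors.items():
        print(unit_id, color)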
Example #5
    def generate_a_dataset(self, config: dict = None):
        """
        Generate a dataset (recording, sorting_true) with parameters from a config file.

        TODO: currently, only 'file_path' is read from the config.
        """
        # return se.example_datasets.toy_example(num_channels=10, duration=50)
        recording = se.MEArecRecordingExtractor(config['file_path'])
        sorting_true = se.MEArecSortingExtractor(config['file_path'])
        return recording, sorting_true
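
A sketch of how this helper might be called from another method of the same class; the config key follows the method body above, and the file path is hypothetical:

    # Hypothetical usage (assumed to run inside the same test class)
    config = {'file_path': 'mearec/mearec_test_10s.h5'}
    recording, sorting_true = self.generate_a_dataset(config)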
Example #6
def test_compute_correlograms():
    repo = 'https://gin.g-node.org/NeuralEnsemble/ephy_testing_data'
    remote_path = 'mearec/mearec_test_10s.h5'
    local_path = download_dataset(repo=repo,
                                  remote_path=remote_path,
                                  local_folder=None)
    # ~ recording = se.MEArecRecordingExtractor(local_path)
    sorting = se.MEArecSortingExtractor(local_path)

    unit_ids = sorting.unit_ids
    sorting2 = sorting.select_units(unit_ids[:3])
    correlograms, bins = compute_correlograms(sorting2)
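
A possible follow-up for visual inspection, assuming correlograms is a (num_units, num_units, num_bins) array and bins holds the bin edges in ms (both assumptions about the return values):

    # Sketch: plot the autocorrelogram of the first selected unit (array layout assumed)
    import matplotlib.pyplot as plt
    import numpy as np

    plt.bar(bins[:-1], correlograms[0, 0, :], width=np.diff(bins), align='edge')
    plt.xlabel('lag (ms)')
    plt.ylabel('count')
    plt.show()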
Example #7
    def setUp(self):
        #~ self._rec, self._sorting = se.toy_example(num_channels=10, duration=10, num_segments=1)
        #~ self._rec = self._rec.save()
        #~ self._sorting = self._sorting.save()
        local_path = download_dataset(remote_path='mearec/mearec_test_10s.h5')
        self._rec = se.MEArecRecordingExtractor(local_path)

        self._sorting = se.MEArecSortingExtractor(local_path)

        self.num_units = len(self._sorting.get_unit_ids())
        #  self._we = extract_waveforms(self._rec, self._sorting, './toy_example', load_if_exists=True)
        self._we = extract_waveforms(self._rec, self._sorting, './mearec_test', load_if_exists=True)

        self._amplitudes = st.get_spike_amplitudes(self._we, peak_sign='neg', outputs='by_unit')
        self._gt_comp = sc.compare_sorter_to_ground_truth(self._sorting, self._sorting)
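
Because self._gt_comp compares the ground-truth sorting with itself, a test built on this setUp could presumably assert perfect scores. A minimal sketch, assuming the comparison object exposes get_performance() with a 'by_unit' method and an 'accuracy' column:

    def test_perfect_self_comparison(self):
        # Hypothetical check: comparing a sorting to itself should give accuracy 1
        perf = self._gt_comp.get_performance(method='by_unit')
        assert (perf['accuracy'] == 1).all()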
Example #8
def test_export_to_phy_by_sparsity():
    repo = 'https://gin.g-node.org/NeuralEnsemble/ephy_testing_data'
    remote_path = 'mearec/mearec_test_10s.h5'
    local_path = download_dataset(repo=repo,
                                  remote_path=remote_path,
                                  local_folder=None)
    recording = se.MEArecRecordingExtractor(local_path)
    sorting = se.MEArecSortingExtractor(local_path)

    waveform_folder = Path('waveforms')
    output_folder_radius = Path('phy_output_radius')
    output_folder_thr = Path('phy_output_thr')

    for f in (waveform_folder, output_folder_radius, output_folder_thr):
        if f.is_dir():
            shutil.rmtree(f)

    waveform_extractor = extract_waveforms(recording, sorting, waveform_folder)

    export_to_phy(waveform_extractor,
                  output_folder_radius,
                  compute_pc_features=True,
                  compute_amplitudes=True,
                  max_channels_per_template=None,
                  sparsity_dict=dict(method="radius", radius_um=50),
                  n_jobs=1,
                  chunk_size=10000,
                  progress_bar=True)

    template_ind = np.load(output_folder_radius / "template_ind.npy")
    # templates have different shapes!
    assert -1 in template_ind

    export_to_phy(waveform_extractor,
                  output_folder_thr,
                  compute_pc_features=True,
                  compute_amplitudes=True,
                  max_channels_per_template=None,
                  sparsity_dict=dict(method="threshold", threshold=2),
                  n_jobs=1,
                  chunk_size=10000,
                  progress_bar=True)

    template_ind = np.load(output_folder_thr / "template_ind.npy")
    # templates have different shapes!
    assert -1 in template_ind
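
The -1 entries checked above are presumably padding: with a sparsity rule, each template keeps a different number of channels, and unused slots of template_ind are filled with -1. A short sketch for inspecting this, assuming the exporter also writes the standard phy templates.npy file:

    # Sketch (file name and -1 padding convention assumed)
    templates = np.load(output_folder_radius / "templates.npy")
    template_ind = np.load(output_folder_radius / "template_ind.npy")
    for unit_index, chan_inds in enumerate(template_ind):
        n_kept = np.sum(chan_inds >= 0)   # channels actually used by this template
        print(unit_index, n_kept, templates.shape[1:])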
Example #9
def test_compute_spike_amplitudes_parallel():
    repo = 'https://gin.g-node.org/NeuralEnsemble/ephy_testing_data'
    remote_path = 'mearec/mearec_test_10s.h5'
    local_path = download_dataset(repo=repo,
                                  remote_path=remote_path,
                                  local_folder=None)
    recording = se.MEArecRecordingExtractor(local_path)
    sorting = se.MEArecSortingExtractor(local_path)

    folder = Path('mearec_waveforms_all')

    we = extract_waveforms(recording,
                           sorting,
                           folder,
                           ms_before=1.,
                           ms_after=2.,
                           max_spikes_per_unit=None,
                           n_jobs=1,
                           chunk_size=30000,
                           load_if_exists=True)

    amplitudes1 = compute_spike_amplitudes(we,
                                           peak_sign='neg',
                                           load_if_exists=False,
                                           outputs='concatenated',
                                           chunk_size=10000,
                                           n_jobs=1)
    # TODO: fix multiprocessing for spike amplitudes
    amplitudes2 = compute_spike_amplitudes(we,
                                           peak_sign='neg',
                                           load_if_exists=False,
                                           outputs='concatenated',
                                           chunk_size=10000,
                                           n_jobs=2)

    assert np.array_equal(amplitudes1[0], amplitudes2[0])
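
The same consistency check could presumably be run on the per-unit layout; a sketch mirroring the calls above with outputs='by_unit' (the exact structure of the return value is an assumption):

    # Hypothetical per-unit comparison (return structure assumed)
    amps1 = compute_spike_amplitudes(we, peak_sign='neg', outputs='by_unit',
                                     chunk_size=10000, n_jobs=1)
    amps2 = compute_spike_amplitudes(we, peak_sign='neg', outputs='by_unit',
                                     chunk_size=10000, n_jobs=2)
    for unit_id in sorting.unit_ids:
        assert np.array_equal(amps1[0][unit_id], amps2[0][unit_id])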
Example #10
import spikeinterface as si
import spikeinterface.extractors as se
import spikeinterface.toolkit as st
import spikeinterface.sorters as ss

##############################################################################
# First, let's download a simulated dataset
# from the repo 'https://gin.g-node.org/NeuralEnsemble/ephy_testing_data'.
#
# Let's imagine that this sorting is in fact the output of a sorter.
#

local_path = si.download_dataset(remote_path='mearec/mearec_test_10s.h5')
recording = se.MEArecRecordingExtractor(local_path)
sorting = se.MEArecSortingExtractor(local_path)
print(recording)
print(sorting)

##############################################################################
# Next, we extract waveforms and compute principal components on them.

folder = 'waveforms_mearec'
we = si.extract_waveforms(recording, sorting, folder,
    load_if_exists=True,
    ms_before=1, ms_after=2., max_spikes_per_unit=500,
    n_jobs=1, chunk_size=30000)
print(we)

pc = st.compute_principal_components(we, load_if_exists=True,
            n_components=3, mode='by_channel_local')
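
The pc object can then presumably be queried per unit; a minimal sketch, assuming a get_projections(unit_id) method on the principal-components extension (an assumption about the API):

# Sketch: inspect the projections of one unit (method name and shape assumed)
unit_id = sorting.unit_ids[0]
proj = pc.get_projections(unit_id)
# Expected shape (assumed): (num_spikes, n_components, num_channels)
print(unit_id, proj.shape)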
Example #11
##############################################################################
# Reading a MEArec file gives two objects: a recording and a sorting.

recording, sorting = se.read_mearec(mearec_folder_path)
print(recording)
print(type(recording))
print()
print(sorting)
print(type(sorting))


##############################################################################
# This is equivalent to instantiating the two extractors directly:

recording = se.MEArecRecordingExtractor(mearec_folder_path)
sorting = se.MEArecSortingExtractor(mearec_folder_path)

##############################################################################
# Recording and sorting objects can be plotted quickly with the widgets submodule.

import matplotlib.pyplot as plt
import spikeinterface.widgets as sw

w_ts = sw.plot_timeseries(recording, time_range=(0, 5))
w_rs = sw.plot_rasters(sorting, time_range=(0, 5))

plt.show()

Example #12
def _loading_sorting(file_path):
    return se.MEArecSortingExtractor(file_path)
Example #13
def test_compute_spike_amplitudes():
    repo = 'https://gin.g-node.org/NeuralEnsemble/ephy_testing_data'
    remote_path = 'mearec/mearec_test_10s.h5'
    local_path = download_dataset(repo=repo,
                                  remote_path=remote_path,
                                  local_folder=None)
    recording = se.MEArecRecordingExtractor(local_path)
    sorting = se.MEArecSortingExtractor(local_path)

    folder = Path('mearec_waveforms')

    we = extract_waveforms(recording,
                           sorting,
                           folder,
                           ms_before=1.,
                           ms_after=2.,
                           max_spikes_per_unit=500,
                           n_jobs=1,
                           chunk_size=30000,
                           load_if_exists=False,
                           overwrite=True)

    amplitudes = compute_spike_amplitudes(we,
                                          peak_sign='neg',
                                          outputs='concatenated',
                                          chunk_size=10000,
                                          n_jobs=1)
    amplitudes = compute_spike_amplitudes(we,
                                          peak_sign='neg',
                                          outputs='by_unit',
                                          chunk_size=10000,
                                          n_jobs=1)

    gain = 0.1
    recording.set_channel_gains(gain)
    recording.set_channel_offsets(0)

    folder = Path('mearec_waveforms_scaled')

    we_scaled = extract_waveforms(recording,
                                  sorting,
                                  folder,
                                  ms_before=1.,
                                  ms_after=2.,
                                  max_spikes_per_unit=500,
                                  n_jobs=1,
                                  chunk_size=30000,
                                  load_if_exists=False,
                                  overwrite=True,
                                  return_scaled=True)

    amplitudes_scaled = compute_spike_amplitudes(we_scaled,
                                                 peak_sign='neg',
                                                 outputs='concatenated',
                                                 chunk_size=10000,
                                                 n_jobs=1,
                                                 return_scaled=True)
    amplitudes_unscaled = compute_spike_amplitudes(we_scaled,
                                                   peak_sign='neg',
                                                   outputs='concatenated',
                                                   chunk_size=10000,
                                                   n_jobs=1,
                                                   return_scaled=False)

    assert np.allclose(amplitudes_scaled[0], amplitudes_unscaled[0] * gain)

    # reload as an extension from we
    assert SpikeAmplitudesCalculator in we.get_available_extensions()
    assert we_scaled.is_extension('spike_amplitudes')
    sac = we.load_extension('spike_amplitudes')
    assert isinstance(sac, SpikeAmplitudesCalculator)
    assert sac._amplitudes is not None
    sac2 = SpikeAmplitudesCalculator.load_from_folder(folder)
    assert sac2._amplitudes is not None
Example #14
    ax.yaxis.set_tick_params(labelsize=12)
    ax.set_xticklabels(sorters, fontsize=15, rotation=45, ha='center')
    ax.spines['top'].set_visible(False)
    ax.spines['right'].set_visible(False)
    fig = ax.get_figure()
    fig.subplots_adjust(bottom=0.2)
    fig.set_size_inches(8, 7)


p = Path('.')

study_folder = p / 'study_mearec_SqMEA1015um'

mearec_filename = p / 'recordings_50cells_SqMEA-10-15um_60.0_10.0uV_27-03-2019_13-31.h5'
rec0 = se.MEArecRecordingExtractor(mearec_filename)
gt_sorting0 = se.MEArecSortingExtractor(mearec_filename)
study = None

# Setup study folder
if not study_folder.is_dir():
    print('Setting up study folder:', study_folder)
    gt_dict = {'rec0': (rec0, gt_sorting0)}
    study = GroundTruthStudy.create(study_folder, gt_dict)

if study is None:
    study = GroundTruthStudy(study_folder)

# Run sorters
sorter_list = [
    'herdingspikes', 'kilosort2', 'ironclust', 'spykingcircus', 'tridesclous',
    'mountainsort4'
Example #15
def test_get_spike_amplitudes():
    repo = 'https://gin.g-node.org/NeuralEnsemble/ephy_testing_data'
    remote_path = 'mearec/mearec_test_10s.h5'
    local_path = download_dataset(repo=repo,
                                  remote_path=remote_path,
                                  local_folder=None)
    recording = se.MEArecRecordingExtractor(local_path)
    sorting = se.MEArecSortingExtractor(local_path)

    folder = Path('mearec_waveforms')

    we = extract_waveforms(recording,
                           sorting,
                           folder,
                           ms_before=1.,
                           ms_after=2.,
                           max_spikes_per_unit=500,
                           n_jobs=1,
                           chunk_size=30000,
                           load_if_exists=False,
                           overwrite=True)

    amplitudes = get_spike_amplitudes(we,
                                      peak_sign='neg',
                                      outputs='concatenated',
                                      chunk_size=10000,
                                      n_jobs=1)
    amplitudes = get_spike_amplitudes(we,
                                      peak_sign='neg',
                                      outputs='by_unit',
                                      chunk_size=10000,
                                      n_jobs=1)

    gain = 0.1
    recording.set_channel_gains(gain)
    recording.set_channel_offsets(0)

    folder = Path('mearec_waveforms_scaled')

    we_scaled = extract_waveforms(recording,
                                  sorting,
                                  folder,
                                  ms_before=1.,
                                  ms_after=2.,
                                  max_spikes_per_unit=500,
                                  n_jobs=1,
                                  chunk_size=30000,
                                  load_if_exists=False,
                                  overwrite=True,
                                  return_scaled=True)

    amplitudes_scaled = get_spike_amplitudes(we_scaled,
                                             peak_sign='neg',
                                             outputs='concatenated',
                                             chunk_size=10000,
                                             n_jobs=1,
                                             return_scaled=True)
    amplitudes_unscaled = get_spike_amplitudes(we_scaled,
                                               peak_sign='neg',
                                               outputs='concatenated',
                                               chunk_size=10000,
                                               n_jobs=1,
                                               return_scaled=False)

    assert np.allclose(amplitudes_scaled[0], amplitudes_unscaled[0] * gain)