Example #1
def test_get_unit_colors():
    local_path = download_dataset(remote_path='mearec/mearec_test_10s.h5')
    rec = se.MEArecRecordingExtractor(local_path)
    sorting = se.MEArecSortingExtractor(local_path)

    colors = get_unit_colors(sorting)
    print(colors)
Example #2
def test_localize_peaks():

    repo = 'https://gin.g-node.org/NeuralEnsemble/ephy_testing_data'
    remote_path = 'mearec/mearec_test_10s.h5'
    local_path = download_dataset(repo=repo,
                                  remote_path=remote_path,
                                  local_folder=None)
    recording = MEArecRecordingExtractor(local_path)

    peaks = detect_peaks(recording,
                         method='locally_exclusive',
                         peak_sign='neg',
                         detect_threshold=5,
                         n_shifts=2,
                         chunk_size=10000,
                         verbose=False,
                         progress_bar=False)

    peak_locations = localize_peaks(recording,
                                    peaks,
                                    method='center_of_mass',
                                    chunk_size=10000,
                                    verbose=True,
                                    progress_bar=False)
    assert peaks.size == peak_locations.shape[0]

    peak_locations = localize_peaks(recording,
                                    peaks,
                                    method='monopolar_triangulation',
                                    n_jobs=1,
                                    chunk_size=10000,
                                    verbose=True,
                                    progress_bar=True)
    assert peaks.size == peak_locations.shape[0]
Example #3
def test_get_unit_amplitudes():
    repo = 'https://gin.g-node.org/NeuralEnsemble/ephy_testing_data'
    remote_path = 'mearec/mearec_test_10s.h5'
    local_path = download_dataset(repo=repo,
                                  remote_path=remote_path,
                                  local_folder=None)
    recording = se.MEArecRecordingExtractor(local_path)
    sorting = se.MEArecSortingExtractor(local_path)

    we = extract_waveforms(recording,
                           sorting,
                           'mearec_waveforms',
                           ms_before=1.,
                           ms_after=2.,
                           max_spikes_per_unit=500,
                           n_jobs=1,
                           chunk_size=30000,
                           load_if_exists=True)

    amplitudes = get_unit_amplitudes(we,
                                     peak_sign='neg',
                                     outputs='concatenated',
                                     chunk_size=10000,
                                     n_jobs=1)
    # print(amplitudes)

    amplitudes = get_unit_amplitudes(we,
                                     peak_sign='neg',
                                     outputs='by_units',
                                     chunk_size=10000,
                                     n_jobs=1)
Example #4
def test_run_sorter_singularity():
    mearec_filename = download_dataset(remote_path='mearec/mearec_test_10s.h5',
                                       unlock=True)
    output_folder = 'sorting_tdc_singularity'

    recording, sorting_true = read_mearec(mearec_filename)

    sorter_params = {'detect_threshold': 4.9}

    singularity_image = 'spikeinterface/tridesclous-base:1.6.4-1'

    sorting = run_sorter('tridesclous',
                         recording,
                         output_folder=output_folder,
                         remove_existing_folder=True,
                         delete_output_folder=False,
                         verbose=True,
                         raise_error=True,
                         singularity_image=singularity_image,
                         **sorter_params)
    print(sorting)

    # basic check to confirm sorting was successful
    assert 'Tridesclous' in sorting.to_dict()['class']
    assert len(sorting.get_unit_ids()) > 0
Example #5
def test_export_to_phy():

    repo = 'https://gin.g-node.org/NeuralEnsemble/ephy_testing_data'
    remote_path = 'mearec/mearec_test_10s.h5'
    local_path = download_dataset(repo=repo,
                                  remote_path=remote_path,
                                  local_folder=None)
    recording = se.MEArecRecordingExtractor(local_path)
    sorting = se.MEArecSortingExtractor(local_path)

    waveform_folder = Path('waveforms')
    output_folder = Path('phy_output')

    for f in (waveform_folder, output_folder):
        if f.is_dir():
            shutil.rmtree(f)

    waveform_extractor = extract_waveforms(recording, sorting, waveform_folder)

    export_to_phy(recording,
                  sorting,
                  output_folder,
                  waveform_extractor,
                  compute_pc_features=True,
                  compute_amplitudes=True,
                  max_channels_per_template=8,
                  n_jobs=1,
                  chunk_size=10000,
                  progress_bar=True)
Example #6
def test_find_spike_from_templates():
    repo = 'https://gin.g-node.org/NeuralEnsemble/ephy_testing_data'
    remote_path = 'mearec/mearec_test_10s.h5'
    local_path = download_dataset(repo=repo,
                                  remote_path=remote_path,
                                  local_folder=None)
    recording, gt_sorting = read_mearec(local_path)

    folder = 'waveforms_mearec'
    we = extract_waveforms(recording,
                           gt_sorting,
                           folder,
                           load_if_exists=True,
                           ms_before=1,
                           ms_after=2.,
                           max_spikes_per_unit=500,
                           n_jobs=1,
                           chunk_size=30000)
    # ~ print(we)

    spikes = find_spike_from_templates(
        recording,
        we,
        method='simple',
    )
    print(spikes)
Example #7
def test_estimate_motion_rigid():
    # repo and remote_path are module-level constants in the original test file
    repo = 'https://gin.g-node.org/NeuralEnsemble/ephy_testing_data'
    remote_path = 'mearec/mearec_test_10s.h5'
    local_path = download_dataset(repo=repo,
                                  remote_path=remote_path,
                                  local_folder=None)
    recording = MEArecRecordingExtractor(local_path)
    print(recording)
    peaks = np.load('mearec_detected_peaks.npy')

    motion, temporal_bins, spatial_bins, extra_check = estimate_motion(
        recording,
        peaks,
        peak_locations=None,
        direction='y',
        bin_duration_s=1.,
        bin_um=10.,
        margin_um=5,
        method='decentralized_registration',
        method_kwargs={},
        non_rigid_kwargs=None,
        output_extra_check=True,
        progress_bar=True,
        verbose=True)
    # print(motion)
    # print(extra_check)
    print(spatial_bins)

    assert spatial_bins is None
Example #8
def test_detect_peaks():
    repo = 'https://gin.g-node.org/NeuralEnsemble/ephy_testing_data'
    remote_path = 'mearec/mearec_test_10s.h5'
    local_path = download_dataset(repo=repo,
                                  remote_path=remote_path,
                                  local_folder=None)
    recording = MEArecRecordingExtractor(local_path)

    peaks = detect_peaks(recording,
                         method='by_channel',
                         peak_sign='neg',
                         detect_threshold=5,
                         n_shifts=2,
                         chunk_size=10000,
                         verbose=1,
                         progress_bar=False,
                         outputs='numpy_compact')

    sample_inds, chan_inds, amplitudes, seg_inds = detect_peaks(
        recording,
        method='locally_exclusive',
        peak_sign='neg',
        detect_threshold=5,
        n_shifts=2,
        chunk_size=10000,
        verbose=1,
        progress_bar=False,
        outputs='numpy_split')
Example #9
def test_detect_peaks():

    repo = 'https://gin.g-node.org/NeuralEnsemble/ephy_testing_data'
    remote_path = 'mearec/mearec_test_10s.h5'
    local_path = download_dataset(repo=repo,
                                  remote_path=remote_path,
                                  local_folder=None)
    recording = MEArecRecordingExtractor(local_path)

    # by_channel
    peaks = detect_peaks(recording,
                         method='by_channel',
                         peak_sign='neg',
                         detect_threshold=5,
                         n_shifts=2,
                         chunk_size=10000,
                         verbose=1,
                         progress_bar=False)

    # by_channel
    sorting = detect_peaks(recording,
                           method='by_channel',
                           peak_sign='neg',
                           detect_threshold=5,
                           n_shifts=2,
                           chunk_size=10000,
                           verbose=1,
                           progress_bar=False,
                           outputs="sorting")
    assert isinstance(sorting, BaseSorting)

    # locally_exclusive
    peaks = detect_peaks(recording,
                         method='locally_exclusive',
                         peak_sign='neg',
                         detect_threshold=5,
                         n_shifts=2,
                         chunk_size=10000,
                         verbose=1,
                         progress_bar=False)

    # locally_exclusive + localization
    peaks = detect_peaks(
        recording,
        method='locally_exclusive',
        peak_sign='neg',
        detect_threshold=5,
        n_shifts=2,
        chunk_size=10000,
        verbose=1,
        progress_bar=True,
        localization_dict=dict(method='center_of_mass',
                               local_radius_um=150,
                               ms_before=0.1,
                               ms_after=0.3),
        #localization_dict=dict(method='monopolar_triangulation', local_radius_um=150,
        #                       ms_before=0.1, ms_after=0.3, max_distance_um=1000)
    )
    assert 'x' in peaks.dtype.fields
Example #10
def test_run_sorter_local():
    local_path = download_dataset(remote_path='mearec/mearec_test_10s.h5')
    recording, sorting_true = read_mearec(local_path)

    sorter_params = {'detect_threshold': 4.9}

    sorting = run_sorter('tridesclous', recording, output_folder='sorting_tdc_local',
                         remove_existing_folder=True, delete_output_folder=False,
                         verbose=True, raise_error=True, docker_image=None,
                         **sorter_params)
    print(sorting)
Example #11
def test_compute_correlograms():
    repo = 'https://gin.g-node.org/NeuralEnsemble/ephy_testing_data'
    remote_path = 'mearec/mearec_test_10s.h5'
    local_path = download_dataset(repo=repo,
                                  remote_path=remote_path,
                                  local_folder=None)
    # ~ recording = se.MEArecRecordingExtractor(local_path)
    sorting = se.MEArecSortingExtractor(local_path)

    unit_ids = sorting.unit_ids
    sorting2 = sorting.select_units(unit_ids[:3])
    correlograms, bins = compute_correlograms(sorting2)
Example #12
def setup_module():
    # note the trailing comma: without it the string itself would be iterated character by character
    for folder in ('mearec_waveforms',):
        if Path(folder).is_dir():
            shutil.rmtree(folder)
    
    local_path = download_dataset(remote_path='mearec/mearec_test_10s.h5')
    recording, sorting = read_mearec(local_path)
    print(recording)
    print(sorting)
    
    we = extract_waveforms(recording, sorting, 'mearec_waveforms',
        ms_before=3., ms_after=4., max_spikes_per_unit=500,
        load_if_exists=True,
        n_jobs=1, chunk_size=30000)
Example #13
    def setUp(self):
        #~ self._rec, self._sorting = se.toy_example(num_channels=10, duration=10, num_segments=1)
        #~ self._rec = self._rec.save()
        #~ self._sorting = self._sorting.save()
        local_path = download_dataset(remote_path='mearec/mearec_test_10s.h5')
        self._rec = se.MEArecRecordingExtractor(local_path)

        self._sorting = se.MEArecSortingExtractor(local_path)

        self.num_units = len(self._sorting.get_unit_ids())
        #  self._we = extract_waveforms(self._rec, self._sorting, './toy_example', load_if_exists=True)
        self._we = extract_waveforms(self._rec, self._sorting, './mearec_test', load_if_exists=True)

        self._amplitudes = st.get_spike_amplitudes(self._we, peak_sign='neg', outputs='by_unit')
        self._gt_comp = sc.compare_sorter_to_ground_truth(self._sorting, self._sorting)
Example #14
def test_export_to_phy_by_sparsity():
    repo = 'https://gin.g-node.org/NeuralEnsemble/ephy_testing_data'
    remote_path = 'mearec/mearec_test_10s.h5'
    local_path = download_dataset(repo=repo,
                                  remote_path=remote_path,
                                  local_folder=None)
    recording = se.MEArecRecordingExtractor(local_path)
    sorting = se.MEArecSortingExtractor(local_path)

    waveform_folder = Path('waveforms')
    output_folder_radius = Path('phy_output_radius')
    output_folder_thr = Path('phy_output_thr')

    for f in (waveform_folder, output_folder_radius, output_folder_thr):
        if f.is_dir():
            shutil.rmtree(f)

    waveform_extractor = extract_waveforms(recording, sorting, waveform_folder)

    export_to_phy(waveform_extractor,
                  output_folder_radius,
                  compute_pc_features=True,
                  compute_amplitudes=True,
                  max_channels_per_template=None,
                  sparsity_dict=dict(method="radius", radius_um=50),
                  n_jobs=1,
                  chunk_size=10000,
                  progress_bar=True)

    template_ind = np.load(output_folder_radius / "template_ind.npy")
    # templates have different shapes!
    assert -1 in template_ind

    export_to_phy(waveform_extractor,
                  output_folder_thr,
                  compute_pc_features=True,
                  compute_amplitudes=True,
                  max_channels_per_template=None,
                  sparsity_dict=dict(method="threshold", threshold=2),
                  n_jobs=1,
                  chunk_size=10000,
                  progress_bar=True)

    template_ind = np.load(output_folder_thr / "template_ind.npy")
    # templates have different shapes!
    assert -1 in template_ind
Example #15
def test_localize_peaks():
    repo = 'https://gin.g-node.org/NeuralEnsemble/ephy_testing_data'
    remote_path = 'mearec/mearec_test_10s.h5'
    local_path = download_dataset(repo=repo, remote_path=remote_path, local_folder=None)
    recording = MEArecRecordingExtractor(local_path)

    peaks = detect_peaks(recording,
                         method='by_channel',
                         peak_sign='neg', detect_threshold=5, n_shifts=2,
                         chunk_size=10000, verbose=False, progress_bar=False,
                         outputs='numpy_compact'
                         )

    peak_locations = localize_peaks(recording, peaks,
                                    chunk_size=10000, verbose=True, progress_bar=False, )

    assert peaks.size == peak_locations.shape[0]
Example #16
def test_motion_functions():
    # repo and remote_path are module-level constants in the original test file
    repo = 'https://gin.g-node.org/NeuralEnsemble/ephy_testing_data'
    remote_path = 'mearec/mearec_test_10s.h5'
    local_path = download_dataset(repo=repo,
                                  remote_path=remote_path,
                                  local_folder=None)
    recording = MEArecRecordingExtractor(local_path)

    peaks = np.load('mearec_detected_peaks.npy')

    bin_um = 2
    motion_histogram, temporal_bins, spatial_bins = make_motion_histogram(
        recording, peaks, bin_um=bin_um)
    # print(motion_histogram.shape, temporal_bins.size, spatial_bins.size)

    pairwise_displacement = compute_pairwise_displacement(motion_histogram,
                                                          bin_um,
                                                          method='conv2d')

    motion = compute_global_displacement(pairwise_displacement)
Example #17
def test_export_report():
    repo = 'https://gin.g-node.org/NeuralEnsemble/ephy_testing_data'
    remote_path = 'mearec/mearec_test_10s.h5'
    local_path = download_dataset(repo=repo,
                                  remote_path=remote_path,
                                  local_folder=None)
    recording, sorting = se.read_mearec(local_path)

    waveform_folder = Path('waveforms')
    output_folder = Path('mearec_GT_report')

    for f in (waveform_folder, output_folder):
        if f.is_dir():
            shutil.rmtree(f)

    waveform_extractor = extract_waveforms(recording, sorting, waveform_folder)

    job_kwargs = dict(n_jobs=1, chunk_size=30000, progress_bar=True)

    export_report(waveform_extractor, output_folder, **job_kwargs)
Example #18
def test_detect_peaks():

    repo = 'https://gin.g-node.org/NeuralEnsemble/ephy_testing_data'
    remote_path = 'mearec/mearec_test_10s.h5'
    local_path = download_dataset(repo=repo, remote_path=remote_path, local_folder=None)
    recording = MEArecRecordingExtractor(local_path)

    # by_channel

    noise_levels = get_noise_levels(recording, return_scaled=False)

    peaks = detect_peaks(recording, method='by_channel',
                         peak_sign='neg', detect_threshold=5, n_shifts=2,
                         chunk_size=10000, verbose=1, progress_bar=False, noise_levels=noise_levels)



    subset_peaks = select_peaks(peaks, max_peaks_per_channel=100)
    subset_peaks = select_peaks(peaks, 'smart_sampling', max_peaks_per_channel=100, noise_levels=noise_levels)

    assert len(subset_peaks) < len(peaks)
Example #19
def test_find_spikes_from_templates():

    repo = 'https://gin.g-node.org/NeuralEnsemble/ephy_testing_data'
    remote_path = 'mearec/mearec_test_10s.h5'
    local_path = download_dataset(repo=repo,
                                  remote_path=remote_path,
                                  local_folder=None)
    recording, gt_sorting = read_mearec(local_path)

    folder = 'waveforms_mearec'
    we = extract_waveforms(recording,
                           gt_sorting,
                           folder,
                           load_if_exists=True,
                           ms_before=1,
                           ms_after=2.,
                           max_spikes_per_unit=500,
                           return_scaled=False,
                           n_jobs=1,
                           chunk_size=10000)

    method_kwargs = {
        'waveform_extractor': we,
        'noise_levels': get_noise_levels(recording),
    }

    sampling_frequency = recording.get_sampling_frequency()

    result = {}

    for method in template_matching_methods.keys():
        spikes = find_spikes_from_templates(recording,
                                            method=method,
                                            method_kwargs=method_kwargs,
                                            n_jobs=1,
                                            chunk_size=30000,
                                            progress_bar=True)

        result[method] = NumpySorting.from_times_labels(
            spikes['sample_ind'], spikes['cluster_ind'], sampling_frequency)
Example #20
def test_run_sorter_docker():
    mearec_filename = download_dataset(remote_path='mearec/mearec_test_10s.h5',
                                       unlock=True)
    output_folder = 'sorting_tdc_docker'

    recording, sorting_true = read_mearec(mearec_filename)

    sorter_params = {'detect_threshold': 4.9}

    docker_image = 'spikeinterface/tridesclous-base:1.6.4-1'

    sorting = run_sorter('tridesclous',
                         recording,
                         output_folder=output_folder,
                         remove_existing_folder=True,
                         delete_output_folder=False,
                         verbose=True,
                         raise_error=True,
                         docker_image=docker_image,
                         with_output=False,
                         **sorter_params)
    assert sorting is None
Example #21
def setup_module():
    # repo and remote_path are module-level constants in the original test file
    repo = 'https://gin.g-node.org/NeuralEnsemble/ephy_testing_data'
    remote_path = 'mearec/mearec_test_10s.h5'
    local_path = download_dataset(repo=repo,
                                  remote_path=remote_path,
                                  local_folder=None)
    recording = MEArecRecordingExtractor(local_path)

    # detect and localize
    peaks = detect_peaks(
        recording,
        method='locally_exclusive',
        peak_sign='neg',
        detect_threshold=5,
        n_shifts=2,
        chunk_size=10000,
        verbose=1,
        progress_bar=True,
        localization_dict=dict(method='center_of_mass',
                               local_radius_um=150,
                               ms_before=0.1,
                               ms_after=0.3),
        #~ localization_dict=dict(method='monopolar_triangulation', local_radius_um=150, ms_before=0.1, ms_after=0.3, max_distance_um=1000),
    )
    np.save('mearec_detected_peaks.npy', peaks)
Example #22
def test_compute_spike_amplitudes_parallel():
    repo = 'https://gin.g-node.org/NeuralEnsemble/ephy_testing_data'
    remote_path = 'mearec/mearec_test_10s.h5'
    local_path = download_dataset(repo=repo,
                                  remote_path=remote_path,
                                  local_folder=None)
    recording = se.MEArecRecordingExtractor(local_path)
    sorting = se.MEArecSortingExtractor(local_path)

    folder = Path('mearec_waveforms_all')

    we = extract_waveforms(recording,
                           sorting,
                           folder,
                           ms_before=1.,
                           ms_after=2.,
                           max_spikes_per_unit=None,
                           n_jobs=1,
                           chunk_size=30000,
                           load_if_exists=True)

    amplitudes1 = compute_spike_amplitudes(we,
                                           peak_sign='neg',
                                           load_if_exists=False,
                                           outputs='concatenated',
                                           chunk_size=10000,
                                           n_jobs=1)
    # TODO : fix multi processing for spike amplitudes!!!!!!!
    amplitudes2 = compute_spike_amplitudes(we,
                                           peak_sign='neg',
                                           load_if_exists=False,
                                           outputs='concatenated',
                                           chunk_size=10000,
                                           n_jobs=2)

    assert np.array_equal(amplitudes1[0], amplitudes2[0])
Example #23
    def setUp(self):
        for remote_path in self.downloads:
            download_dataset(repo=gin_repo,
                             remote_path=remote_path,
                             local_folder=local_folder)
Example #24
def test_compute_spike_amplitudes():
    repo = 'https://gin.g-node.org/NeuralEnsemble/ephy_testing_data'
    remote_path = 'mearec/mearec_test_10s.h5'
    local_path = download_dataset(repo=repo,
                                  remote_path=remote_path,
                                  local_folder=None)
    recording = se.MEArecRecordingExtractor(local_path)
    sorting = se.MEArecSortingExtractor(local_path)

    folder = Path('mearec_waveforms')

    we = extract_waveforms(recording,
                           sorting,
                           folder,
                           ms_before=1.,
                           ms_after=2.,
                           max_spikes_per_unit=500,
                           n_jobs=1,
                           chunk_size=30000,
                           load_if_exists=False,
                           overwrite=True)

    amplitudes = compute_spike_amplitudes(we,
                                          peak_sign='neg',
                                          outputs='concatenated',
                                          chunk_size=10000,
                                          n_jobs=1)
    amplitudes = compute_spike_amplitudes(we,
                                          peak_sign='neg',
                                          outputs='by_unit',
                                          chunk_size=10000,
                                          n_jobs=1)

    gain = 0.1
    recording.set_channel_gains(gain)
    recording.set_channel_offsets(0)

    folder = Path('mearec_waveforms_scaled')

    we_scaled = extract_waveforms(recording,
                                  sorting,
                                  folder,
                                  ms_before=1.,
                                  ms_after=2.,
                                  max_spikes_per_unit=500,
                                  n_jobs=1,
                                  chunk_size=30000,
                                  load_if_exists=False,
                                  overwrite=True,
                                  return_scaled=True)

    amplitudes_scaled = compute_spike_amplitudes(we_scaled,
                                                 peak_sign='neg',
                                                 outputs='concatenated',
                                                 chunk_size=10000,
                                                 n_jobs=1,
                                                 return_scaled=True)
    amplitudes_unscaled = compute_spike_amplitudes(we_scaled,
                                                   peak_sign='neg',
                                                   outputs='concatenated',
                                                   chunk_size=10000,
                                                   n_jobs=1,
                                                   return_scaled=False)

    assert np.allclose(amplitudes_scaled[0], amplitudes_unscaled[0] * gain)

    # reload as an extension from we
    assert SpikeAmplitudesCalculator in we.get_available_extensions()
    assert we_scaled.is_extension('spike_amplitudes')
    sac = we.load_extension('spike_amplitudes')
    assert isinstance(sac, SpikeAmplitudesCalculator)
    assert sac._amplitudes is not None
    qmc = SpikeAmplitudesCalculator.load_from_folder(folder)
    assert qmc._amplitudes is not None
Example #25
import numpy as np
import matplotlib.pyplot as plt

import spikeinterface as si
import spikeinterface.extractors as se


##############################################################################
# Let's download datasets in two different formats:
#   * mearec: a simulated format based on HDF5; contains both a recording and a sorting; file based
#   * spike2: a file from a Spike2 acquisition device; contains a recording only; file based


spike2_file_path = si.download_dataset(remote_path='spike2/130322-1LY.smr')
print(spike2_file_path)


mearec_folder_path = si.download_dataset(remote_path='mearec/mearec_test_10s.h5')
print(mearec_folder_path)


##############################################################################
# Reading the spike2 file gives a single object.
#
# Note that internally this file contains 2 streams ('0' and '1'), so we need to specify which stream to read.

recording = se.read_spike2(spike2_file_path, stream_id='0')
print(recording)
print(type(recording))
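
##############################################################################
# The MEArec file can be read back in the same way. The following lines are
# not part of the original snippet; they are a minimal sketch using the
# standard read_mearec reader, which returns the recording and the
# ground-truth sorting together:

recording_mearec, sorting_mearec = se.read_mearec(mearec_folder_path)
print(recording_mearec)
print(sorting_mearec)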
Example #26
import matplotlib.pyplot as plt
import numpy as np

from spikeinterface import download_dataset
from spikeinterface import WaveformExtractor, extract_waveforms
import spikeinterface.extractors as se

##############################################################################
# First let's use the repo https://gin.g-node.org/NeuralEnsemble/ephy_testing_data
# to download a MEArec dataset. It is a simulated dataset that contains "ground truth"
# sorting information:

repo = 'https://gin.g-node.org/NeuralEnsemble/ephy_testing_data'
remote_path = 'mearec/mearec_test_10s.h5'
local_path = download_dataset(repo=repo,
                              remote_path=remote_path,
                              local_folder=None)

##############################################################################
# Let's now instantiate the recording and sorting objects:

recording = se.MEArecRecordingExtractor(local_path)
print(recording)
sorting = se.MEArecSortingExtractor(local_path)
print(sorting)

###############################################################################
# The MEArec dataset already contains a probe object that you can retrieve
# and plot:

probe = recording.get_probe()
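
##############################################################################
# A minimal plotting sketch (not in the original snippet), assuming the
# probeinterface package that SpikeInterface uses for probe handling is
# installed:

from probeinterface.plotting import plot_probe

plot_probe(probe)
plt.show()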
Example #27
# Import

import numpy as np
import matplotlib.pyplot as plt

import spikeinterface as si
import spikeinterface.extractors as se
import spikeinterface.sorters as ss
import spikeinterface.comparison as sc
import spikeinterface.widgets as sw

##############################################################################
# First, let's download a simulated dataset
#  from the repo 'https://gin.g-node.org/NeuralEnsemble/ephy_testing_data'

local_path = si.download_dataset(remote_path='mearec/mearec_test_10s.h5')
recording, sorting = se.read_mearec(local_path)
print(recording)
print(sorting)

#############################################################################
# Then run two spike sorters and compare their output.

sorting_HS = ss.run_herdingspikes(recording)
sorting_TDC = ss.run_tridesclous(recording)

#############################################################################
# The :code:`compare_two_sorters` function allows us to compare the spike
# sorting output. It returns a :code:`SortingComparison` object, with methods
# to inspect the comparison output easily. The comparison matches the
# units by comparing the agreement between unit spike trains.
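
##############################################################################
# A minimal sketch of that comparison (not in the original snippet; keyword
# names may vary slightly between SpikeInterface versions):

cmp_HS_TDC = sc.compare_two_sorters(sorting1=sorting_HS, sorting2=sorting_TDC)

# the agreement scores between matched units can be inspected directly
print(cmp_HS_TDC.agreement_scores)

# or visualized with the widgets module
sw.plot_agreement_matrix(cmp_HS_TDC)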
Example #28
def test_get_spike_amplitudes():
    repo = 'https://gin.g-node.org/NeuralEnsemble/ephy_testing_data'
    remote_path = 'mearec/mearec_test_10s.h5'
    local_path = download_dataset(repo=repo,
                                  remote_path=remote_path,
                                  local_folder=None)
    recording = se.MEArecRecordingExtractor(local_path)
    sorting = se.MEArecSortingExtractor(local_path)

    folder = Path('mearec_waveforms')

    we = extract_waveforms(recording,
                           sorting,
                           folder,
                           ms_before=1.,
                           ms_after=2.,
                           max_spikes_per_unit=500,
                           n_jobs=1,
                           chunk_size=30000,
                           load_if_exists=False,
                           overwrite=True)

    amplitudes = get_spike_amplitudes(we,
                                      peak_sign='neg',
                                      outputs='concatenated',
                                      chunk_size=10000,
                                      n_jobs=1)
    amplitudes = get_spike_amplitudes(we,
                                      peak_sign='neg',
                                      outputs='by_unit',
                                      chunk_size=10000,
                                      n_jobs=1)

    gain = 0.1
    recording.set_channel_gains(gain)
    recording.set_channel_offsets(0)

    folder = Path('mearec_waveforms_scaled')

    we_scaled = extract_waveforms(recording,
                                  sorting,
                                  folder,
                                  ms_before=1.,
                                  ms_after=2.,
                                  max_spikes_per_unit=500,
                                  n_jobs=1,
                                  chunk_size=30000,
                                  load_if_exists=False,
                                  overwrite=True,
                                  return_scaled=True)

    amplitudes_scaled = get_spike_amplitudes(we_scaled,
                                             peak_sign='neg',
                                             outputs='concatenated',
                                             chunk_size=10000,
                                             n_jobs=1,
                                             return_scaled=True)
    amplitudes_unscaled = get_spike_amplitudes(we_scaled,
                                               peak_sign='neg',
                                               outputs='concatenated',
                                               chunk_size=10000,
                                               n_jobs=1,
                                               return_scaled=False)

    assert np.allclose(amplitudes_scaled[0], amplitudes_unscaled[0] * gain)