예제 #1
0
def test_DataIO_probes():
    """Check probe download and persistence of the probe filename."""
    # initialize dataio: always start from a clean working directory
    if os.path.exists('test_DataIO'):
        shutil.rmtree('test_DataIO')

    dataio = DataIO(dirname='test_DataIO')
    print(dataio)

    localdir, filenames, params = download_dataset(name='olfactory_bulb')
    dataio.set_data_source(type='RawData', filenames=filenames,  **params)

    # downloading by full filename or by bare probe name must be equivalent
    probe_filename = 'A4x8-5mm-100-400-413-A32.prb'
    dataio.download_probe(probe_filename)
    dataio.download_probe('A4x8-5mm-100-400-413-A32')

    # first channel group of this probe has 8 channels
    assert dataio.nb_channel(0) == 8
    assert dataio.info['probe_filename'] == probe_filename

    # reopen the existing directory to check the info was persisted
    dataio = DataIO(dirname='test_DataIO')
    print(dataio)
예제 #2
0
def test_DataIO_probes():
    """Check probe download using a 'manufacturer/name' style probe path."""
    # initialize dataio: always start from a clean working directory
    if os.path.exists('test_DataIO'):
        shutil.rmtree('test_DataIO')

    dataio = DataIO(dirname='test_DataIO')
    print(dataio)

    localdir, filenames, params = download_dataset(name='olfactory_bulb')
    dataio.set_data_source(type='RawData', filenames=filenames,  **params)

    # downloading by full path or by path without extension must be equivalent
    probe_filename = 'neuronexus/A4x8-5mm-100-400-413-A32.prb'
    dataio.download_probe(probe_filename)
    dataio.download_probe('neuronexus/A4x8-5mm-100-400-413-A32')

    # first channel group of this probe has 8 channels
    assert dataio.nb_channel(0) == 8
    # only the basename is stored in the info dict
    assert dataio.info['probe_filename'] == probe_filename.split('/')[-1]

    # reopen the existing directory to check the info was persisted
    dataio = DataIO(dirname='test_DataIO')
    print(dataio)
예제 #3
0
def test_DataIO():
    """Create a DataIO, iterate signal chunks, then reopen and iterate again."""

    def _check_chunks(io):
        # every chunk must be 1024 samples x 14 channels
        for seg_num in range(io.nb_segment):
            for i_stop, sigs_chunk in io.iter_over_chunk(seg_num=seg_num, chunksize=1024):
                assert sigs_chunk.shape[0] == 1024
                assert sigs_chunk.shape[1] == 14

    # initialize dataio: always start from a clean working directory
    if os.path.exists('test_DataIO'):
        shutil.rmtree('test_DataIO')

    dataio = DataIO(dirname='test_DataIO')
    print(dataio)

    localdir, filenames, params = download_dataset(name='olfactory_bulb')
    dataio.set_data_source(type='RawData', filenames=filenames,  **params)
    dataio.set_manual_channel_group(range(14))

    _check_chunks(dataio)

    # reopen the existing directory: the same data must come back
    dataio = DataIO(dirname='test_DataIO')
    print(dataio)

    _check_chunks(dataio)
def test_make_catalogue():
    """End-to-end catalogue construction on the olfactory_bulb dataset.

    Runs the full chain: preprocessing, noise estimation, waveform
    extraction, PCA projection, k-means clustering, small-cluster
    trashing and catalogue creation. Each timed step prints its duration.
    """
    # start from a clean working directory
    if os.path.exists('test_catalogueconstructor'):
        shutil.rmtree('test_catalogueconstructor')

    dataio = DataIO(dirname='test_catalogueconstructor')
    localdir, filenames, params = download_dataset(name='olfactory_bulb')
    dataio.set_data_source(type='RawData', filenames=filenames, **params)
    #~ dataio.set_manual_channel_group(range(14))
    # restrict the analysis to a 5-channel group
    dataio.set_manual_channel_group([5, 6, 7, 8, 9])

    catalogueconstructor = CatalogueConstructor(dataio=dataio)

    catalogueconstructor.set_preprocessor_params(chunksize=1024,
            
                                    #signal preprocessor
                                    highpass_freq=300,
                                    backward_chunksize=1280,
                                    
                                    #peak detector
                                    peak_sign='-', relative_threshold=7, peak_span=0.0005,
                                    )

    # noise statistics from the first 10 s of segment 0
    t1 = time.perf_counter()
    catalogueconstructor.estimate_signals_noise(seg_num=0, duration=10.)
    t2 = time.perf_counter()
    print('estimate_signals_noise', t2-t1)

    # filter the signals and detect peaks
    t1 = time.perf_counter()
    catalogueconstructor.run_signalprocessor()
    t2 = time.perf_counter()
    print('run_signalprocessor', t2-t1)

    print(catalogueconstructor)

    t1 = time.perf_counter()
    catalogueconstructor.extract_some_waveforms(n_left=-12, n_right=15,  nb_max=10000)
    t2 = time.perf_counter()
    print('extract_some_waveforms', t2-t1)

    # PCA projection of the extracted waveforms
    t1 = time.perf_counter()
    catalogueconstructor.project(method='pca', n_components=12, batch_size=16384)
    t2 = time.perf_counter()
    print('project', t2-t1)

    # cluster in feature space
    t1 = time.perf_counter()
    catalogueconstructor.find_clusters(method='kmeans', n_clusters=13)
    t2 = time.perf_counter()
    print('find_clusters', t2-t1)

    # drop clusters with too few spikes, then build the final catalogue
    catalogueconstructor.trash_small_cluster()

    catalogueconstructor.make_catalogue()
예제 #5
0
def test_RawDataSource():
    """Check the known geometry of the olfactory_bulb raw data source."""
    localdir, filenames, params = download_dataset(name='olfactory_bulb')
    datasource = RawDataSource(filenames=filenames, **params)

    assert datasource.total_channel == 16
    assert datasource.sample_rate == 10000.
    assert datasource.nb_segment == 3

    # a full-segment read must match the declared segment shape
    segment_shape = datasource.get_segment_shape(0)
    assert segment_shape == (150000, 16)
    data = datasource.get_signals_chunk(seg_num=0)
    assert data.shape == segment_shape
예제 #6
0
def test_get_auto_params_for_catalogue():
    """Smoke-test automatic parameter generation on the locust dataset."""
    # always start from a clean working directory
    if os.path.exists('test_cataloguetools'):
        shutil.rmtree('test_cataloguetools')

    dataio = DataIO(dirname='test_cataloguetools')
    #~ localdir, filenames, params = download_dataset(name='olfactory_bulb')
    localdir, filenames, params = download_dataset(name='locust')
    dataio.set_data_source(type='RawData', filenames=filenames, **params)

    auto_params = get_auto_params_for_catalogue(dataio)
    pprint(auto_params)
예제 #7
0
def test_RawDataSource():
    """Check the known geometry of the olfactory_bulb raw data source."""
    from tridesclous.datasource import RawDataSource

    localdir, filenames, params = download_dataset(name='olfactory_bulb')
    datasource = RawDataSource(filenames=filenames, **params)

    assert datasource.total_channel == 16
    assert datasource.sample_rate == 10000.
    assert datasource.nb_segment == 3

    # a full-segment read must match the declared segment shape
    segment_shape = datasource.get_segment_shape(0)
    assert segment_shape == (150000, 16)
    data = datasource.get_signals_chunk(seg_num=0)
    assert data.shape == segment_shape
예제 #8
0
def test_DataIO():
    """Exercise channel-group handling and chunked iteration of DataIO."""

    def _check_chunks(io):
        # every chunk must be 1024 samples x 14 channels
        for seg_num in range(io.nb_segment):
            for i_stop, sigs_chunk in io.iter_over_chunk(seg_num=seg_num,
                                                         chunksize=1024):
                assert sigs_chunk.shape[0] == 1024
                assert sigs_chunk.shape[1] == 14

    # initialize dataio: always start from a clean working directory
    if os.path.exists('test_DataIO'):
        shutil.rmtree('test_DataIO')

    dataio = DataIO(dirname='test_DataIO')
    print(dataio)

    localdir, filenames, params = download_dataset(name='olfactory_bulb')
    dataio.set_data_source(type='RawData', filenames=filenames, **params)

    # one group of 14 channels with a dummy vertical geometry
    channels = list(range(14))
    geometry = {chan: [0, idx] for idx, chan in enumerate(channels)}
    channel_groups = {0: {'channels': range(14), 'geometry': geometry}}
    dataio.set_channel_groups(channel_groups)

    # same call but without geometry
    channel_groups = {0: {'channels': range(4)}}
    dataio.set_channel_groups(channel_groups)

    # append one extra group on top of the existing ones
    dataio.add_one_channel_group(channels=range(4, 8), chan_grp=5)

    channel_groups = {0: {'channels': range(14)}}
    dataio.set_channel_groups(channel_groups)

    _check_chunks(dataio)

    # reopen the existing directory: the same data must come back
    dataio = DataIO(dirname='test_DataIO')
    print(dataio)

    _check_chunks(dataio)
def compare_nb_waveforms():
    """Visually compare median/mean waveforms for several sample sizes.

    Extracts waveforms with nb_max in {100, 1000, 10000} and plots the
    per-sample median and mean on two matplotlib axes. Not an automated
    test: it opens a blocking figure with pyplot.show().
    """
    # start from a clean working directory
    if os.path.exists('test_catalogueconstructor'):
        shutil.rmtree('test_catalogueconstructor')
        
    dataio = DataIO(dirname='test_catalogueconstructor')
    localdir, filenames, params = download_dataset(name='olfactory_bulb')
    dataio.set_data_source(type='RawData', filenames=filenames, **params)
    dataio.set_manual_channel_group(range(14))

    catalogueconstructor = CatalogueConstructor(dataio=dataio)

    catalogueconstructor.set_preprocessor_params(chunksize=1024,
            
                                #signal preprocessor
                                highpass_freq=300,
                                backward_chunksize=1280,
                                
                                #peak detector
                                peak_sign='-', relative_threshold=7, peak_span=0.0005,
                                )

    # noise statistics, then filtering + peak detection (both timed)
    t1 = time.perf_counter()
    catalogueconstructor.estimate_signals_noise(seg_num=0, duration=10.)
    t2 = time.perf_counter()
    print('estimate_signals_noise', t2-t1)
    
    t1 = time.perf_counter()
    catalogueconstructor.run_signalprocessor()
    t2 = time.perf_counter()
    print('run_signalprocessor', t2-t1)
    
    print(catalogueconstructor)
    
    fig, axs = pyplot.subplots(nrows=2)
    
    colors = ['r', 'g', 'b']
    for i, nb_max in enumerate([100, 1000, 10000]):
        t1 = time.perf_counter()
        catalogueconstructor.extract_some_waveforms(n_left=-20, n_right=30,  nb_max=nb_max)
        t2 = time.perf_counter()
        print('extract_some_waveforms', nb_max,  t2-t1)
        print(catalogueconstructor.some_waveforms.shape)
        wf = catalogueconstructor.some_waveforms
        # flatten (n_spike, width, n_chan) to (n_spike, width*n_chan) for plotting
        wf = wf.swapaxes(1,2).reshape(wf.shape[0], -1)
        axs[0].plot(np.median(wf, axis=0), color=colors[i], label='nb_max {}'.format(nb_max))
        
        axs[1].plot(np.mean(wf, axis=0), color=colors[i], label='nb_max {}'.format(nb_max))
    
    axs[0].legend()
    axs[0].set_title('median')
    axs[1].set_title('mean')
    pyplot.show()        
예제 #10
0
def make_catalogue():
    """Build a catalogue for the Peeler with a fixed parameter dict.

    NOTE(review): relies on module-level ``dirname`` and ``channels``
    defined elsewhere in the file — confirm they are set before calling.
    """
    # start from a clean working directory
    if os.path.exists(dirname):
        shutil.rmtree(dirname)

    dataio = DataIO(dirname=dirname)
    localdir, filenames, params = download_dataset(name='olfactory_bulb')
    dataio.set_data_source(type='RawData', filenames=filenames, **params)
    dataio.add_one_channel_group(channels=channels)

    cc = CatalogueConstructor(dataio=dataio)

    # full parameter set consumed by apply_all_catalogue_steps
    params = {
        'duration': 300.,
        'preprocessor': {
            'highpass_freq': 300.,
            'chunksize': 1024,
            'lostfront_chunksize': 100,
        },
        'peak_detector': {
            'peak_sign': '-',
            'relative_threshold': 7.,
            'peak_span': 0.0005,
            #~ 'peak_span' : 0.000,
        },
        'extract_waveforms': {
            'n_left': -25,
            'n_right': 40,
            'nb_max': 10000,
        },
        'clean_waveforms': {
            'alien_value_threshold': 60.,
        },
        'noise_snippet': {
            'nb_snippet': 300,
        },
        'feature_method': 'global_pca',
        'feature_kargs': {
            'n_components': 20
        },
        'cluster_method': 'kmeans',
        'cluster_kargs': {
            'n_clusters': 5
        },
        'clean_cluster': False,
        'clean_cluster_kargs': {},
    }

    apply_all_catalogue_steps(cc, params, verbose=True)

    # order by RMS, drop the last cluster, then freeze the catalogue
    cc.order_clusters(by='waveforms_rms')
    cc.move_cluster_to_trash(4)
    cc.make_catalogue_for_peeler()
예제 #11
0
def test_apply_all_catalogue_steps():
    """Run the whole catalogue pipeline with auto-generated parameters."""
    # always start from a clean working directory
    if os.path.exists('test_cataloguetools'):
        shutil.rmtree('test_cataloguetools')

    dataio = DataIO(dirname='test_cataloguetools')
    #~ localdir, filenames, params = download_dataset(name='olfactory_bulb')
    localdir, filenames, params = download_dataset(name='locust')
    dataio.set_data_source(type='RawData', filenames=filenames, **params)

    catalogue_params = get_auto_params_for_catalogue(dataio)

    cc = CatalogueConstructor(dataio, chan_grp=0)
    apply_all_catalogue_steps(cc, catalogue_params, verbose=True)
def compare_nb_waveforms():
    """Visually compare median/mean waveforms for several sample sizes.

    Variant using add_one_channel_group and lowpass/lostfront preprocessor
    options. Not an automated test: opens a blocking figure with plt.show().
    """
    # start from a clean working directory
    if os.path.exists('test_catalogueconstructor'):
        shutil.rmtree('test_catalogueconstructor')

    dataio = DataIO(dirname='test_catalogueconstructor')
    localdir, filenames, params = download_dataset(name='olfactory_bulb')
    dataio.set_data_source(type='RawData', filenames=filenames, **params)
    dataio.add_one_channel_group(channels=range(14), chan_grp=0)

    catalogueconstructor = CatalogueConstructor(dataio=dataio)

    catalogueconstructor.set_preprocessor_params(
        chunksize=1024,

        #signal preprocessor
        highpass_freq=300.,
        lowpass_freq=5000.,
        lostfront_chunksize=128,

        #peak detector
        peak_sign='-',
        relative_threshold=7,
        peak_span=0.0005,
    )

    # noise statistics, then filtering + peak detection (both timed)
    t1 = time.perf_counter()
    catalogueconstructor.estimate_signals_noise(seg_num=0, duration=10.)
    t2 = time.perf_counter()
    print('estimate_signals_noise', t2 - t1)

    t1 = time.perf_counter()
    catalogueconstructor.run_signalprocessor()
    t2 = time.perf_counter()
    print('run_signalprocessor', t2 - t1)

    print(catalogueconstructor)

    fig, axs = plt.subplots(nrows=2)

    colors = ['r', 'g', 'b']
    for i, nb_max in enumerate([100, 1000, 10000]):
        t1 = time.perf_counter()
        catalogueconstructor.extract_some_waveforms(n_left=-20,
                                                    n_right=30,
                                                    nb_max=nb_max)
        t2 = time.perf_counter()
        print('extract_some_waveforms', nb_max, t2 - t1)
        print(catalogueconstructor.some_waveforms.shape)
        wf = catalogueconstructor.some_waveforms
        # flatten (n_spike, width, n_chan) to (n_spike, width*n_chan) for plotting
        wf = wf.swapaxes(1, 2).reshape(wf.shape[0], -1)
        axs[0].plot(np.median(wf, axis=0),
                    color=colors[i],
                    label='nb_max {}'.format(nb_max))

        axs[1].plot(np.mean(wf, axis=0),
                    color=colors[i],
                    label='nb_max {}'.format(nb_max))

    axs[0].legend()
    axs[0].set_title('median')
    axs[1].set_title('mean')
    plt.show()
예제 #13
0
def compare_nb_waveforms():
    """Visually compare median/mean waveforms for several sample sizes.

    Variant for the newer CatalogueConstructor API (set_global_params /
    set_peak_detector_params / sample_some_peaks / get_some_waveforms).
    Not an automated test: opens a blocking figure with plt.show().
    """
    # start from a clean working directory
    if os.path.exists('test_catalogueconstructor'):
        shutil.rmtree('test_catalogueconstructor')

    dataio = DataIO(dirname='test_catalogueconstructor')
    localdir, filenames, params = download_dataset(name='olfactory_bulb')
    dataio.set_data_source(type='RawData', filenames=filenames, **params)
    dataio.add_one_channel_group(channels=range(14), chan_grp=0)

    cc = CatalogueConstructor(dataio=dataio)

    cc.set_global_params(
        chunksize=1024,
        memory_mode='ram',
        mode='dense',
        n_jobs=1,
        #~ adjacency_radius_um=None,
    )

    cc.set_preprocessor_params(
        #signal preprocessor
        highpass_freq=300,
        lowpass_freq=5000.,
        common_ref_removal=False,
        smooth_size=0,
        lostfront_chunksize=None)

    cc.set_peak_detector_params(
        #peak detector
        method='global',
        engine='numpy',
        peak_sign='-',
        relative_threshold=7,
        peak_span_ms=0.5,
        #~ adjacency_radius_um=None,
    )

    # noise statistics, then filtering + peak detection (both timed)
    t1 = time.perf_counter()
    cc.estimate_signals_noise(seg_num=0, duration=10.)
    t2 = time.perf_counter()
    print('estimate_signals_noise', t2 - t1)

    t1 = time.perf_counter()
    cc.run_signalprocessor()
    t2 = time.perf_counter()
    print('run_signalprocessor', t2 - t1)

    print(cc)

    fig, axs = plt.subplots(nrows=2)

    # waveform window in milliseconds around each peak
    cc.set_waveform_extractor_params(wf_left_ms=-2.0, wf_right_ms=3.0)

    t1 = time.perf_counter()
    cc.sample_some_peaks(mode='rand', nb_max=5000)
    t2 = time.perf_counter()
    print('sample_some_peaks', t2 - t1)

    colors = ['r', 'g', 'b', 'y']
    for i, nb_max in enumerate([100, 500, 1000, 2000]):

        # resample peaks at each size, then pull the matching waveforms
        cc.sample_some_peaks(mode='rand', nb_max=nb_max)
        #~ catalogueconstructor.extract_some_waveforms(wf_left_ms=-2.0, wf_right_ms=3.0,  nb_max=nb_max)
        #~ print(catalogueconstructor.some_waveforms.shape)
        t1 = time.perf_counter()
        wf = cc.get_some_waveforms()
        t2 = time.perf_counter()
        print('get_some_waveforms', nb_max, t2 - t1)

        #~ wf = catalogueconstructor.some_waveforms
        # flatten (n_spike, width, n_chan) to (n_spike, width*n_chan) for plotting
        wf = wf.swapaxes(1, 2).reshape(wf.shape[0], -1)
        axs[0].plot(np.median(wf, axis=0),
                    color=colors[i],
                    label='nb_max {}'.format(nb_max))

        axs[1].plot(np.mean(wf, axis=0),
                    color=colors[i],
                    label='nb_max {}'.format(nb_max))

    axs[0].legend()
    axs[0].set_title('median')
    axs[1].set_title('mean')
    plt.show()
예제 #14
0
def test_catalogue_constructor():
    """Run CatalogueConstructor across memory_mode x mode combinations.

    For each of ram/memmap and dense/sparse: configure preprocessing and
    peak detection, run the signal processor, re-detect peaks, clean and
    sample peaks, extract noise snippets, project features and cluster.
    NOTE(review): the auto_split/trash/merge block at the end sits inside
    the memory_mode loop but outside the mode loop — presumably
    intentional (runs once per memory_mode), worth confirming.
    """
    # start from a clean working directory
    if os.path.exists('test_catalogueconstructor'):
        shutil.rmtree('test_catalogueconstructor')

    dataio = DataIO(dirname='test_catalogueconstructor')
    localdir, filenames, params = download_dataset(name='olfactory_bulb')
    #~ localdir, filenames, params = download_dataset(name='locust')
    dataio.set_data_source(type='RawData', filenames=filenames, **params)

    channels = range(14)
    #~ channels=list(range(4))
    dataio.add_one_channel_group(channels=channels, chan_grp=0)

    cc = CatalogueConstructor(dataio=dataio)

    for memory_mode in ['ram', 'memmap']:
        for mode in ['dense', 'sparse']:

            print('*' * 5)
            print('memory_mode', memory_mode, 'mode', mode)

            # dense -> global-threshold detector, no adjacency restriction;
            # sparse -> geometrical detector limited by an adjacency radius
            if mode == 'dense':
                peak_engine = 'numpy'
                peak_method = 'global'
                adjacency_radius_um = None
            elif mode == 'sparse':
                peak_engine = 'numpy'
                peak_method = 'geometrical'
                adjacency_radius_um = 450.

            cc.set_global_params(
                chunksize=1024,
                memory_mode=memory_mode,
                mode=mode,
                n_jobs=1,
                #~ adjacency_radius_um=adjacency_radius_um,
            )

            cc.set_preprocessor_params(
                #signal preprocessor
                highpass_freq=300,
                lowpass_freq=5000.,
                common_ref_removal=False,
                smooth_size=0,
                lostfront_chunksize=None)

            cc.set_peak_detector_params(
                #peak detector
                method=peak_method,
                engine=peak_engine,
                peak_sign='-',
                relative_threshold=7,
                peak_span_ms=0.5,
                adjacency_radius_um=adjacency_radius_um,
            )

            # noise statistics from the first 10 s of segment 0
            t1 = time.perf_counter()
            cc.estimate_signals_noise(seg_num=0, duration=10.)
            t2 = time.perf_counter()
            print('estimate_signals_noise', t2 - t1)

            # filter signals and detect peaks on 10 s of data
            t1 = time.perf_counter()
            cc.run_signalprocessor(duration=10., detect_peak=True)
            t2 = time.perf_counter()
            print('run_signalprocessor_loop', t2 - t1)

            for seg_num in range(dataio.nb_segment):
                mask = cc.all_peaks['segment'] == seg_num
                print('seg_num', seg_num, 'nb peak', np.sum(mask))

            # redetect peak with a lower threshold / wider span
            cc.re_detect_peak(method=peak_method,
                              engine=peak_engine,
                              peak_sign='-',
                              relative_threshold=5,
                              peak_span_ms=0.7,
                              adjacency_radius_um=adjacency_radius_um)
            for seg_num in range(dataio.nb_segment):
                mask = cc.all_peaks['segment'] == seg_num
                print('seg_num', seg_num, 'nb peak', np.sum(mask))

            # waveform window in samples around each peak
            cc.set_waveform_extractor_params(n_left=-25, n_right=40)

            # reject alien peaks with both cleaning strategies
            t1 = time.perf_counter()
            cc.clean_peaks(alien_value_threshold=100,
                           mode='extremum_amplitude')
            t2 = time.perf_counter()
            print('clean_peaks extremum_amplitude', t2 - t1)

            t1 = time.perf_counter()
            cc.clean_peaks(alien_value_threshold=100, mode='full_waveform')
            t2 = time.perf_counter()
            print('clean_peaks full_waveforms', t2 - t1)

            t1 = time.perf_counter()
            cc.sample_some_peaks(mode='rand', nb_max=5000)
            t2 = time.perf_counter()
            print('sample_some_peaks', t2 - t1)

            print(cc)

            #extract_some_noise
            t1 = time.perf_counter()
            cc.extract_some_noise(nb_snippet=400)
            t2 = time.perf_counter()
            print('extract_some_noise', t2 - t1)

            # feature extraction + clustering strategy depends on mode
            if mode == 'dense':
                # PCA
                t1 = time.perf_counter()
                cc.extract_some_features(method='global_pca', n_components=12)
                t2 = time.perf_counter()
                print('project pca', t2 - t1)

                # cluster
                t1 = time.perf_counter()
                cc.find_clusters(method='kmeans', n_clusters=11)
                t2 = time.perf_counter()
                print('find_clusters', t2 - t1)

            elif mode == 'sparse':

                # PCA
                t1 = time.perf_counter()
                cc.extract_some_features(method='pca_by_channel',
                                         n_components_by_channel=3)
                t2 = time.perf_counter()
                print('project pca', t2 - t1)

                # cluster
                t1 = time.perf_counter()
                cc.find_clusters(method='pruningshears')
                t2 = time.perf_counter()
                print('find_clusters', t2 - t1)

            print(cc)

        # cluster curation steps (once per memory_mode, after both modes)
        t1 = time.perf_counter()
        cc.auto_split_cluster()
        t2 = time.perf_counter()
        print('auto_split_cluster', t2 - t1)

        t1 = time.perf_counter()
        cc.trash_not_aligned()
        t2 = time.perf_counter()
        print('trash_not_aligned', t2 - t1)

        t1 = time.perf_counter()
        cc.auto_merge_cluster()
        t2 = time.perf_counter()
        print('auto_merge_cluster', t2 - t1)

        t1 = time.perf_counter()
        cc.trash_low_extremum()
        t2 = time.perf_counter()
        print('trash_low_extremum', t2 - t1)

        t1 = time.perf_counter()
        cc.trash_small_cluster()
        t2 = time.perf_counter()
        print('trash_small_cluster', t2 - t1)
def test_catalogue_constructor():
    """Exercise the (older) CatalogueConstructor API in ram and memmap modes.

    Uses the legacy per-segment signal-processor loop and the legacy
    extract_some_waveforms / find_good_limits / project workflow.
    """
    # start from a clean working directory
    if os.path.exists('test_catalogueconstructor'):
        shutil.rmtree('test_catalogueconstructor')
        
    dataio = DataIO(dirname='test_catalogueconstructor')
    localdir, filenames, params = download_dataset(name='olfactory_bulb')
    dataio.set_data_source(type='RawData', filenames=filenames, **params)
    
    channels=range(14)
    dataio.set_manual_channel_group(channels, chan_grp=0)
    
    catalogueconstructor = CatalogueConstructor(dataio=dataio)
    
    for memory_mode in ['ram', 'memmap']:
    #~ for memory_mode in ['memmap']:
    
        print()
        print(memory_mode)
        catalogueconstructor.set_preprocessor_params(chunksize=1024,
                memory_mode=memory_mode,
                
                #signal preprocessor
                highpass_freq=300,
                backward_chunksize=1280,
                #~ backward_chunksize=1024*2,
                
                #peak detector
                peakdetector_engine='numpy',
                peak_sign='-', relative_threshold=7, peak_span=0.0005,
                
                #waveformextractor
                #~ n_left=-20, n_right=30, 
                
                )
        # noise statistics from the first 10 s of segment 0
        t1 = time.perf_counter()
        catalogueconstructor.estimate_signals_noise(seg_num=0, duration=10.)
        t2 = time.perf_counter()
        print('estimate_signals_noise', t2-t1)
        
        # legacy API: process each segment explicitly, then finalize
        t1 = time.perf_counter()
        for seg_num in range(dataio.nb_segment):
            #~ print('seg_num', seg_num)
            catalogueconstructor.run_signalprocessor_loop_one_segment(seg_num=seg_num, duration=10.)
        t2 = time.perf_counter()
        print('run_signalprocessor_loop', t2-t1)

        t1 = time.perf_counter()
        catalogueconstructor.finalize_signalprocessor_loop()
        t2 = time.perf_counter()
        print('finalize_signalprocessor_loop', t2-t1)
        
        # report peak count per segment
        for seg_num in range(dataio.nb_segment):
            mask = catalogueconstructor.all_peaks['segment']==seg_num
            print('seg_num', seg_num, np.sum(mask))
        
        # random sample of waveforms with an explicit window
        t1 = time.perf_counter()
        catalogueconstructor.extract_some_waveforms(n_left=-25, n_right=40, mode='rand', nb_max=5000)
        t2 = time.perf_counter()
        print('extract_some_waveforms rand', t2-t1)
        print(catalogueconstructor.some_waveforms.shape)

        # shrink the window to the automatically-found good limits
        t1 = time.perf_counter()
        catalogueconstructor.find_good_limits()
        t2 = time.perf_counter()
        print('find_good_limits', t2-t1)
        print(catalogueconstructor.some_waveforms.shape)

        # re-extract with the auto limits (n_left/n_right None)
        t1 = time.perf_counter()
        catalogueconstructor.extract_some_waveforms(n_left=None, n_right=None, mode='rand', nb_max=2000)
        t2 = time.perf_counter()
        print('extract_some_waveforms rand', t2-t1)
        print(catalogueconstructor.some_waveforms.shape)


        #~ break


        
        # PCA
        t1 = time.perf_counter()
        catalogueconstructor.project(method='pca', n_components=7, batch_size=16384)
        t2 = time.perf_counter()
        print('project pca', t2-t1)

        # peak_max
        #~ t1 = time.perf_counter()
        #~ catalogueconstructor.project(method='peak_max')
        #~ t2 = time.perf_counter()
        #~ print('project peak_max', t2-t1)
        #~ print(catalogueconstructor.some_features.shape)

        # extraction by explicit index rather than random sampling
        t1 = time.perf_counter()
        catalogueconstructor.extract_some_waveforms(index=np.arange(1000))
        t2 = time.perf_counter()
        print('extract_some_waveforms others', t2-t1)
        print(catalogueconstructor.some_waveforms.shape)

        
        # cluster
        t1 = time.perf_counter()
        catalogueconstructor.find_clusters(method='kmeans', n_clusters=11)
        t2 = time.perf_counter()
        print('find_clusters', t2-t1)
def test_catalogue_constructor():
    """Exercise CatalogueConstructor in ram and memmap memory modes.

    Configures preprocessing and peak detection, runs the signal
    processor, re-detects peaks, extracts/cleans waveforms, extracts
    noise snippets, projects with global PCA and clusters with k-means.
    Each timed step prints its duration.
    """
    # start from a clean working directory
    if os.path.exists('test_catalogueconstructor'):
        shutil.rmtree('test_catalogueconstructor')

    dataio = DataIO(dirname='test_catalogueconstructor')
    localdir, filenames, params = download_dataset(name='olfactory_bulb')
    #~ localdir, filenames, params = download_dataset(name='locust')
    dataio.set_data_source(type='RawData', filenames=filenames, **params)

    channels = range(14)
    #~ channels=list(range(4))
    dataio.add_one_channel_group(channels=channels, chan_grp=0)

    catalogueconstructor = CatalogueConstructor(dataio=dataio)

    for memory_mode in ['ram', 'memmap']:
        #~ for memory_mode in ['memmap']:

        print()
        print(memory_mode)
        catalogueconstructor.set_preprocessor_params(
            chunksize=1024,
            memory_mode=memory_mode,

            #signal preprocessor
            highpass_freq=300,
            lowpass_freq=5000.,
            common_ref_removal=False,
            smooth_size=0,
            lostfront_chunksize=128,

            #peak detector
            peakdetector_engine='numpy',
            peak_sign='-',
            relative_threshold=7,
            peak_span=0.0005,

            #waveformextractor
            #~ n_left=-20, n_right=30,
        )
        # noise statistics from the first 10 s of segment 0
        t1 = time.perf_counter()
        catalogueconstructor.estimate_signals_noise(seg_num=0, duration=10.)
        t2 = time.perf_counter()
        print('estimate_signals_noise', t2 - t1)

        # BUG FIX: t1 was commented out together with the old per-segment
        # loop, so the printed duration wrongly included the
        # estimate_signals_noise step. Restart the timer here.
        t1 = time.perf_counter()
        catalogueconstructor.run_signalprocessor(duration=10.,
                                                 detect_peak=True)
        t2 = time.perf_counter()
        print('run_signalprocessor_loop', t2 - t1)

        # report peak count per segment
        for seg_num in range(dataio.nb_segment):
            mask = catalogueconstructor.all_peaks['segment'] == seg_num
            print('seg_num', seg_num, 'nb peak', np.sum(mask))

        #redetect peak with a lower threshold / narrower span
        catalogueconstructor.re_detect_peak(peakdetector_engine='numpy',
                                            peak_sign='-',
                                            relative_threshold=5,
                                            peak_span=0.0002)
        for seg_num in range(dataio.nb_segment):
            mask = catalogueconstructor.all_peaks['segment'] == seg_num
            print('seg_num', seg_num, 'nb peak', np.sum(mask))

        # random sample of waveforms with an explicit window
        t1 = time.perf_counter()
        catalogueconstructor.extract_some_waveforms(n_left=-25,
                                                    n_right=40,
                                                    mode='rand',
                                                    nb_max=5000)
        t2 = time.perf_counter()
        print('extract_some_waveforms rand', t2 - t1)
        print(catalogueconstructor.some_waveforms.shape)

        # shrink the window to the automatically-found good limits
        t1 = time.perf_counter()
        catalogueconstructor.find_good_limits()
        t2 = time.perf_counter()
        print('find_good_limits', t2 - t1)
        print(catalogueconstructor.some_waveforms.shape)

        # re-extract with the auto limits (n_left/n_right None)
        t1 = time.perf_counter()
        catalogueconstructor.extract_some_waveforms(n_left=None,
                                                    n_right=None,
                                                    mode='rand',
                                                    nb_max=5000)
        t2 = time.perf_counter()
        print('extract_some_waveforms rand', t2 - t1)
        print(catalogueconstructor.some_waveforms.shape)

        # reject waveforms containing alien values
        t1 = time.perf_counter()
        catalogueconstructor.clean_waveforms(alien_value_threshold=60.)
        t2 = time.perf_counter()
        print('clean_waveforms', t2 - t1)

        print(catalogueconstructor)

        #extract_some_noise
        t1 = time.perf_counter()
        catalogueconstructor.extract_some_noise(nb_snippet=400)
        t2 = time.perf_counter()
        print('extract_some_noise', t2 - t1)

        # PCA
        t1 = time.perf_counter()
        catalogueconstructor.project(method='global_pca',
                                     n_components=7,
                                     batch_size=16384)
        t2 = time.perf_counter()
        print('project pca', t2 - t1)

        # cluster
        t1 = time.perf_counter()
        catalogueconstructor.find_clusters(method='kmeans', n_clusters=11)
        t2 = time.perf_counter()
        print('find_clusters', t2 - t1)

        print(catalogueconstructor)
예제 #17
0
def test_download_dataset():
    """Download both demo datasets (cached locally after the first run)."""
    for dataset_name in ('locust', 'olfactory_bulb'):
        download_dataset(name=dataset_name)
예제 #18
0
Here is a demo.


"""

import numpy as np

import tridesclous as tdc
import tridesclous.online

import pyqtgraph as pg
import pyacq

# get sigs: load the demo dataset and keep only the first file
localdir, filenames, params = tdc.download_dataset(name='olfactory_bulb')
filename = filenames[0]  #only first file
# raw file is flat binary: reshape to (n_samples, total_channel), cast to float32
sigs = np.fromfile(filename,
                   dtype=params['dtype']).reshape(-1, params['total_channel'])
sigs = sigs.astype('float32')
sample_rate = params['sample_rate']

# This will impact the latency
chunksize = 1024

# Here a convenient function to create a fake device in background
# by playing the signal more or less at the good speed
man = pyacq.create_manager(auto_close_at_exit=True)
ng0 = man.create_nodegroup()  # process in background
dev = tridesclous.online.make_pyacq_device_from_buffer(sigs,
                                                       sample_rate,
예제 #19
0
def test_download_dataset():
    """Download both demo datasets (cached locally after the first run)."""
    for dataset_name in ('locust', 'olfactory_bulb'):
        download_dataset(name=dataset_name)