Example #1
def traces(args):
    import numpy as np
    from vispy.app import run
    from phy.plot.traces import TraceView
    from phy.io.h5 import open_h5
    from phy.io.traces import read_kwd, read_dat

    # Load the raw traces, either from a Kwik .kwd file or from a flat binary file.
    path = args.file
    if path.endswith('.kwd'):
        f = open_h5(path)
        traces = read_kwd(f)
    elif path.endswith(('.dat', '.bin')):
        # Flat binary files carry no metadata, so the layout must be given explicitly.
        if not args.n_channels:
            raise ValueError("Please specify `--n-channels`.")
        if not args.dtype:
            raise ValueError("Please specify `--dtype`.")
        if not args.sample_rate:
            raise ValueError("Please specify `--sample-rate`.")
        n_channels = int(args.n_channels)
        dtype = np.dtype(args.dtype)
        traces = read_dat(path, dtype=dtype, n_channels=n_channels)
    else:
        raise ValueError("Unsupported file extension: {}".format(path))

    # Convert the requested interval from seconds to sample indices.
    start, end = map(int, args.interval.split(','))
    sample_rate = float(args.sample_rate)
    start = int(sample_rate * start)
    end = int(sample_rate * end)

    # Display the selected chunk of traces in a VisPy canvas.
    c = TraceView(keys='interactive')
    c.visual.traces = .01 * traces[start:end, ...]
    c.show()
    run()

    return None, None
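This subcommand-style handler expects an argparse-like `args` object with `file`, `interval`, `n_channels`, `dtype`, and `sample_rate` attributes. A minimal invocation sketch, with purely hypothetical file name and recording parameters:

from argparse import Namespace

# Hypothetical values; adjust to the actual recording.
args = Namespace(file='recording.dat',
                 interval='0,5',         # seconds: "start,end"
                 n_channels='32',
                 dtype='int16',
                 sample_rate='20000')
traces(args)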
def _read_templates(basename, probe, n_total_channels, n_channels):
    with open_h5(basename + '.templates.mat', 'r') as f:
        templates = f.read('/templates')
        # The stored array holds twice as many entries as actual templates;
        # keep only the first half. Note that this overwrites the
        # `n_channels` argument with the value found in the file.
        n_templates, n_samples, n_channels = templates.shape
        n_templates //= 2
        templates = templates[:n_templates, :, :]
        masks = np.zeros((n_templates, n_channels))
        # Channel on which each template reaches its peak amplitude.
        electrodes = np.argmax(np.abs(templates).max(1), 1)

        # Map absolute channel ids to their rank among the probe's channels.
        inv_nodes = np.zeros(n_total_channels, dtype=np.int32)
        nodes = []
        for key in probe['channel_groups'].keys():
            nodes += probe['channel_groups'][key]['channels']
        nodes = np.array(nodes, dtype=np.int32)
        idx = np.argsort(nodes)
        nodes = np.sort(nodes)
        inv_nodes[nodes] = np.argsort(nodes)

        def get_edges(i, channel_groups):
            # Channels whose position lies within `probe['radius']` of channel `i`.
            edges = []
            pos_x, pos_y = channel_groups['geometry'][i]
            for c2 in channel_groups['channels']:
                pos_x2, pos_y2 = channel_groups['geometry'][c2]
                if (pos_x - pos_x2) ** 2 + (pos_y - pos_y2) ** 2 <= probe['radius'] ** 2:
                    edges += [c2]
            return edges

        # For each template, unmask the channels around its peak electrode.
        for count, i in enumerate(electrodes):
            for key in probe['channel_groups'].keys():
                if nodes[i] in probe['channel_groups'][key]['channels']:
                    edges = get_edges(nodes[i], probe['channel_groups'][key])
                    masks[count, idx[inv_nodes[edges]]] = 1
    return templates, masks
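As the accesses above suggest, the `probe` argument follows the usual .prb dictionary layout: a detection `radius` plus per-group `channels` and `geometry` entries. A minimal hypothetical two-channel example:

probe = {
    'radius': 100,
    'channel_groups': {
        0: {
            'channels': [0, 1],
            # Channel id -> (x, y) position, in the same units as 'radius'.
            'geometry': {0: (0.0, 0.0), 1: (0.0, 50.0)},
        },
    },
}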
def _read_amplitudes(basename, n_templates, n_spikes, spike_clusters):
    amplitudes = np.empty_like(spike_clusters, dtype=np.float32)
    spike_ids = np.arange(n_spikes, dtype=np.int32)
    # Group spike ids by cluster so that each template's amplitudes can be
    # scattered back to the right spikes.
    spc = _spikes_per_cluster(spike_ids, spike_clusters)

    with open_h5(basename + '.amplitudes.mat', 'r') as f:
        for i in range(n_templates):
            amplitudes_i = f.read('/temp_' + str(i))[0, ...]
            amplitudes[spc[i]] = amplitudes_i
    return amplitudes
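`_spikes_per_cluster` is a phy helper that is not reproduced in these snippets; judging from its use here, it returns a dictionary mapping each cluster id to the spike ids assigned to it. A minimal stand-in with that behaviour (a sketch, not phy's actual implementation):

import numpy as np

def _spikes_per_cluster(spike_ids, spike_clusters):
    # Sketch: group the spike ids by their cluster assignment.
    spike_ids = np.asarray(spike_ids)
    spike_clusters = np.asarray(spike_clusters)
    return {cluster: spike_ids[spike_clusters == cluster]
            for cluster in np.unique(spike_clusters)}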
def _read_spikes(basename):
    with open_h5(basename + '.spiketimes.mat', 'r') as f:
        # One dataset per cluster; the cluster id is encoded in the dataset name.
        spike_samples = {}
        for name in f.children():
            cluster = int(name.split('_')[1])
            samples = f.read(name)[:].ravel().astype(np.uint64)
            spike_samples[cluster] = samples
        clusters = np.sort(list(spike_samples.keys()))
        # n_clusters = len(clusters)
        counts = {cluster: len(spikes)
                  for cluster, spikes in spike_samples.items()}
        # Concatenate all spike samples and sort them in time, carrying the
        # cluster assignments along.
        spikes = np.hstack([spike_samples[cluster]
                            for cluster in clusters])
        idx = np.argsort(spikes)
        spike_clusters = np.repeat(clusters, [counts[cluster]
                                              for cluster in clusters])
        return spikes[idx], spike_clusters[idx]
Example #6
def _load_spike_clusters():
    with open_h5('sc', 'r') as f:
        return f.read('/sc')[...]
Example #7
def _gen_spike_clusters():
    sc = np.random.randint(size=n_spikes, low=0, high=n_clusters)
    with open_h5('sc', 'w') as f:
        f.write('/sc', sc)
Example #8
def _gen_arr():
    arr = np.random.rand(n_spikes, n_channels).astype(np.float32)
    with open_h5('test', 'w') as f:
        f.write('/test', arr)
Example #9
import os
import os.path as op
import shutil

import numpy as np

from phy.io.h5 import open_h5

# Assumed context: `_store_path`, `n_spikes`, `n_clusters`, `n_channels`, and
# `_spikes_per_cluster` are defined elsewhere in the original benchmark script.


def _load_spike_clusters():
    with open_h5('sc', 'r') as f:
        return f.read('/sc')[...]


def _reset_store():
    # Wipe and recreate both the store directory and the flat-file directory.
    for path in (_store_path, '_flat'):
        if op.exists(path):
            shutil.rmtree(path)
        os.mkdir(path)


# Generate the test data, then reopen the HDF5 file for reading.
_gen_spike_clusters()
_gen_arr()

f = open_h5('test', 'r')

sc = _load_spike_clusters()
arr = f.read('/test')
spikes = np.arange(n_spikes)
spc = _spikes_per_cluster(spikes, sc)


def _flat_file(cluster):
    return op.join('_flat', str(cluster))


def _free_cache():
    # Drop the OS page cache so that subsequent reads hit the disk rather
    # than memory (requires sudo).
    os.system('sync')
    os.system('sudo sh -c "sync; echo 3 > /proc/sys/vm/drop_caches"')
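The `_flat_file` helper points at a per-cluster flat-file store that the benchmark compares against the single HDF5 file. A sketch of how such a store could be filled from the arrays above; writing .npy files via `np.save` is an assumption, the original script may use raw binary instead:

def _write_flat_store():
    # Sketch: one file per cluster, holding that cluster's rows of `arr`.
    for cluster, spike_ids in spc.items():
        np.save(_flat_file(cluster), arr[spike_ids])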