Example #1
def test_cell_response(tmpdir):
    """Test CellResponse object."""

    # Round-trip test
    spike_times = [[2.3456, 7.89], [4.2812, 93.2]]
    spike_gids = [[1, 3], [5, 7]]
    spike_types = [['L2_pyramidal', 'L2_basket'],
                   ['L5_pyramidal', 'L5_basket']]
    tstart, tstop, fs = 0.1, 98.4, 1000.
    sim_times = np.arange(tstart, tstop, 1 / fs)
    gid_ranges = {
        'L2_pyramidal': range(1, 2),
        'L2_basket': range(3, 4),
        'L5_pyramidal': range(5, 6),
        'L5_basket': range(7, 8)
    }
    cell_response = CellResponse(spike_times=spike_times,
                                 spike_gids=spike_gids,
                                 spike_types=spike_types,
                                 times=sim_times)
    cell_response.plot_spikes_hist(show=False)
    cell_response.write(tmpdir.join('spk_%d.txt'))
    assert cell_response == read_spikes(tmpdir.join('spk_*.txt'))
    assert ("CellResponse | 2 simulation trials" in repr(cell_response))

    # reset clears all recorded variables, but leaves simulation time intact
    assert len(cell_response.times) == len(sim_times)
    sim_attributes = [
        '_spike_times', '_spike_gids', '_spike_types', '_vsoma', '_isoma'
    ]
    net_attributes = ['_times', '_cell_type_names']  # `Network.__init__`
    # creates these. Check that we always know which response attributes are
    # simulated; see #291 for discussion. The objective is to keep the
    # cell_response object small.
    assert list(cell_response.__dict__.keys()) == \
        sim_attributes + net_attributes

    # Test recovery of empty spike files
    empty_spike = CellResponse(spike_times=[[], []],
                               spike_gids=[[], []],
                               spike_types=[[], []])
    empty_spike.write(tmpdir.join('empty_spk_%d.txt'))
    assert empty_spike == read_spikes(tmpdir.join('empty_spk_*.txt'))

    assert ("CellResponse | 2 simulation trials" in repr(empty_spike))

    with pytest.raises(TypeError,
                       match="spike_times should be a list of lists"):
        cell_response = CellResponse(spike_times=([2.3456, 7.89],
                                                  [4.2812, 93.2]),
                                     spike_gids=spike_gids,
                                     spike_types=spike_types)

    with pytest.raises(TypeError,
                       match="spike_times should be a list of lists"):
        cell_response = CellResponse(spike_times=[1, 2],
                                     spike_gids=spike_gids,
                                     spike_types=spike_types)

    with pytest.raises(ValueError,
                       match="spike times, gids, and types should "
                       "be lists of the same length"):
        cell_response = CellResponse(spike_times=[[2.3456, 7.89]],
                                     spike_gids=spike_gids,
                                     spike_types=spike_types)

    cell_response = CellResponse(spike_times=spike_times,
                                 spike_gids=spike_gids,
                                 spike_types=spike_types)

    with pytest.raises(TypeError,
                       match="indices must be int, slice, or "
                       "array-like, not str"):
        cell_response['1']

    with pytest.raises(TypeError,
                       match="indices must be int, slice, or "
                       "array-like, not float"):
        cell_response[1.0]

    with pytest.raises(ValueError, match="ndarray cannot exceed 1 dimension"):
        cell_response[np.array([[1, 2], [3, 4]])]

    with pytest.raises(TypeError,
                       match="gids must be of dtype int, "
                       "not float64"):
        cell_response[np.array([1, 2, 3.0])]

    with pytest.raises(TypeError,
                       match="gids must be of dtype int, "
                       "not float64"):
        cell_response[[0, 1, 2, 2.0]]

    with pytest.raises(TypeError,
                       match="spike_types should be str, "
                       "list, dict, or None"):
        cell_response.plot_spikes_hist(spike_types=1, show=False)

    with pytest.raises(TypeError,
                       match=r"spike_types\[ev\] must be a list\. "
                       r"Got int\."):
        cell_response.plot_spikes_hist(spike_types={'ev': 1}, show=False)

    with pytest.raises(ValueError,
                       match=r"Elements of spike_types must map to"
                       r" mutually exclusive input types\. L2_basket is found"
                       r" more than once\."):
        cell_response.plot_spikes_hist(
            spike_types={'ev': ['L2_basket', 'L2_b']}, show=False)

    with pytest.raises(ValueError, match="No input types found for ABC"):
        cell_response.plot_spikes_hist(spike_types='ABC', show=False)

    with pytest.raises(ValueError,
                       match="tstart and tstop must be of type "
                       "int or float"):
        cell_response.mean_rates(tstart=0.1,
                                 tstop='ABC',
                                 gid_ranges=gid_ranges)

    with pytest.raises(ValueError, match="tstop must be greater than tstart"):
        cell_response.mean_rates(tstart=0.1, tstop=-1.0, gid_ranges=gid_ranges)

    with pytest.raises(ValueError,
                       match="Invalid mean_type. Valid "
                       "arguments include 'all', 'trial', or 'cell'."):
        cell_response.mean_rates(tstart=tstart,
                                 tstop=tstop,
                                 gid_ranges=gid_ranges,
                                 mean_type='ABC')

    test_rate = (1 / (tstop - tstart)) * 1000
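    # i.e., the rate in spikes/s corresponding to a single spike within the
    # (tstop - tstart) ms window; each cell above spikes exactly once in one
    # of the two trials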

    assert cell_response.mean_rates(tstart, tstop, gid_ranges) == {
        'L5_pyramidal': test_rate / 2,
        'L5_basket': test_rate / 2,
        'L2_pyramidal': test_rate / 2,
        'L2_basket': test_rate / 2
    }
    assert cell_response.mean_rates(tstart,
                                    tstop,
                                    gid_ranges,
                                    mean_type='trial') == {
                                        'L5_pyramidal': [0.0, test_rate],
                                        'L5_basket': [0.0, test_rate],
                                        'L2_pyramidal': [test_rate, 0.0],
                                        'L2_basket': [test_rate, 0.0]
                                    }
    assert cell_response.mean_rates(tstart,
                                    tstop,
                                    gid_ranges,
                                    mean_type='cell') == {
                                        'L5_pyramidal': [[0.0], [test_rate]],
                                        'L5_basket': [[0.0], [test_rate]],
                                        'L2_pyramidal': [[test_rate], [0.0]],
                                        'L2_basket': [[test_rate], [0.0]]
                                    }

    # Write spike file with no 'types' column
    for fname in sorted(glob(str(tmpdir.join('spk_*.txt')))):
        times_gids_only = np.loadtxt(fname, dtype=str)[:, (0, 1)]
        np.savetxt(fname, times_gids_only, delimiter='\t', fmt='%s')

    # Check that spike_types are updated according to gid_ranges
    cell_response = read_spikes(tmpdir.join('spk_*.txt'),
                                gid_ranges=gid_ranges)
    assert cell_response.spike_types == spike_types

    # Check for gid_ranges errors
    with pytest.raises(ValueError,
                       match="gid_ranges must be provided if "
                       "spike types are unspecified in the file "):
        cell_response = read_spikes(tmpdir.join('spk_*.txt'))
    with pytest.raises(ValueError,
                       match="gid_ranges should contain only "
                       "disjoint sets of gid values"):
        gid_ranges = {
            'L2_pyramidal': range(3),
            'L2_basket': range(2, 4),
            'L5_pyramidal': range(4, 6),
            'L5_basket': range(6, 8)
        }
        cell_response = read_spikes(tmpdir.join('spk_*.txt'),
                                    gid_ranges=gid_ranges)
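
A minimal round-trip sketch (not part of the test above) that mirrors only the calls exercised in this example; the top-level imports of CellResponse and read_spikes are assumptions and may differ across hnn_core versions:

import tempfile

from hnn_core import CellResponse, read_spikes  # assumed top-level exports

spike_times = [[2.3456, 7.89], [4.2812, 93.2]]
spike_gids = [[1, 3], [5, 7]]
spike_types = [['L2_pyramidal', 'L2_basket'], ['L5_pyramidal', 'L5_basket']]
cell_response = CellResponse(spike_times=spike_times, spike_gids=spike_gids,
                             spike_types=spike_types)

with tempfile.TemporaryDirectory() as out_dir:
    # '%d' expands to the trial index, giving one file per trial
    cell_response.write(f'{out_dir}/spk_%d.txt')
    assert read_spikes(f'{out_dir}/spk_*.txt') == cell_response

gid_ranges = {'L2_pyramidal': range(1, 2), 'L2_basket': range(3, 4),
              'L5_pyramidal': range(5, 6), 'L5_basket': range(7, 8)}
print(cell_response.mean_rates(tstart=0.1, tstop=98.4, gid_ranges=gid_ranges))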
Example #2
def test_network():
    """Test network object."""
    hnn_core_root = op.dirname(hnn_core.__file__)
    params_fname = op.join(hnn_core_root, 'param', 'default.json')
    params = read_params(params_fname)
    # add rhythmic inputs (i.e., a type of common input)
    params.update({
        'input_dist_A_weight_L2Pyr_ampa': 5.4e-5,
        'input_dist_A_weight_L5Pyr_ampa': 5.4e-5,
        't0_input_dist': 50,
        'input_prox_A_weight_L2Pyr_ampa': 5.4e-5,
        'input_prox_A_weight_L5Pyr_ampa': 5.4e-5,
        't0_input_prox': 50
    })
    net = Network(deepcopy(params), add_drives_from_params=True)
    network_builder = NetworkBuilder(net)  # needed to populate net.cells

    # Assert that params are conserved across Network initialization
    for p in params:
        assert params[p] == net._params[p]
    assert len(params) == len(net._params)
    print(network_builder)
    print(network_builder.cells[:2])

    # Assert that proper number of gids are created for Network drives
    dns_from_gids = [
        name for name in net.gid_ranges.keys() if name not in net.cellname_list
    ]
    assert len(dns_from_gids) == len(net.external_drives)
    for dn in dns_from_gids:
        assert dn in net.external_drives.keys()
        this_src_gids = set([
            gid for drive_conn in net.external_drives[dn]['conn'].values()
            for gid in drive_conn['src_gids']
        ])  # NB: a set of global gids
        assert len(net.gid_ranges[dn]) == len(this_src_gids)
        assert len(net.external_drives[dn]['events']) == 1  # single trial!

    assert len(net.gid_ranges['bursty1']) == 1
    for drive in net.external_drives.values():
        assert len(drive['events']) == 1  # single trial simulated
        if drive['type'] == 'evoked':
            for kw in ['mu', 'sigma', 'numspikes']:
                assert kw in drive['dynamics'].keys()
            assert len(drive['events'][0]) == net.n_cells
            # this also implicitly tests that events are always a list
            assert len(drive['events'][0][0]) == drive['dynamics']['numspikes']
        elif drive['type'] == 'gaussian':
            for kw in ['mu', 'sigma', 'numspikes']:
                assert kw in drive['dynamics'].keys()
            assert len(drive['events'][0]) == net.n_cells
        elif drive['type'] == 'poisson':
            for kw in ['tstart', 'tstop', 'rate_constant']:
                assert kw in drive['dynamics'].keys()
            assert len(drive['events'][0]) == net.n_cells

        elif drive['type'] == 'bursty':
            for kw in [
                    'distribution', 'tstart', 'tstart_std', 'tstop',
                    'burst_rate', 'burst_std', 'numspikes', 'repeats'
            ]:
                assert kw in drive['dynamics'].keys()
            assert len(drive['events'][0]) == 1
            n_events = (
                drive['dynamics']['numspikes'] *  # 2
                drive['dynamics']['repeats'] *  # 10
                (1 +
                 (drive['dynamics']['tstop'] - drive['dynamics']['tstart'] - 1)
                 // (1000. / drive['dynamics']['burst_rate'])))
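            # with the parameter values used here this works out to
            # 2 spikes x 10 repeats x 2 bursts = 40 events per trial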
            assert len(drive['events'][0][0]) == n_events  # 40

    # make sure the PRNGs are consistent.
    target_times = {
        'evdist1': [66.30498327062551, 61.54362532343694],
        'evprox1': [23.80641637082997, 30.857310915553647],
        'evprox2': [141.76252038319825, 137.73942375578602]
    }
    for drive_name in target_times:
        for idx in [0, -1]:  # first and last
            assert_allclose(net.external_drives[drive_name]['events'][0][idx],
                            target_times[drive_name][idx],
                            rtol=1e-12)

    # check select AMPA weights
    target_weights = {
        'evdist1': {
            'L2_basket': 0.006562,
            'L5_pyramidal': 0.142300
        },
        'evprox1': {
            'L2_basket': 0.08831,
            'L5_pyramidal': 0.00865
        },
        'evprox2': {
            'L2_basket': 0.000003,
            'L5_pyramidal': 0.684013
        }
    }
    for drive_name in target_weights:
        for cellname in target_weights[drive_name]:
            assert_allclose(net.external_drives[drive_name]['conn'][cellname]
                            ['ampa']['A_weight'],
                            target_weights[drive_name][cellname],
                            rtol=1e-12)

    # check select synaptic delays
    target_delays = {
        'evdist1': {
            'L2_basket': 0.1,
            'L5_pyramidal': 0.1
        },
        'evprox1': {
            'L2_basket': 0.1,
            'L5_pyramidal': 1.
        },
        'evprox2': {
            'L2_basket': 0.1,
            'L5_pyramidal': 1.
        }
    }
    for drive_name in target_delays:
        for cellname in target_delays[drive_name]:
            assert_allclose(net.external_drives[drive_name]['conn'][cellname]
                            ['ampa']['A_delay'],
                            target_delays[drive_name][cellname],
                            rtol=1e-12)

    # Assert that an empty CellResponse object is created as an attribute
    assert net.cell_response == CellResponse()
    # array of simulation times is created in Network.__init__, but passed
    # to CellResponse-constructor for storage (Network is agnostic of time)
    with pytest.raises(TypeError,
                       match="'times' is an np.ndarray of simulation times"):
        _ = CellResponse(times=[1, 2, 3])

    # Assert that all external drives are initialized
    n_evoked_sources = net.n_cells * 3
    n_pois_sources = net.n_cells
    n_gaus_sources = net.n_cells
    n_bursty_sources = 2

    # test that expected number of external driving events are created
    assert len(
        network_builder._drive_cells) == (n_evoked_sources + n_pois_sources +
                                          n_gaus_sources + n_bursty_sources)
    assert len(network_builder._gid_list) ==\
        len(network_builder._drive_cells) + net.n_cells
    # first 'evoked drive' comes after real cells and bursty drive cells
    assert network_builder._drive_cells[2].gid ==\
        net.n_cells + n_bursty_sources

    # Assert that netcons are created properly
    # proximal
    assert 'L2Pyr_L2Pyr_nmda' in network_builder.ncs
    n_pyr = len(net.gid_ranges['L2_pyramidal'])
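    # all-to-all among L2 pyramidal cells excluding autapses:
    # n_pyr * (n_pyr - 1) ordered pairs, with 3 NetCons per pair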
    n_connections = 3 * (n_pyr**2 - n_pyr)  # 3 synapses / cell
    assert len(network_builder.ncs['L2Pyr_L2Pyr_nmda']) == n_connections
    nc = network_builder.ncs['L2Pyr_L2Pyr_nmda'][0]
    assert nc.threshold == params['threshold']

    # create a new connection between cell types
    net = Network(deepcopy(params), add_drives_from_params=True)
    nc_dict = {'A_delay': 1, 'A_weight': 1e-5, 'lamtha': 20, 'threshold': 0.5}
    net._all_to_all_connect('bursty1',
                            'L5_basket',
                            'soma',
                            'gabaa',
                            nc_dict,
                            unique=False)
    network_builder = NetworkBuilder(net)
    assert 'bursty1_L5Basket_gabaa' in network_builder.ncs
    n_conn = len(net.gid_ranges['bursty1']) * len(net.gid_ranges['L5_basket'])
    assert len(network_builder.ncs['bursty1_L5Basket_gabaa']) == n_conn

    # try unique=True
    net = Network(deepcopy(params), add_drives_from_params=True)
    net._all_to_all_connect('extgauss',
                            'L5_basket',
                            'soma',
                            'gabaa',
                            nc_dict,
                            unique=True)
    network_builder = NetworkBuilder(net)
    n_conn = len(net.gid_ranges['L5_basket'])
    assert len(network_builder.ncs['extgauss_L5Basket_gabaa']) == n_conn

    # Test inputs for connectivity API
    net = Network(deepcopy(params), add_drives_from_params=True)
    n_conn = len(network_builder.ncs['L2Basket_L2Pyr_gabaa'])
    kwargs_default = dict(src_gids=[0, 1],
                          target_gids=[35, 36],
                          loc='soma',
                          receptor='gabaa',
                          weight=5e-4,
                          delay=1.0,
                          lamtha=1e9)
    net.add_connection(**kwargs_default)  # smoke test
    network_builder = NetworkBuilder(net)
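    # 2 src_gids x 2 target_gids = 4 new NetCons from the add_connection above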
    assert len(network_builder.ncs['L2Basket_L2Pyr_gabaa']) == n_conn + 4
    nc = network_builder.ncs['L2Basket_L2Pyr_gabaa'][-1]
    assert_allclose(nc.weight[0], kwargs_default['weight'])

    kwargs_good = [('src_gids', 0), ('src_gids', 'L2_pyramidal'),
                   ('src_gids', range(2)), ('target_gids', 35),
                   ('target_gids', range(2)), ('target_gids', 'L2_pyramidal'),
                   ('target_gids', [[35, 36], [37, 38]])]
    for arg, item in kwargs_good:
        kwargs = kwargs_default.copy()
        kwargs[arg] = item
        net.add_connection(**kwargs)

    kwargs_bad = [('src_gids', 0.0), ('src_gids', [0.0]),
                  ('target_gids', 35.0), ('target_gids', [35.0]),
                  ('target_gids', [[35], [36.0]]), ('loc', 1.0),
                  ('receptor', 1.0), ('weight', '1.0'), ('delay', '1.0'),
                  ('lamtha', '1.0')]
    for arg, item in kwargs_bad:
        match = ('must be an instance of')
        with pytest.raises(TypeError, match=match):
            kwargs = kwargs_default.copy()
            kwargs[arg] = item
            net.add_connection(**kwargs)

    kwargs_bad = [('src_gids', -1), ('src_gids', [-1]), ('target_gids', -1),
                  ('target_gids', [-1]), ('target_gids', [[35], [-1]]),
                  ('target_gids', [[35]]), ('src_gids', [0, 100]),
                  ('target_gids', [0, 100])]
    for arg, item in kwargs_bad:
        with pytest.raises(AssertionError):
            kwargs = kwargs_default.copy()
            kwargs[arg] = item
            net.add_connection(**kwargs)

    for arg in ['src_gids', 'target_gids', 'loc', 'receptor']:
        string_arg = 'invalid_string'
        match = f"Invalid value for the '{arg}' parameter"
        with pytest.raises(ValueError, match=match):
            kwargs = kwargs_default.copy()
            kwargs[arg] = string_arg
            net.add_connection(**kwargs)

    net.clear_connectivity()
    assert len(net.connectivity) == 0
Example #3
def test_network():
    """Test network object."""
    hnn_core_root = op.dirname(hnn_core.__file__)
    params_fname = op.join(hnn_core_root, 'param', 'default.json')
    params = read_params(params_fname)
    # add rhythmic inputs (i.e., a type of common input)
    params.update({
        'input_dist_A_weight_L2Pyr_ampa': 5.4e-5,
        'input_dist_A_weight_L5Pyr_ampa': 5.4e-5,
        't0_input_dist': 50,
        'input_prox_A_weight_L2Pyr_ampa': 5.4e-5,
        'input_prox_A_weight_L5Pyr_ampa': 5.4e-5,
        't0_input_prox': 50
    })
    net = Network(deepcopy(params))
    network_builder = NetworkBuilder(net)  # needed to populate net.cells

    # Assert that params are conserved across Network initialization
    for p in params:
        assert params[p] == net.params[p]
    assert len(params) == len(net.params)
    print(network_builder)
    print(network_builder.cells[:2])

    # Assert that proper number of gids are created for Network inputs
    assert len(net.gid_ranges['common']) == 2
    assert len(net.gid_ranges['extgauss']) == net.n_cells
    assert len(net.gid_ranges['extpois']) == net.n_cells
    for ev_input in params['t_ev*']:
        type_key = ev_input[2:-2] + ev_input[-1]
        assert len(net.gid_ranges[type_key]) == net.n_cells

    # Assert that an empty CellResponse object is created as an attribute
    assert net.cell_response == CellResponse()
    # array of simulation times is created in Network.__init__, but passed
    # to CellResponse-constructor for storage (Network is agnostic of time)
    with pytest.raises(TypeError,
                       match="'times' is an np.ndarray of simulation times"):
        _ = CellResponse(times=[1, 2, 3])

    # Assert that all external feeds are initialized
    n_evoked_sources = net.n_cells * 3
    n_pois_sources = net.n_cells
    n_gaus_sources = net.n_cells
    n_common_sources = 2

    # test that expected number of external driving events are created, and
    # make sure the PRNGs are consistent.
    assert isinstance(net.feed_times, dict)
    # single trial simulated
    assert all(
        len(src_feed_times) == 1
        for src_type, src_feed_times in net.feed_times.items()
        if src_type != 'tonic')
    assert len(net.feed_times['common'][0]) == n_common_sources
    assert len(net.feed_times['common'][0][0]) == 40  # 40 spikes
    assert isinstance(net.feed_times['evprox1'][0][0], list)
    assert len(net.feed_times['evprox1'][0]) == net.n_cells
    assert_allclose(net.feed_times['evprox1'][0][0], [23.80641637082997],
                    rtol=1e-12)

    assert len(
        network_builder._feed_cells) == (n_evoked_sources + n_pois_sources +
                                         n_gaus_sources + n_common_sources)
    assert len(network_builder._gid_list) ==\
        len(network_builder._feed_cells) + net.n_cells
    # first 'evoked feed' comes after real cells and common inputs
    assert network_builder._feed_cells[2].gid == net.n_cells + n_common_sources

    # Assert that netcons are created properly
    # proximal
    assert 'L2Pyr_L2Pyr_nmda' in network_builder.ncs
    n_pyr = len(net.gid_ranges['L2_pyramidal'])
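    # all-to-all among L2 pyramidal cells excluding autapses:
    # n_pyr * (n_pyr - 1) ordered pairs, with 3 NetCons per pair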
    n_connections = 3 * (n_pyr**2 - n_pyr)  # 3 synapses / cell
    assert len(network_builder.ncs['L2Pyr_L2Pyr_nmda']) == n_connections
    nc = network_builder.ncs['L2Pyr_L2Pyr_nmda'][0]
    assert nc.threshold == params['threshold']

    # create a new connection between cell types
    nc_dict = {'A_delay': 1, 'A_weight': 1e-5, 'lamtha': 20, 'threshold': 0.5}
    network_builder._connect_celltypes('common',
                                       'L5Basket',
                                       'soma',
                                       'gabaa',
                                       nc_dict,
                                       unique=False)
    assert 'common_L5Basket_gabaa' in network_builder.ncs
    n_conn = len(net.gid_ranges['common']) * len(net.gid_ranges['L5_basket'])
    assert len(network_builder.ncs['common_L5Basket_gabaa']) == n_conn

    # try unique=True
    network_builder._connect_celltypes('extgauss',
                                       'L5Basket',
                                       'soma',
                                       'gabaa',
                                       nc_dict,
                                       unique=True)
    n_conn = len(net.gid_ranges['L5_basket'])
    assert len(network_builder.ncs['extgauss_L5Basket_gabaa']) == n_conn
Example #4
def test_network():
    """Test network object."""
    params = read_params(params_fname)
    # add rhythmic inputs (i.e., a type of common input)
    params.update({
        'input_dist_A_weight_L2Pyr_ampa': 1.4e-5,
        'input_dist_A_weight_L5Pyr_ampa': 2.4e-5,
        't0_input_dist': 50,
        'input_prox_A_weight_L2Pyr_ampa': 3.4e-5,
        'input_prox_A_weight_L5Pyr_ampa': 4.4e-5,
        't0_input_prox': 50
    })

    net = jones_2009_model(deepcopy(params), add_drives_from_params=True)
    # instantiate drive events for NetworkBuilder
    net._instantiate_drives(tstop=params['tstop'], n_trials=params['N_trials'])
    network_builder = NetworkBuilder(net)  # needed to instantiate cells

    # Assert that params are conserved across Network initialization
    for p in params:
        assert params[p] == net._params[p]
    assert len(params) == len(net._params)
    print(network_builder)
    print(network_builder._cells[:2])

    # Assert that proper number/types of gids are created for Network drives
    dns_from_gids = [
        name for name in net.gid_ranges.keys() if name not in net.cell_types
    ]
    assert sorted(dns_from_gids) == sorted(net.external_drives.keys())
    for dn in dns_from_gids:
        n_drive_cells = net.external_drives[dn]['n_drive_cells']
        assert len(net.gid_ranges[dn]) == n_drive_cells

    # Check drive dict structure for each external drive
    for drive in net.external_drives.values():
        # Check that connectivity sources correspond to gid_ranges
        conn_idxs = pick_connection(net, src_gids=drive['name'])
        this_src_gids = set([
            gid for conn_idx in conn_idxs
            for gid in net.connectivity[conn_idx]['src_gids']
        ])  # NB: a set of global gids
        assert sorted(this_src_gids) == list(net.gid_ranges[drive['name']])
        # Check type-specific dynamics and events
        n_drive_cells = drive['n_drive_cells']
        assert len(drive['events']) == 1  # single trial simulated
        if drive['type'] == 'evoked':
            for kw in ['mu', 'sigma', 'numspikes']:
                assert kw in drive['dynamics'].keys()
            assert len(drive['events'][0]) == n_drive_cells
            # this also implicitly tests that events are always a list
            assert len(drive['events'][0][0]) == drive['dynamics']['numspikes']
        elif drive['type'] == 'gaussian':
            for kw in ['mu', 'sigma', 'numspikes']:
                assert kw in drive['dynamics'].keys()
            assert len(drive['events'][0]) == n_drive_cells
        elif drive['type'] == 'poisson':
            for kw in ['tstart', 'tstop', 'rate_constant']:
                assert kw in drive['dynamics'].keys()
            assert len(drive['events'][0]) == n_drive_cells
        elif drive['type'] == 'bursty':
            for kw in [
                    'tstart', 'tstart_std', 'tstop', 'burst_rate', 'burst_std',
                    'numspikes'
            ]:
                assert kw in drive['dynamics'].keys()
            assert len(drive['events'][0]) == n_drive_cells
            n_events = (
                drive['dynamics']['numspikes'] *  # 2
                (1 +
                 (drive['dynamics']['tstop'] - drive['dynamics']['tstart'] - 1)
                 // (1000. / drive['dynamics']['burst_rate'])))
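            # with the parameter values used here this works out to
            # 2 spikes x 2 bursts = 4 events per drive cell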
            assert len(drive['events'][0][0]) == n_events  # 4

    # make sure the PRNGs are consistent.
    target_times = {
        'evdist1': [66.30498327062551, 61.54362532343694],
        'evprox1': [23.80641637082997, 30.857310915553647],
        'evprox2': [141.76252038319825, 137.73942375578602]
    }
    for drive_name in target_times:
        for idx in [0, -1]:  # first and last
            assert_allclose(net.external_drives[drive_name]['events'][0][idx],
                            target_times[drive_name][idx],
                            rtol=1e-12)

    # check select AMPA weights
    target_weights = {
        'evdist1': {
            'L2_basket': 0.006562,
            'L5_pyramidal': 0.142300
        },
        'evprox1': {
            'L2_basket': 0.08831,
            'L5_pyramidal': 0.00865
        },
        'evprox2': {
            'L2_basket': 0.000003,
            'L5_pyramidal': 0.684013
        },
        'bursty1': {
            'L2_pyramidal': 0.000034,
            'L5_pyramidal': 0.000044
        },
        'bursty2': {
            'L2_pyramidal': 0.000014,
            'L5_pyramidal': 0.000024
        }
    }
    for drive_name in target_weights:
        for target_type in target_weights[drive_name]:
            conn_idxs = pick_connection(net,
                                        src_gids=drive_name,
                                        target_gids=target_type,
                                        receptor='ampa')
            for conn_idx in conn_idxs:
                drive_conn = net.connectivity[conn_idx]
                assert_allclose(drive_conn['nc_dict']['A_weight'],
                                target_weights[drive_name][target_type],
                                rtol=1e-12)

    # check select synaptic delays
    target_delays = {
        'evdist1': {
            'L2_basket': 0.1,
            'L5_pyramidal': 0.1
        },
        'evprox1': {
            'L2_basket': 0.1,
            'L5_pyramidal': 1.
        },
        'evprox2': {
            'L2_basket': 0.1,
            'L5_pyramidal': 1.
        }
    }
    for drive_name in target_delays:
        for target_type in target_delays[drive_name]:
            conn_idxs = pick_connection(net,
                                        src_gids=drive_name,
                                        target_gids=target_type,
                                        receptor='ampa')
            for conn_idx in conn_idxs:
                drive_conn = net.connectivity[conn_idx]
                assert_allclose(drive_conn['nc_dict']['A_delay'],
                                target_delays[drive_name][target_type],
                                rtol=1e-12)

    # array of simulation times is created in Network.__init__, but passed
    # to CellResponse-constructor for storage (Network is agnostic of time)
    with pytest.raises(TypeError,
                       match="'times' is an np.ndarray of simulation times"):
        _ = CellResponse(times='blah')

    # Assert that all external drives are initialized
    # Assumes legacy mode where cell-specific drives create artificial cells
    # for all network cells regardless of connectivity
    n_evoked_sources = 3 * net._n_cells
    n_pois_sources = net._n_cells
    n_gaus_sources = net._n_cells
    n_bursty_sources = (net.external_drives['bursty1']['n_drive_cells'] +
                        net.external_drives['bursty2']['n_drive_cells'])
    # test that expected number of external driving events are created
    assert len(
        network_builder._drive_cells) == (n_evoked_sources + n_pois_sources +
                                          n_gaus_sources + n_bursty_sources)
    assert len(network_builder._gid_list) ==\
        len(network_builder._drive_cells) + net._n_cells
    # first 'evoked drive' comes after real cells and bursty drive cells
    assert network_builder._drive_cells[n_bursty_sources].gid ==\
        net._n_cells + n_bursty_sources

    # Assert that netcons are created properly
    n_pyr = len(net.gid_ranges['L2_pyramidal'])
    n_basket = len(net.gid_ranges['L2_basket'])

    # Check pyramidal-pyramidal connection where allow_autapses=False
    assert 'L2Pyr_L2Pyr_nmda' in network_builder.ncs
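    # all-to-all among L2 pyramidal cells excluding autapses:
    # n_pyr * (n_pyr - 1) ordered pairs, with 3 NetCons per pair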
    n_connections = 3 * (n_pyr**2 - n_pyr)  # 3 synapses / cell
    assert len(network_builder.ncs['L2Pyr_L2Pyr_nmda']) == n_connections
    nc = network_builder.ncs['L2Pyr_L2Pyr_nmda'][0]
    assert nc.threshold == params['threshold']

    # Check bursty drives which use cell_specific=False
    assert 'bursty1_L2Pyr_ampa' in network_builder.ncs
    n_bursty1_sources = net.external_drives['bursty1']['n_drive_cells']
    n_connections = n_bursty1_sources * 3 * n_pyr  # 3 synapses / cell
    assert len(network_builder.ncs['bursty1_L2Pyr_ampa']) == n_connections
    nc = network_builder.ncs['bursty1_L2Pyr_ampa'][0]
    assert nc.threshold == params['threshold']

    # Check basket-basket connection where allow_autapses=True
    assert 'L2Basket_L2Basket_gabaa' in network_builder.ncs
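    # with autapses allowed, every ordered pair (including self-pairs) gets
    # one NetCon, hence n_basket**2 connections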
    n_connections = n_basket**2  # 1 synapse / cell
    assert len(network_builder.ncs['L2Basket_L2Basket_gabaa']) == n_connections
    nc = network_builder.ncs['L2Basket_L2Basket_gabaa'][0]
    assert nc.threshold == params['threshold']

    # Check evoked drives which use cell_specific=True
    assert 'evdist1_L2Basket_nmda' in network_builder.ncs
    n_connections = n_basket  # 1 synapse / cell
    assert len(network_builder.ncs['evdist1_L2Basket_nmda']) == n_connections
    nc = network_builder.ncs['evdist1_L2Basket_nmda'][0]
    assert nc.threshold == params['threshold']

    # Test inputs for connectivity API
    net = jones_2009_model(deepcopy(params), add_drives_from_params=True)
    # instantiate drive events for NetworkBuilder
    net._instantiate_drives(tstop=params['tstop'], n_trials=params['N_trials'])
    n_conn = len(network_builder.ncs['L2Basket_L2Pyr_gabaa'])
    kwargs_default = dict(src_gids=[0, 1],
                          target_gids=[35, 36],
                          loc='soma',
                          receptor='gabaa',
                          weight=5e-4,
                          delay=1.0,
                          lamtha=1e9,
                          probability=1.0)
    net.add_connection(**kwargs_default)  # smoke test
    network_builder = NetworkBuilder(net)
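    # 2 src_gids x 2 target_gids = 4 new NetCons from the add_connection above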
    assert len(network_builder.ncs['L2Basket_L2Pyr_gabaa']) == n_conn + 4
    nc = network_builder.ncs['L2Basket_L2Pyr_gabaa'][-1]
    assert_allclose(nc.weight[0], kwargs_default['weight'])

    kwargs_good = [('src_gids', 0), ('src_gids', 'L2_pyramidal'),
                   ('src_gids', range(2)), ('target_gids', 35),
                   ('target_gids', range(2)), ('target_gids', 'L2_pyramidal'),
                   ('target_gids', [[35, 36], [37, 38]]), ('probability', 0.5)]
    for arg, item in kwargs_good:
        kwargs = kwargs_default.copy()
        kwargs[arg] = item
        net.add_connection(**kwargs)

    kwargs_bad = [('src_gids', 0.0), ('src_gids', [0.0]),
                  ('target_gids', 35.0), ('target_gids', [35.0]),
                  ('target_gids', [[35], [36.0]]), ('loc', 1.0),
                  ('receptor', 1.0), ('weight', '1.0'), ('delay', '1.0'),
                  ('lamtha', '1.0'), ('probability', '0.5')]
    for arg, item in kwargs_bad:
        match = ('must be an instance of')
        with pytest.raises(TypeError, match=match):
            kwargs = kwargs_default.copy()
            kwargs[arg] = item
            net.add_connection(**kwargs)

    kwargs_bad = [('src_gids', -1), ('src_gids', [-1]), ('target_gids', -1),
                  ('target_gids', [-1]), ('target_gids', [[35], [-1]]),
                  ('target_gids', [[35]]), ('src_gids', [0, 100]),
                  ('target_gids', [0, 100])]
    for arg, item in kwargs_bad:
        with pytest.raises(AssertionError):
            kwargs = kwargs_default.copy()
            kwargs[arg] = item
            net.add_connection(**kwargs)

    for arg in ['src_gids', 'target_gids', 'loc', 'receptor']:
        string_arg = 'invalid_string'
        match = f"Invalid value for the '{arg}' parameter"
        with pytest.raises(ValueError, match=match):
            kwargs = kwargs_default.copy()
            kwargs[arg] = string_arg
            net.add_connection(**kwargs)

    # Check probability=0.5 produces half as many connections as default
    net.add_connection(**kwargs_default)
    kwargs = kwargs_default.copy()
    kwargs['probability'] = 0.5
    net.add_connection(**kwargs)
    n_connections = np.sum(
        [len(t_gids) for t_gids in net.connectivity[-2]['gid_pairs'].values()])
    n_connections_new = np.sum(
        [len(t_gids) for t_gids in net.connectivity[-1]['gid_pairs'].values()])
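    # 'gid_pairs' maps each src gid to its target gids, so summing the list
    # lengths counts the individual connections created by each call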
    assert n_connections_new == np.round(n_connections * 0.5).astype(int)
    assert net.connectivity[-1]['probability'] == 0.5
    with pytest.raises(ValueError, match='probability must be'):
        kwargs = kwargs_default.copy()
        kwargs['probability'] = -1.0
        net.add_connection(**kwargs)

    # Test net.pick_connection()
    kwargs_default = dict(net=net,
                          src_gids=None,
                          target_gids=None,
                          loc=None,
                          receptor=None)

    kwargs_good = [('src_gids', 0), ('src_gids', 'L2_pyramidal'),
                   ('src_gids', range(2)), ('src_gids', None),
                   ('target_gids', 35), ('target_gids', range(2)),
                   ('target_gids', 'L2_pyramidal'), ('target_gids', None),
                   ('loc', 'soma'), ('loc', None), ('receptor', 'gabaa'),
                   ('receptor', None)]
    for arg, item in kwargs_good:
        kwargs = kwargs_default.copy()
        kwargs[arg] = item
        indices = pick_connection(**kwargs)
        for conn_idx in indices:
            if (arg == 'src_gids' or arg == 'target_gids') and \
                    isinstance(item, str):
                assert np.all(
                    np.in1d(net.connectivity[conn_idx][arg],
                            net.gid_ranges[item]))
            elif item is None:
                pass
            else:
                assert np.any(np.in1d([item], net.connectivity[conn_idx][arg]))

    # Check that gid 0 does not appear as a source in any connection that
    # pick_connection failed to identify
    conn_idxs = pick_connection(net, src_gids=0)
    for conn_idx in range(len(net.connectivity)):
        if conn_idx not in conn_idxs:
            assert 0 not in net.connectivity[conn_idx]['src_gids']

    # Check that pick_connection returns empty lists when searching for
    # a drive targeting the wrong location
    conn_idxs = pick_connection(net, src_gids='evdist1', loc='proximal')
    assert len(conn_idxs) == 0
    assert not pick_connection(net, src_gids='evprox1', loc='distal')

    # Check the condition where no connections match
    assert pick_connection(net, loc='distal', receptor='gabab') == list()

    kwargs_bad = [('src_gids', 0.0),
                  ('src_gids', [0.0]), ('target_gids', 35.0),
                  ('target_gids', [35.0]), ('target_gids', [35, [36.0]]),
                  ('loc', 1.0), ('receptor', 1.0)]
    for arg, item in kwargs_bad:
        match = ('must be an instance of')
        with pytest.raises(TypeError, match=match):
            kwargs = kwargs_default.copy()
            kwargs[arg] = item
            pick_connection(**kwargs)

    kwargs_bad = [('src_gids', -1), ('src_gids', [-1]), ('target_gids', -1),
                  ('target_gids', [-1]), ('src_gids', [35, -1]),
                  ('target_gids', [35, -1])]
    for arg, item in kwargs_bad:
        with pytest.raises(AssertionError):
            kwargs = kwargs_default.copy()
            kwargs[arg] = item
            pick_connection(**kwargs)

    for arg in ['src_gids', 'target_gids', 'loc', 'receptor']:
        string_arg = 'invalid_string'
        match = f"Invalid value for the '{arg}' parameter"
        with pytest.raises(ValueError, match=match):
            kwargs = kwargs_default.copy()
            kwargs[arg] = string_arg
            pick_connection(**kwargs)

    # Test removing connections from net.connectivity
    # Needs to be updated if the number of drives changes in preceding tests
    net.clear_connectivity()
    assert len(net.connectivity) == 50
    net.clear_drives()
    assert len(net.connectivity) == 0
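
A short sketch of the pick_connection pattern exercised above (not part of the test); jones_2009_model and pick_connection are assumed to be importable from the top-level hnn_core namespace, which may differ across versions:

from hnn_core import jones_2009_model, pick_connection  # assumed exports

# build the model with the drives defined in the bundled default params
net = jones_2009_model(add_drives_from_params=True)
# indices of all AMPA connections that originate from the 'evdist1' drive
conn_idxs = pick_connection(net, src_gids='evdist1', receptor='ampa')
for conn_idx in conn_idxs:
    nc_dict = net.connectivity[conn_idx]['nc_dict']
    print(conn_idx, nc_dict['A_weight'], nc_dict['A_delay'])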
Example #5
def test_cell_response(tmpdir):
    """Test CellResponse object."""

    # Round-trip test
    spike_times = [[2.3456, 7.89], [4.2812, 93.2]]
    spike_gids = [[1, 3], [5, 7]]
    spike_types = [['L2_pyramidal', 'L2_basket'],
                   ['L5_pyramidal', 'L5_basket']]
    tstart, tstop = 0.1, 98.4
    gid_ranges = {
        'L2_pyramidal': range(1, 2),
        'L2_basket': range(3, 4),
        'L5_pyramidal': range(5, 6),
        'L5_basket': range(7, 8)
    }
    cell_response = CellResponse(spike_times=spike_times,
                                 spike_gids=spike_gids,
                                 spike_types=spike_types)
    cell_response.plot_spikes_hist(show=False)
    cell_response.write(tmpdir.join('spk_%d.txt'))
    assert cell_response == read_spikes(tmpdir.join('spk_*.txt'))

    assert ("CellResponse | 2 simulation trials" in repr(cell_response))

    with pytest.raises(TypeError,
                       match="spike_times should be a list of lists"):
        cell_response = CellResponse(spike_times=([2.3456, 7.89],
                                                  [4.2812, 93.2]),
                                     spike_gids=spike_gids,
                                     spike_types=spike_types)

    with pytest.raises(TypeError,
                       match="spike_times should be a list of lists"):
        cell_response = CellResponse(spike_times=[1, 2],
                                     spike_gids=spike_gids,
                                     spike_types=spike_types)

    with pytest.raises(ValueError,
                       match="spike times, gids, and types should "
                       "be lists of the same length"):
        cell_response = CellResponse(spike_times=[[2.3456, 7.89]],
                                     spike_gids=spike_gids,
                                     spike_types=spike_types)

    cell_response = CellResponse(spike_times=spike_times,
                                 spike_gids=spike_gids,
                                 spike_types=spike_types)

    with pytest.raises(TypeError,
                       match="indices must be int, slice, or "
                       "array-like, not str"):
        cell_response['1']

    with pytest.raises(TypeError,
                       match="indices must be int, slice, or "
                       "array-like, not float"):
        cell_response[1.0]

    with pytest.raises(ValueError, match="ndarray cannot exceed 1 dimension"):
        cell_response[np.array([[1, 2], [3, 4]])]

    with pytest.raises(TypeError,
                       match="gids must be of dtype int, "
                       "not float64"):
        cell_response[np.array([1, 2, 3.0])]

    with pytest.raises(TypeError,
                       match="gids must be of dtype int, "
                       "not float64"):
        cell_response[[0, 1, 2, 2.0]]

    with pytest.raises(TypeError,
                       match="spike_types should be str, "
                       "list, dict, or None"):
        cell_response.plot_spikes_hist(spike_types=1, show=False)

    with pytest.raises(TypeError,
                       match=r"spike_types\[ev\] must be a list\. "
                       r"Got int\."):
        cell_response.plot_spikes_hist(spike_types={'ev': 1}, show=False)

    with pytest.raises(ValueError,
                       match=r"Elements of spike_types must map to"
                       r" mutually exclusive input types\. L2_basket is found"
                       r" more than once\."):
        cell_response.plot_spikes_hist(
            spike_types={'ev': ['L2_basket', 'L2_b']}, show=False)

    with pytest.raises(ValueError, match="No input types found for ABC"):
        cell_response.plot_spikes_hist(spike_types='ABC', show=False)

    with pytest.raises(ValueError,
                       match="tstart and tstop must be of type "
                       "int or float"):
        cell_response.mean_rates(tstart=0.1,
                                 tstop='ABC',
                                 gid_ranges=gid_ranges)

    with pytest.raises(ValueError, match="tstop must be greater than tstart"):
        cell_response.mean_rates(tstart=0.1, tstop=-1.0, gid_ranges=gid_ranges)

    with pytest.raises(ValueError,
                       match="Invalid mean_type. Valid "
                       "arguments include 'all', 'trial', or 'cell'."):
        cell_response.mean_rates(tstart=tstart,
                                 tstop=tstop,
                                 gid_ranges=gid_ranges,
                                 mean_type='ABC')

    test_rate = (1 / (tstop - tstart)) * 1000
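    # i.e., the rate in spikes/s corresponding to a single spike within the
    # (tstop - tstart) ms window; each cell above spikes exactly once in one
    # of the two trials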

    assert cell_response.mean_rates(tstart, tstop, gid_ranges) == {
        'L5_pyramidal': test_rate / 2,
        'L5_basket': test_rate / 2,
        'L2_pyramidal': test_rate / 2,
        'L2_basket': test_rate / 2
    }
    assert cell_response.mean_rates(tstart,
                                    tstop,
                                    gid_ranges,
                                    mean_type='trial') == {
                                        'L5_pyramidal': [0.0, test_rate],
                                        'L5_basket': [0.0, test_rate],
                                        'L2_pyramidal': [test_rate, 0.0],
                                        'L2_basket': [test_rate, 0.0]
                                    }
    assert cell_response.mean_rates(tstart,
                                    tstop,
                                    gid_ranges,
                                    mean_type='cell') == {
                                        'L5_pyramidal': [[0.0], [test_rate]],
                                        'L5_basket': [[0.0], [test_rate]],
                                        'L2_pyramidal': [[test_rate], [0.0]],
                                        'L2_basket': [[test_rate], [0.0]]
                                    }

    # Write spike file with no 'types' column
    # Check for gid_ranges errors
    for fname in sorted(glob(str(tmpdir.join('spk_*.txt')))):
        times_gids_only = np.loadtxt(fname, dtype=str)[:, (0, 1)]
        np.savetxt(fname, times_gids_only, delimiter='\t', fmt='%s')
    with pytest.raises(ValueError,
                       match="gid_ranges must be provided if "
                       "spike types are unspecified in the file "):
        cell_response = read_spikes(tmpdir.join('spk_*.txt'))
    with pytest.raises(ValueError,
                       match="gid_ranges should contain only "
                       "disjoint sets of gid values"):
        gid_ranges = {
            'L2_pyramidal': range(3),
            'L2_basket': range(2, 4),
            'L5_pyramidal': range(4, 6),
            'L5_basket': range(6, 8)
        }
        cell_response = read_spikes(tmpdir.join('spk_*.txt'),
                                    gid_ranges=gid_ranges)