Example 1
def load_connectome_varshney():
    """
    Get the gap junction and chemical synapse connectivity matrices from Varshney et al., 2011.
    The returned value is a tuple of (gap junction matrix, chemical synapse matrix).
    """
    return np.load(get_data_file_abs_path('Gg.npy')), np.load(
        get_data_file_abs_path('Gs.npy'))
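A minimal usage sketch for the loader above (not part of the original snippet; it assumes both returned values are square NumPy arrays over the same neuron ordering):

# Hypothetical caller: unpack and inspect the two connectivity matrices.
Gg, Gs = load_connectome_varshney()
assert Gg.shape == Gs.shape  # both matrices are indexed by the same neuron ordering
print('Number of neurons:', Gg.shape[0])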
Example 2
def main():
    """
    Print the sorted names of all inhibitory neurons, as flagged in emask.npy.
    """
    neuron_metadata_collection = NeuronMetadataCollection.load_from_chem_json(
        get_data_file_abs_path('chem.json'))
    inhibitory_neuron_ids = np.argwhere(
        np.load(get_data_file_abs_path('emask.npy')).flatten()).flatten()
    inhibitory_neuron_names = []
    for neuron_id in inhibitory_neuron_ids:
        metadata = neuron_metadata_collection.get_metadata(neuron_id)
        inhibitory_neuron_names.append(metadata.name)
    print(sorted(inhibitory_neuron_names))
Example 3
def load_connectome_cook():
    """
    Get the gap junction and chemical synapse connectivity matrices from Cook et al., 2019.
    The returned value is a tuple of (gap junction matrix, chemical synapse matrix).
    """
    # Cook has extra neurons; we intentionally use Varshney's neuron list.
    neuron_metadata_collection = NeuronMetadataCollection.load_from_chem_json(
        get_data_file_abs_path('chem.json'))
    conn_spec_to_weight_gap, conn_spec_to_weight_chem = load_connectome_dict_cook()
    return (build_connectome_matrix_from_dict(conn_spec_to_weight_gap,
                                              neuron_metadata_collection),
            build_connectome_matrix_from_dict(conn_spec_to_weight_chem,
                                              neuron_metadata_collection))
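A hypothetical comparison sketch, assuming this loader and load_connectome_varshney (Example 1) live in the same module and return NumPy matrices over the same neuron ordering:

# Hypothetical comparison of the two connectomes, reusing the loaders defined above.
Gg_cook, Gs_cook = load_connectome_cook()
Gg_varshney, Gs_varshney = load_connectome_varshney()
# Count chemical connections present in Cook et al. 2019 but absent from Varshney et al. 2011.
extra_chem = np.logical_and(Gs_cook > 0, Gs_varshney == 0).sum()
print('Chemical connections unique to Cook:', extra_chem)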
Example 4
def load_connectome_dict_cook():
    """
    Get the gap junction and chemical synapse connectivity dictionaries from Cook et al., 2019.
    The returned value is a tuple of (conn_spec_to_weight_gap, conn_spec_to_weight_chem).
    Each conn_spec_to_weight is a dictionary mapping a conn_spec to a connection weight,
    where conn_spec is a tuple of (source neuron, target neuron).

    Usage:
      conn_spec_to_weight_gap, conn_spec_to_weight_chem = load_connectome_dict_cook()
      # This gives gap junction weight from ASHL to ASHR
      conn_spec_to_weight_gap[('ASHL', 'ASHR')]
    """
    connectome_file = 'herm_full_edgelist.csv'

    # key = conn_spec = (from, to)
    # value = total weight
    conn_spec_to_weight_chem = {}
    conn_spec_to_weight_gap = {}
    with open(get_data_file_abs_path(connectome_file), newline='') as csvfile:
        reader = csv.DictReader(csvfile)
        for row in reader:
            source = row['Source'].upper().strip()
            target = row['Target'].upper().strip()
            # Weights are stored as strings in the CSV; convert to a number.
            weight = float(row['Weight'].strip())
            conn_type = row['Type'].upper().strip()
            conn_spec = (source, target)

            conn_spec_to_weight = None
            if conn_type == "CHEMICAL":
                conn_spec_to_weight = conn_spec_to_weight_chem
            elif conn_type == "ELECTRICAL":
                conn_spec_to_weight = conn_spec_to_weight_gap
            else:
                raise Exception("Invalid connection type: " + conn_type)

            if conn_spec in conn_spec_to_weight:
                raise Exception(
                    "Duplicate entry exists for %s. Previous value is %s, new value is %s" %
                    (conn_spec, conn_spec_to_weight[conn_spec], weight))
            conn_spec_to_weight[conn_spec] = weight
    return (conn_spec_to_weight_gap, conn_spec_to_weight_chem)
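A small aggregation sketch (hypothetical, not part of the original code; it assumes the stored weights are numeric):

# Hypothetical aggregation: total outgoing chemical synapse weight per source neuron.
from collections import defaultdict

conn_spec_to_weight_gap, conn_spec_to_weight_chem = load_connectome_dict_cook()
outgoing_chem = defaultdict(float)
for (source, target), weight in conn_spec_to_weight_chem.items():
    outgoing_chem[source] += weight
# Five neurons with the largest total outgoing chemical synapse weight.
print(sorted(outgoing_chem.items(), key=lambda kv: -kv[1])[:5])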
Example 5
# Simple script to run the neural model without displaying anything.
# Useful for debugging.

import numpy as np
import project_path
from model.data_accessor import get_data_file_abs_path
from model.neuron_metadata import *
from model.neural_model import NeuralModel

neuron_metadata_collection = NeuronMetadataCollection.load_from_chem_json(get_data_file_abs_path('chem.json'))
N = neuron_metadata_collection.get_size()

# Constant current injections will be made to these neurons.
stimulus = {
  "PLML": 1.4,
  "PLMR": 1.4,
}
# How many timesteps to run simulation for.
simul_time = 2000

# Initial condition
# If you want a fixed-seed initial condition, uncomment the line below.
# np.random.seed(0)
init_conds = 10**(-4)*np.random.normal(0, 0.94, 2*N)


model = NeuralModel(neuron_metadata_collection)
model.init_conds = init_conds

model.set_I_ext_constant_currents(stimulus)
model.init_kunert_2017()
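
# A second driver script begins here: it sweeps stimulus amplitudes and
# stimulated neuron sets over both the Cook and Varshney connectomes.
# (run_one_case is assumed to be defined further down in the original script.)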
import numpy as np
import pylab as plt
import project_path
from model.data_accessor import get_data_file_abs_path
from model.neuron_metadata import *
from model.neural_model import NeuralModel
import model.init_conds as init_conds
from util.plot_util import *

import pdb

neurons_to_observe = ["PLML", "PLMR"]

neuron_metadata_collection = NeuronMetadataCollection.load_from_chem_json(
    get_data_file_abs_path('chem.json'))
N = neuron_metadata_collection.get_size()
model = NeuralModel(neuron_metadata_collection)


def main():
    for neurons_to_stimulate in [["PLML"], ["PLML", "PLMR"]]:
        for stim_amp_nA in [0.001, 0.005, 0.01, 0.05, 0.1, 0.5, 1, 2, 5]:
            case_suffix = "_".join(neurons_to_stimulate) + "_" + str(stim_amp_nA)
            run_one_case(case_name="cook_static_vth_" + case_suffix,
                         neurons_to_stimulate=neurons_to_stimulate,
                         stim_amp_nA=stim_amp_nA,
                         use_cook_connectome=True,
                         use_static_vth=True)
            run_one_case(case_name="varshney_static_vth_" + case_suffix,