## initialization #####################################################################
## Step 0: initialize the morphology
# Specify the path to an '.swc' file.
morphfile = 'morphologies/ball_and_stick_taper.swc'
# Define the ion channel distributions for dendrites and soma. Here the neuron model is
# passive.
d_distr = {'L': {'type': 'fit', 'param': [Veq, 50.], 'E': Veq, 'calctype': 'pas'}}
s_distr = {'L': {'type': 'fit', 'param': [Veq, 50.], 'E': Veq, 'calctype': 'pas'}}
# Initialize a greensTree. Here, all the quantities are stored to compute the GF in the
# frequency domain (algorithm of Koch and Poggio, 1985).
greenstree = morphR.greensTree(morphfile, soma_distr=s_distr, ionc_distr=d_distr,
                               cnodesdistr='all')
# Initialize a greensFunctionCalculator using the previously created greensTree. This
# class stores all variables necessary to compute the GF in a format fit for simulation,
# either the plain time domain or with the partial fraction decomposition.
gfcalc = morphR.greensFunctionCalculator(greenstree)
gfcalc.set_impedances_logscale(fmax=7, base=10, num=200)
# Now a list of input locations needs to be defined. For the sparse reformulation, the
# first location needs to be the soma.
inlocs = [{'node': 1, 'x': .5, 'ID': 0},
          {'node': 4, 'x': .5, 'ID': 1},
          {'node': 5, 'x': .5, 'ID': 2},
          {'node': 6, 'x': .5, 'ID': 3},
          {'node': 7, 'x': .5, 'ID': 4},
          {'node': 8, 'x': .5, 'ID': 5},
          {'node': 9, 'x': .5, 'ID': 6}]
## Steps 1, 2, 3 and 4:
# Find the sets of nearest neighbours, compute the necessary GF kernels, then compute
# the sparse kernels and fit the partial fraction decomposition using the VF algorithm.
alphas, gammas, pairs, Ms = gfcalc.kernelSet_sparse(inlocs, FFT=False,
                                                    kernelconstants=True)
## Step 4 bis: compute the vectors that will be used in the simulation
prep = neurM.preprocessor()
mat_dict_hybrid = prep.construct_volterra_matrices_hybrid(dt, alphas, gammas, K,
                                                          pprint=False)
## Examples of steps that happen within the kernelSet_sparse function
## Step 1: example to find the nearest neighbours
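# --------------------------------------------------------------------------------------
# (The Step 1 nearest-neighbour example itself is not reproduced in this excerpt.)
# Illustrative sketch, not part of the library: why the partial fraction (sum of
# exponentials) form returned above is convenient for time-domain simulation. Once a
# kernel is written as k(t) = sum_m gamma_m * exp(-alpha_m * t), its convolution with an
# input current can be advanced recursively, with one state variable per exponential
# term, instead of storing the full input history. The names `alphas_ij`, `gammas_ij`
# and the forward-Euler update below are assumptions made for this sketch only.
import numpy as np

def convolve_exponential_kernel(alphas_ij, gammas_ij, I, dt):
    """Recursively convolve an input current trace I with the kernel
    k(t) = sum_m gammas_ij[m] * exp(-alphas_ij[m] * t), using time step dt."""
    y = np.zeros(len(alphas_ij), dtype=complex)  # one state variable per exponential
    V = np.zeros(len(I))
    prop = np.exp(-alphas_ij * dt)               # per-step decay of each state variable
    for n in range(len(I)):
        # each state decays and integrates the current input sample (rectangle rule)
        y = prop * y + gammas_ij * dt * I[n]
        V[n] = np.sum(y).real                    # exponentials come in conjugate pairs
    return V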
def measure_velocity(greenstree, greenstree_pas, node_inds_of_Ranvier, ind_ais,
                     Vth=-20, run_NEURON=False, pprint=False, pplot=False, temp=18.5):
    # parameters
    dt = .025
    tmax = 70.
    V0 = -65.
    inds = [n for n in range(6, 11)]
    number_of_nodes = 8
    # initialize a greens function calculator
    gfcalc = morphR.greensFunctionCalculator(greenstree)
    gfcalc.set_impedances_logscale(fmax=7, base=10, num=200)
    gfcalc_pas = morphR.greensFunctionCalculator(greenstree_pas)
    gfcalc_pas.set_impedances_logscale(fmax=7, base=10, num=200)
    # input locations
    inlocs = [{'node': n, 'x': 0.5, 'ID': ind}
              for ind, n in enumerate([1] + [ind_ais] + node_inds_of_Ranvier)]
    # integration point conductances
    gs_point, es_point = morphR.get_axon_node_conductances(greenstree,
                                                           node_inds_of_Ranvier, ind_ais)
    # input
    Iclamps = [{'ID': 0, 'x': inlocs[0]['x'], 'node': 1,
                'delay': 15., 'dur': 2., 'amp': .5}]
    # compute SGF
    alphas, gammas, pairs, Ms = gfcalc_pas.kernelSet_sparse(inlocs, FFT=False,
                                                            kernelconstants=True,
                                                            pprint=False)
    # preprocessor test
    prep = neurM.preprocessor()
    mat_dict_On = prep.construct_volterra_matrices_On(dt, alphas, gammas, pprint=False)
    sv_dict = prep.construct_ionchannel_matrices(inlocs, gs_point, es_point, temp=temp)
    I_in = prep.construct_current_input_matrix(dt, tmax, inlocs, Iclamps)
    # backwards integration
    axon1 = neurM.axon_vectorized(len(inlocs), sv_dict, mat_dict_On, E_eq=V0)
    result = axon1.run_volterra_back_On(tmax, dt, I_in=I_in)
    if run_NEURON:
        # run the NEURON simulation for comparison
        HHneuron = neurM.NeuronNeuron(greenstree, dt=dt, truemorph=False, factorlambda=10)
        HHneuron.add_Iclamp(Iclamps)
        HHneuron.add_recorder(inlocs)
        Vm = HHneuron.run(tdur=tmax, pprint=True)
    if pplot:
        if run_NEURON:
            pl.plot(Vm['t'], Vm[inds[0]], 'r')
            pl.plot(Vm['t'], Vm[inds[0] + number_of_nodes], 'b')
            pl.plot(Vm['t'], Vm[len(result['Vm']) - 1], 'g')
        pl.plot(result['t'], result['Vm'][inds[0]], 'r--', lw=2)
        pl.plot(result['t'], result['Vm'][inds[0] + number_of_nodes], 'b--', lw=2)
        pl.plot(result['t'], result['Vm'][-1], 'g--', lw=2)
        pl.show()
    # compute velocity
    v_list = []
    if run_NEURON:
        v_list_NEURON = []
    for j in inds:
        # i_rv = node_inds_of_Ranvier[j]
        t1 = threshold_crossing_time(result['Vm'][j], dt, Vth=Vth)
        t2 = threshold_crossing_time(result['Vm'][j + number_of_nodes], dt, Vth=Vth)
        node_ind_ranvier = node_inds_of_Ranvier[-1]
        node_ind_myelin = node_ind_ranvier - 1
        node_ranvier = greenstree.tree.get_node_with_index(node_ind_ranvier)
        node_myelin = greenstree.tree.get_node_with_index(node_ind_myelin)
        Dx = number_of_nodes * (node_ranvier.get_content()['impedance'].length +
                                node_myelin.get_content()['impedance'].length) * 1e-2  # m
        Dt = (t2 - t1) * 1e-3  # s
        v_list.append(Dx / Dt)  # m/s
        if run_NEURON:
            t1_ = threshold_crossing_time(Vm[j], dt, Vth=Vth)
            t2_ = threshold_crossing_time(Vm[j + number_of_nodes], dt, Vth=Vth)
            Dt_ = (t2_ - t1_) * 1e-3  # s
            v_list_NEURON.append(Dx / Dt_)
    v_avg = np.mean(np.array(v_list))
    if pprint:
        print 'velocity= ', v_avg, ' m/s'
    if run_NEURON:
        v_avg_NEURON = np.mean(np.array(v_list_NEURON))
        if pprint:
            print v_avg_NEURON
    if run_NEURON:
        return v_avg, result, Vm
    else:
        return v_avg
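# --------------------------------------------------------------------------------------
# Sketch of the `threshold_crossing_time` helper called above; it is not defined in this
# listing. The minimal version below is an assumption: it returns the time (in ms) at
# which a voltage trace first crosses `Vth` from below, which is what the velocity
# computation requires, and np.nan if no spike reaches threshold.
import numpy as np

def threshold_crossing_time(V, dt, Vth=-20.):
    """Return the time (ms) of the first upward crossing of Vth in the trace V,
    sampled with time step dt (ms)."""
    V = np.asarray(V)
    # indices where the trace goes from below threshold to at or above threshold
    crossings = np.where((V[:-1] < Vth) & (V[1:] >= Vth))[0]
    if len(crossings) == 0:
        return np.nan
    return (crossings[0] + 1) * dt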
## initialization ######################################################
morphfile = 'morphologies/stellate_v2.swc'
# alternative morphologies: 'ball_and_stick_taper.swc', 'N19ttwt.CNG.swc',
# '3y_tree.swc', 'neocortical_pyramidv2.swc'
# greens tree used for simulation
greenstree_sim = morphR.greensTree(morphfile, soma_distr=s_distr_sim,
                                   ionc_distr=distr_sim, pprint=False)
# greens tree used for the kernel calculation
# greenstree_calc = morphR.greensTree(morphfile, soma_distr=s_distr_calc,
#                                     ionc_distr=distr_calc, pprint=False)
greenstree_calc = copy.deepcopy(greenstree_sim)
snode = greenstree_calc.tree.get_node_with_index(1)
print 'number of dendrites: ', len(snode.get_child_nodes()[2:])
gs_soma = snode.get_content()['physiology'].gs
print gs_soma
print snode.get_content()['physiology'].es
for key in gs_soma.keys():
    gs_soma[key] = 0.
gfcalc = morphR.greensFunctionCalculator(greenstree_calc)
gfcalc.set_impedances_logscale(fmax=7, base=10, num=200)
inlocs = greenstree_calc.distribute_inlocs(num=50, distrtype='random', radius=0.0070)
# inlocs = [{'node': 1, 'x': 0.5, 'ID': 0}, {'node': 18, 'x': 0.6, 'ID': 1}]
# (inlocs, inlocs_2) = greenstree_calc.distribute_inlocs(num=15, distrtype='fromleaf',
#                                                        radius=0.0120, split_radius=0.0050)
# print inlocs
# print inlocs_2
print '\n>>> number of input locations = ', len(inlocs)
# print '\n>>> number of input locations avg = ', len(inlocs_2)
# calculate dendritic length
btst = btstats.BTStats(greenstree_calc.tree)
Ltot = btst.total_length()
print '\n>>> total length = ', Ltot, 'um'
# simulation gfcalc
gfcalc_sim = morphR.greensFunctionCalculator(greenstree_sim)
gfcalc_sim.set_impedances_logscale(fmax=7, base=10, num=2)
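# --------------------------------------------------------------------------------------
# Standalone sketch (an assumption, not the btstats implementation) of the quantity
# reported by `btst.total_length()` above: the total neurite length of the morphology,
# i.e. the sum of Euclidean distances between each sample point and its parent. It
# assumes the standard seven-column SWC layout (index, type, x, y, z, radius, parent)
# with coordinates in micrometer; soma contour samples are included here, so the result
# may differ slightly from the library's bookkeeping.
import numpy as np

def total_length_swc(fname):
    """Return the summed segment length (um) of a morphology in standard SWC format."""
    data = np.loadtxt(fname)
    index = data[:, 0].astype(int)
    xyz = data[:, 2:5]
    parent = data[:, 6].astype(int)
    pos = dict(zip(index, xyz))                  # sample index -> 3D position
    L = 0.
    for i, par in zip(index, parent):
        if par == -1:
            continue                             # the root sample has no parent segment
        L += np.linalg.norm(pos[i] - pos[par])
    return L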