def plot_baseline_data(targets=None):
    """Load the baseline voltage traces from 'TestGranNet.v.basedat' and run
    a simple network analysis on them.

    :param targets: optional iterable of analysis target names forwarded to
        utils.simple_network_analysis
    """
    file_name = 'TestGranNet.v.basedat'
    # Column layout of the data file: time, then one membrane-potential
    # trace per granule cell.
    cols = [
        't',
        'Gran/0/Granule_98/v',
        'Gran/1/Granule_98/v',
        'Gran/2/Granule_98/v',
    ]
    baseline = {c: [] for c in cols}
    # Use a context manager so the data file is closed even if parsing
    # fails (the original left the handle open).
    with open(file_name) as data_file:
        for line in data_file:
            # Every value is scaled by 1000 — presumably V -> mV and
            # s -> ms; TODO confirm against the file's units.
            for col, raw in zip(cols, line.split()):
                baseline[col].append(float(raw) * 1000)
    # Everything except the time column is a voltage trace.
    volts = {k: v for k, v in baseline.items() if k != 't'}
    utils.simple_network_analysis(volts,
                                  baseline['t'],
                                  targets=targets,
                                  plot=True,
                                  show_plot_already=False)
def plot_baseline_data(targets=None):
    """Read the saved baseline traces and analyse them.

    :param targets: analysis targets forwarded to
        utils.simple_network_analysis (may be None)
    """
    file_name = 'TestGranNet.v.basedat'
    cols = ['t',
            'Gran/0/Granule_98/v',
            'Gran/1/Granule_98/v',
            'Gran/2/Granule_98/v']
    baseline = {c: [] for c in cols}
    # 'with' guarantees the data file is closed — the original never
    # closed the handle returned by open().
    with open(file_name) as data_file:
        for line in data_file:
            # Scale by 1000 (presumably V -> mV / s -> ms; confirm units).
            for name, raw in zip(cols, line.split()):
                baseline[name].append(float(raw) * 1000)
    # All columns except time hold voltage traces.
    volts = {k: baseline[k] for k in baseline if k != 't'}
    utils.simple_network_analysis(volts, baseline['t'],
                                  targets=targets,
                                  plot=True,
                                  show_plot_already=False)
def get_data_metrics(datafile: Container) -> Tuple[Dict, Dict, Dict]:
    """Analyse the data to get metrics to tune against.

    :param datafile: NWB container whose acquisitions are keyed
        "CurrentClampSeries_NN" (zero-padded, 1-based)
    :returns: metrics from pyelectro analysis, currents, and the membrane
        potential values
    """
    analysis_results = {}
    currents = {}
    memb_vals = {}
    total_acquisitions = len(datafile.acquisition)
    # NOTE(review): range(1, total) skips the highest-numbered series if the
    # keys are 1-based and contiguous — confirm against the data files.
    for acq in range(1, total_acquisitions):
        print("Going over acquisition # {}".format(acq))
        # Hoist the series lookup: the original formatted and fetched the
        # same key three times per iteration.
        series = datafile.acquisition["CurrentClampSeries_{:02d}".format(acq)]
        # stimulus lasts about 1000ms, so we take about the first 1500 ms
        data_v = series.data[:15000] * 1000.0
        # get sampling rate from the data
        sampling_rate = series.rate
        # generate time steps from sampling rate
        data_t = np.arange(0, len(data_v) / sampling_rate,
                           1.0 / sampling_rate) * 1000.0
        # run the analysis
        analysis_results[acq] = simple_network_analysis({acq: data_v}, data_t)
        # extract current from description, but can be extracted from other
        # locations also, such as the CurrentClampStimulus series.
        data_i = (series.description.split("(")[1].split("~")[1]
                  .split(" ")[0])
        currents[acq] = data_i
        memb_vals[acq] = (data_t, data_v)
    return (analysis_results, currents, memb_vals)
# Fragment (continuation of a sweep-processing loop; 'stimulus', 'response',
# 'sweep_number', 'dataset_id', 'amp', 'chosen', 'spike_count', 'subthreshs'
# and 'spikings' are defined earlier, outside this view).
# Builds absolute time points from the sampling rate, runs a simple network
# analysis on the response, records the spike count, and buckets the sweep as
# subthreshold (no peaks) or spiking; the final two statements sort both
# buckets by stimulus amplitude.
# define some time points in seconds (i.e., convert to absolute time) time_pts = np.arange(0, len(stimulus) / sampling_rate, 1. / sampling_rate) * 1000 chosen['t'] = time_pts comment = 'Sweep: %i in %i; %sms -> %sms; %sA -> %sA; %smV -> %smV' % ( sweep_number, dataset_id, time_pts[0], time_pts[-1], np.amin(stimulus), np.amax(stimulus), np.amin(response), np.amax(response)) print(comment) analysis = utils.simple_network_analysis({sweep_number: response}, time_pts, end_analysis=1500, plot=False, show_plot_already=False, verbose=False) spike_count[sweep_number] = analysis['%s:max_peak_no' % sweep_number] subthresh = analysis['%s:max_peak_no' % sweep_number] == 0 if subthresh: subthreshs[sweep_number] = amp else: spikings[sweep_number] = amp subthreshs = OrderedDict( sorted(subthreshs.items(), key=operator.itemgetter(1))) spikings = OrderedDict(sorted(spikings.items(), key=operator.itemgetter(1)))
def extract_info_from_nwb_file(dataset_id, raw_ephys_file_name):
    """Extract sweep metadata and current-clamp analysis results from an NWB
    file and write them to '<dataset_id>_analysis.json'.

    Relies on module-level names: DH, test, pyel_ver, allensdk_ver, utils,
    plot, pp, time.

    :param dataset_id: id of the dataset (used in comments and the output
        file name)
    :param raw_ephys_file_name: path to the raw NWB (HDF5) ephys file
    """
    info = {}
    import h5py
    import numpy as np

    # Pull a few general metadata fields straight from the HDF5 layer.
    metas = ['aibs_cre_line',
             'aibs_dendrite_type',
             'intracellular_ephys/Electrode 1/location']
    h5f = h5py.File(raw_ephys_file_name, "r")
    try:
        for m in metas:
            d = h5f.get('/general/%s' % m)
            print("%s = \t%s" % (m, d.value))
            info[m.split('/')[-1]] = str(d.value)
    finally:
        # Close the HDF5 handle even if a metadata field is missing
        # (the original leaked it on error).
        h5f.close()

    from allensdk.core.nwb_data_set import NwbDataSet
    data_set = NwbDataSet(raw_ephys_file_name)
    sweep_numbers = data_set.get_experiment_sweep_numbers()
    # 'test' is a module-level flag: restrict to two known sweeps when set.
    if test:
        sweep_numbers = [33, 45]
    sweep_numbers.sort()

    info[DH.DATASET] = dataset_id
    info[DH.COMMENT] = 'Data analysed on %s' % (time.ctime())
    info[DH.PYELECTRO_VERSION] = pyel_ver
    info[DH.ALLENSDK_VERSION] = allensdk_ver
    info[DH.SWEEPS] = {}

    for sweep_number in sweep_numbers:
        sweep_data = data_set.get_sweep(sweep_number)
        # Only the long square current-injection sweeps are analysed.
        if data_set.get_sweep_metadata(sweep_number)['aibs_stimulus_name'] == "Long Square":
            sweep_info = {}
            sweep_info[DH.METADATA] = data_set.get_sweep_metadata(sweep_number)
            info[DH.SWEEPS]['%i' % sweep_number] = sweep_info
            sweep_info[DH.SWEEP] = sweep_number

            # start/stop indices that exclude the experimental test pulse (if applicable)
            index_range = sweep_data['index_range']
            # stimulus is a numpy array in amps
            stimulus = sweep_data['stimulus'][index_range[0]:index_range[-1]]
            # response is a numpy array in volts; convert to mV
            response = sweep_data['response'][index_range[0]:index_range[-1]] * 1000
            # sampling rate is in Hz
            sampling_rate = sweep_data['sampling_rate']
            # define some time points in seconds (i.e., convert to absolute time)
            time_pts = np.arange(0, len(stimulus) / sampling_rate,
                                 1. / sampling_rate) * 1000

            comment = 'Sweep: %i in %i; %sms -> %sms; %sA -> %sA; %smV -> %smV' % (
                sweep_number, dataset_id, time_pts[0], time_pts[-1],
                np.amin(stimulus), np.amax(stimulus),
                np.amin(response), np.amax(response))
            print(comment)
            sweep_info[DH.COMMENT] = comment

            analysis = utils.simple_network_analysis(
                {sweep_number: response},
                time_pts,
                extra_targets=['%s:value_280' % sweep_number,
                               '%s:average_1000_1200' % sweep_number,
                               '%s:average_100_200' % sweep_number],
                end_analysis=1500,
                plot=plot,
                show_plot_already=False,
                verbose=True)
            sweep_info[DH.ICLAMP_ANALYSIS] = analysis

    analysis_file_name = '%s_analysis.json' % (dataset_id)
    # Serialise via pprint, then patch quote characters so the output
    # parses as JSON (fragile but matches the existing file format).
    pretty = pp.pformat(info)
    pretty = pretty.replace('\'', '"')
    pretty = pretty.replace('u"', '"')
    # 'with' ensures the file is closed even if the write fails.
    with open(analysis_file_name, 'w') as analysis_file:
        analysis_file.write(pretty)
    print('Written info to %s' % analysis_file_name)
def extract_info_from_nwb_file(dataset_id, raw_ephys_file_name):
    """Extract sweep metadata and current-clamp analysis results from an NWB
    file and write them to '<dataset_id>_analysis.json'.

    Relies on module-level names: DH, test, pyel_ver, allensdk_ver, utils,
    plot, pp, time.

    :param dataset_id: id of the dataset (used in comments and the output
        file name)
    :param raw_ephys_file_name: path to the raw NWB (HDF5) ephys file
    """
    info = {}
    import h5py
    import numpy as np

    h5f = h5py.File(raw_ephys_file_name, "r")
    metas = [
        'aibs_cre_line', 'aibs_dendrite_type',
        'intracellular_ephys/Electrode 1/location'
    ]
    try:
        # Read general metadata fields directly from the HDF5 layer.
        for m in metas:
            d = h5f.get('/general/%s' % m)
            print("%s = \t%s" % (m, d.value))
            info[m.split('/')[-1]] = str(d.value)
    finally:
        # Guarantee the HDF5 handle is released even on error — the
        # original leaked it if a field lookup failed.
        h5f.close()

    from allensdk.core.nwb_data_set import NwbDataSet
    data_set = NwbDataSet(raw_ephys_file_name)
    sweep_numbers = data_set.get_experiment_sweep_numbers()
    # Module-level 'test' flag limits the run to two known sweeps.
    if test:
        sweep_numbers = [33, 45]
    sweep_numbers.sort()

    info[DH.DATASET] = dataset_id
    info[DH.COMMENT] = 'Data analysed on %s' % (time.ctime())
    info[DH.PYELECTRO_VERSION] = pyel_ver
    info[DH.ALLENSDK_VERSION] = allensdk_ver
    info[DH.SWEEPS] = {}

    for sweep_number in sweep_numbers:
        sweep_data = data_set.get_sweep(sweep_number)
        if data_set.get_sweep_metadata(
                sweep_number)['aibs_stimulus_name'] == "Long Square":
            sweep_info = {}
            sweep_info[DH.METADATA] = data_set.get_sweep_metadata(sweep_number)
            info[DH.SWEEPS]['%i' % sweep_number] = sweep_info
            sweep_info[DH.SWEEP] = sweep_number

            # start/stop indices that exclude the experimental test pulse (if applicable)
            index_range = sweep_data['index_range']
            # stimulus is a numpy array in amps
            stimulus = sweep_data['stimulus'][index_range[0]:index_range[-1]]
            # response is a numpy array in volts; convert to mV
            response = sweep_data['response'][
                index_range[0]:index_range[-1]] * 1000
            # sampling rate is in Hz
            sampling_rate = sweep_data['sampling_rate']
            # define some time points in seconds (i.e., convert to absolute time)
            time_pts = np.arange(0, len(stimulus) / sampling_rate,
                                 1. / sampling_rate) * 1000

            comment = 'Sweep: %i in %i; %sms -> %sms; %sA -> %sA; %smV -> %smV' % (
                sweep_number, dataset_id, time_pts[0], time_pts[-1],
                np.amin(stimulus), np.amax(stimulus), np.amin(response),
                np.amax(response))
            print(comment)
            sweep_info[DH.COMMENT] = comment

            analysis = utils.simple_network_analysis(
                {sweep_number: response},
                time_pts,
                extra_targets=[
                    '%s:value_280' % sweep_number,
                    '%s:average_1000_1200' % sweep_number,
                    '%s:average_100_200' % sweep_number
                ],
                end_analysis=1500,
                plot=plot,
                show_plot_already=False,
                verbose=True)
            sweep_info[DH.ICLAMP_ANALYSIS] = analysis

    analysis_file_name = '%s_analysis.json' % (dataset_id)
    # pprint + quote patching to produce JSON-parseable output (matches the
    # existing file format).
    pretty = pp.pformat(info)
    pretty = pretty.replace('\'', '"')
    pretty = pretty.replace('u"', '"')
    # Context manager closes the output file even if the write raises.
    with open(analysis_file_name, 'w') as analysis_file:
        analysis_file.write(pretty)
    print('Written info to %s' % analysis_file_name)
# Fragment (interior of a sweep-processing loop plus its aftermath; 'sweep_data',
# 'index_range', 'stimulus', 'sweep_number', 'dataset_id', 'amp', 'chosen',
# 'spike_count', 'subthreshs', 'spikings' come from enclosing code not in view).
# Converts the response to mV, builds time points from the sampling rate, runs
# the analysis, counts peaks, and classifies the sweep as subthreshold or
# spiking; after the loop both dicts are sorted by amplitude and printed.
response = sweep_data['response'][index_range[0]:index_range[-1]]*1000 chosen[sweep_number] = response # sampling rate is in Hz sampling_rate = sweep_data['sampling_rate'] # define some time points in seconds (i.e., convert to absolute time) time_pts = np.arange(0,len(stimulus)/sampling_rate,1./sampling_rate)*1000 chosen['t'] = time_pts comment = 'Sweep: %i in %i; %sms -> %sms; %sA -> %sA; %smV -> %smV'%(sweep_number, dataset_id, time_pts[0], time_pts[-1], np.amin(stimulus), np.amax(stimulus), np.amin(response), np.amax(response)) print(comment) analysis = utils.simple_network_analysis({sweep_number:response}, time_pts, end_analysis=1500, plot=False, show_plot_already=False, verbose=False) spike_count[sweep_number] = analysis['%s:max_peak_no'%sweep_number] subthresh = analysis['%s:max_peak_no'%sweep_number] == 0 if subthresh: subthreshs[sweep_number] = amp else: spikings[sweep_number] = amp subthreshs = OrderedDict(sorted(subthreshs.items(), key=operator.itemgetter(1))) spikings = OrderedDict(sorted(spikings.items(), key=operator.itemgetter(1))) print("Subthreshold sweeps: %s"%subthreshs) print("Spiking sweeps: %s"%spikings)
# Fragment (tail of a sweep-processing loop plus the final JSON write;
# 'sweep_data', 'stimulus', 'response', 'sweep_number', 'dataset_id',
# 'sweep_info', 'info', 'plot', 'pp', 'DH', 'utils' are defined outside this
# view). Runs the analysis with extra targets (end_analysis=1300 here, unlike
# the 1500 used in other variants of this code), then serialises 'info' via
# pprint with quote patching and writes it to '<dataset_id>_analysis.json'.
# NOTE(review): the output file is opened without 'with' — it would stay open
# if write() raised.
# sampling rate is in Hz sampling_rate = sweep_data['sampling_rate'] # define some time points in seconds (i.e., convert to absolute time) time_pts = np.arange(0,len(stimulus)/sampling_rate,1./sampling_rate)*1000 comment = 'Sweep: %i in %i; %sms -> %sms; %sA -> %sA; %smV -> %smV'%(sweep_number, dataset_id, time_pts[0], time_pts[-1], np.amin(stimulus), np.amax(stimulus), np.amin(response), np.amax(response)) print(comment) sweep_info[DH.COMMENT] = comment analysis = utils.simple_network_analysis({sweep_number:response}, time_pts, extra_targets = ['%s:value_280'%sweep_number, '%s:average_1000_1200'%sweep_number], end_analysis=1300, plot=plot, show_plot_already=False, verbose=True) sweep_info[DH.ICLAMP_ANALYSIS] = analysis analysis_file_name = '%s_analysis.json'%(dataset_id) analysis_file = open(analysis_file_name, 'w') pretty = pp.pformat(info) pretty = pretty.replace('\'', '"') pretty = pretty.replace('u"', '"') analysis_file.write(pretty) analysis_file.close() print('Written info to %s'%analysis_file_name)
# Fragment (two 'elif' branches of a command-line dispatch chain whose opening
# 'if' is outside this view; 'dt', 'sim_vars', 'target_data_pas',
# 'NeuroMLController', 'utils' come from enclosing code).
# '-mone': run the multi-cell granule network once with jNeuroML_NEURON and
# analyse/plot against the passive target data.
# '-mtune': set up parameters for an optimisation run (note 'simulator' is
# assigned twice in a row — the second assignment wins).
elif '-mone' in sys.argv: simulator = 'jNeuroML_NEURON' #simulator = 'jNeuroML' sim_time = 700 cont = NeuroMLController('TestGranNet', 'models/GranuleCellMulti.net.nml', 'network_GranuleCell_multi', sim_time, dt, simulator) t, v = cont.run_individual(sim_vars, show=False) plot_baseline_data(targets=target_data_pas.keys()) analysis = utils.simple_network_analysis( v, t, plot=True, targets=target_data_pas.keys()) elif '-mtune' in sys.argv: simulator = 'jNeuroML' simulator = 'jNeuroML_NEURON' prefix = 'TestGranNet' neuroml_file = 'models/GranuleCellMulti.net.nml' target = 'network_GranuleCell_multi' sim_time = 700 population_size = 20 max_evaluations = 20 num_selected = 5 num_offspring = 5
# Fragment (interior of a command-line dispatch chain; the branch header that
# sets 'simulator' and the names 'dt', 'sim_vars', 'target_data_pas',
# 'NeuroMLController', 'utils' are outside this view).
# Runs the multi-cell granule network once, plots baseline data, and analyses
# the result; the following '-mtune' branch sets up optimisation parameters
# (the consecutive 'simulator' assignments mean jNeuroML_NEURON wins).
#simulator = 'jNeuroML' sim_time = 700 cont = NeuroMLController('TestGranNet', 'models/GranuleCellMulti.net.nml', 'network_GranuleCell_multi', sim_time, dt, simulator) t, v = cont.run_individual(sim_vars, show=False) plot_baseline_data(targets = target_data_pas.keys()) analysis = utils.simple_network_analysis(v, t, plot=True, targets = target_data_pas.keys()) elif '-mtune' in sys.argv: simulator = 'jNeuroML' simulator = 'jNeuroML_NEURON' prefix = 'TestGranNet' neuroml_file = 'models/GranuleCellMulti.net.nml' target = 'network_GranuleCell_multi' sim_time = 700 population_size = 20
# Fragment (starts mid-expression — the opening of the np.arange(...) call is
# not in view; 'sweep_number', 'dataset_id', 'stimulus', 'response',
# 'sweep_info', 'info', 'plot', 'pp', 'DH', 'utils' are defined elsewhere).
# Completes the time-point computation, runs the analysis with extra targets
# (end_analysis=1300 in this variant), and writes the pprint-serialised,
# quote-patched 'info' dict to '<dataset_id>_analysis.json'.
# NOTE(review): output file opened without 'with' — stays open if write() raises.
1. / sampling_rate) * 1000 comment = 'Sweep: %i in %i; %sms -> %sms; %sA -> %sA; %smV -> %smV' % ( sweep_number, dataset_id, time_pts[0], time_pts[-1], np.amin(stimulus), np.amax(stimulus), np.amin(response), np.amax(response)) print(comment) sweep_info[DH.COMMENT] = comment analysis = utils.simple_network_analysis( {sweep_number: response}, time_pts, extra_targets=[ '%s:value_280' % sweep_number, '%s:average_1000_1200' % sweep_number ], end_analysis=1300, plot=plot, show_plot_already=False, verbose=True) sweep_info[DH.ICLAMP_ANALYSIS] = analysis analysis_file_name = '%s_analysis.json' % (dataset_id) analysis_file = open(analysis_file_name, 'w') pretty = pp.pformat(info) pretty = pretty.replace('\'', '"') pretty = pretty.replace('u"', '"') analysis_file.write(pretty) analysis_file.close()