def get_md5s(PS_prefix):
    """Return the list of md5 hash keys for every parameter set in the
    parameter-space file ``parameters/<PS_prefix>.pspace``.

    Fixes vs. original: the loop variable no longer shadows the builtin
    ``id``, the unused ``enumerate`` index is gone, and the
    append-in-a-loop is a comprehension.
    """
    PS = ParameterSpace(os.path.join('parameters', PS_prefix + '.pspace'))
    # one md5 key per realized parameter set, in iteration order
    return [get_md5(PSet) for PSet in PS.iter_inner()]
def make_param_dict_list():
    """Create a list of parameter dictionaries for the model network.

    Spans the Cartesian product of 5 rates (10..100) and 5 weights
    (0.1..1.0); returns one plain dict per combination.
    """
    # there is certainly a way to do this with NeuroTools.
    import numpy
    from NeuroTools.parameters import ParameterSet, ParameterSpace, ParameterRange

    rate_values = numpy.linspace(start=10., stop=100., num=5)
    weight_values = numpy.linspace(start=0.1, stop=1.0, num=5)
    space = ParameterSpace(ParameterSet({
        'rate': ParameterRange(rate_values),
        'weight': ParameterRange(weight_values),
    }))
    dicts = []
    for pset in space.iter_inner():
        dicts.append(pset.as_dict())
    return dicts
def make_param_dict_list(N):
    """Create a list of parameter dictionaries for the model network.

    For a fixed network size ``N``, spans N_snr signal-to-noise ratios
    (0.1..2.0) crossed with N_seeds RNG seeds (12345..); returns one
    dict per combination.
    """
    N_snr, N_seeds = 5, 10
    from NeuroTools.parameters import ParameterSpace, ParameterRange
    import numpy
    params = ParameterSpace({
        'N': N,
        # list() converts the numpy array to a plain list for ParameterRange
        'snr': ParameterRange(list(numpy.linspace(0.1, 2.0, N_snr))),
        # fixed: redundant list() wrapper around a list comprehension removed
        'kernelseed': ParameterRange([12345 + k for k in range(N_seeds)]),
    })
    dictlist = [p.as_dict() for p in params.iter_inner()]
    return dictlist
def make_param_dict_list():
    """Create a list of parameter dictionaries for the model network.

    Combines every rate in 10..100 (5 steps) with every weight in
    0.1..1.0 (5 steps) and returns the resulting parameter dicts.
    """
    # there is certainly a way to do this with NeuroTools.
    import numpy
    from NeuroTools.parameters import ParameterSet, ParameterRange, ParameterSpace

    base = ParameterSet({
        'rate': ParameterRange(numpy.linspace(start=10., stop=100., num=5)),
        'weight': ParameterRange(numpy.linspace(start=0.1, stop=1.0, num=5)),
    })
    space = ParameterSpace(base)
    return [point.as_dict() for point in space.iter_inner()]
def make_param_dict_list(N):
    """Create a list of parameter dictionaries for the model network.

    For a fixed network size ``N``, spans N_snr signal-to-noise ratios
    (0.1..2.0) crossed with N_seeds RNG seeds (12345..); returns one
    dict per combination.
    """
    N_snr, N_seeds = 5, 10
    from NeuroTools.parameters import ParameterSpace, ParameterRange
    import numpy
    params = ParameterSpace({
        'N': N,
        # list() converts the numpy array to a plain list for ParameterRange
        'snr': ParameterRange(list(numpy.linspace(0.1, 2.0, N_snr))),
        # fixed: redundant list() wrapper around a list comprehension removed
        'kernelseed': ParameterRange([12345 + k for k in range(N_seeds)]),
    })
    dictlist = [p.as_dict() for p in params.iter_inner()]
    return dictlist
def test_gridsearcher(self):
    """GridSearcher over a 5x5 grid of x*x + y*y must report minimum
    value 0.0 at the origin."""
    def objective(param_dict):
        x, y = param_dict['x'], param_dict['y']
        return x * x + y * y

    from NeuroTools.parameters import ParameterSpace, ParameterRange
    grid = ParameterSpace({
        'x': ParameterRange([-2., -1., 0., 1., 2.]),
        'y': ParameterRange([-2., -1., 0., 1., 2.]),
    })
    gs = optimizers.GridSearcher(grid, objective)
    retdict = gs.search()
    # failUnlessEqual is a deprecated unittest alias (removed in
    # Python 3.12); assertEqual is the supported spelling.
    self.assertEqual(retdict['min_value'], 0.)
    self.assertEqual(retdict['min_params'], {'x': 0., 'y': 0.})
Having fixed the background noise we are just studying now how different
signal-to-noise ratios are integrated by the neurons.

Laurent Perrinet, INCM, CNRS

$ Id $
"""
import os, sys, numpy, pylab, shelve

# experiment size: number of neurons and number of SNR conditions
N, N_exp = 1000, 6
t_smooth = 100. # width (in ms) of the integration window
from NeuroTools.parameters import ParameterSpace, ParameterRange
# SNR values to sweep: 6 points over 0.2 .. 4.0
snr = 2.0 * numpy.linspace(0.1,2.0,N_exp)
p = ParameterSpace({'snr' : ParameterRange(list(snr))})
name = sys.argv[0].split('.')[0] # name of the current script without the '.py' part
# persistent cache of previous results, keyed by this script's name
results = shelve.open('results/mat-' + name)
try:
    # reuse cached results when present
    temporal_ON = results['temporal_ON']
    temporal_OFF = results['temporal_OFF']
    lower_edges = results['lower_edges']
    params = results['params']
    #if (params == retina.params): raise('Parameters have changed')
except:
    # NOTE(review): bare except — any cache miss or error falls through to a
    # fresh simulation; consider `except KeyError:` to avoid hiding real bugs.
    from retina import *
    retina = Retina(N)
# -f/--hostfile populates options.host_list via the read_hostfile callback
# (parser and read_hostfile are defined earlier in this file).
parser.add_option("-f", "--hostfile",
                  action="callback", callback=read_hostfile, type=str,
                  help="Provide a hostfile")

(options, args) = parser.parse_args()

if len(args) < 2:
    parser.error("incorrect number of arguments")
# positional args: the script to run, the parameter-space URL, then
# anything extra is forwarded verbatim to the script
test_script, url = args[:2]
script_args = args[2:]
trials = options.trials
if hasattr(options, "host_list"):
    host_list = options.host_list
else:
    host_list = [socket.gethostname()] # by default, run just on the current host

# iterate over the parameter space, creating a job each time
parameter_space = ParameterSpace(url)
tempfiles = []
job_manager = JobManager(host_list, delay=0, quiet=False)
for sub_parameter_space in parameter_space.iter_inner(copy=True):
    # realize_dists draws `trials` concrete parameter sets per sub-space
    for parameter_set in sub_parameter_space.realize_dists(n=trials, copy=True):
        ##print parameter_set.pretty()
        # each parameter set is saved to its own temp file, whose URL is
        # passed to the job so the worker can load it
        fd, tmp_url = tempfile.mkstemp(dir=os.getcwd())
        os.close(fd)
        tempfiles.append(tmp_url)
        parameter_set.save(tmp_url)
        job_manager.run(test_script, parameter_set._url, *script_args)

# wait until all jobs have finished
job_manager.wait()
def default_parameters():
    """Return a ParameterSpace holding the default receptive-field and
    kernel parameters (amplitudes, time constants, Gaussian widths,
    spatio-temporal discretization).
    """
    # (name, value) table; assignment order matches the original code
    defaults = [
        # receptive-field amplitudes and kernel-shape constants
        ('Ac', 1.),
        ('As', 1. / 3.),
        ('K1', 1.05),
        ('K2', 0.7),
        ('c1', 0.14),
        ('c2', 0.12),
        ('n1', 7.),
        ('n2', 8.),
        ('t1', -6.),   # ms
        ('t2', -6.),   # ms
        ('td', 6.0),   # time difference between ON-OFF
        ('sigma_c', 0.3),   # 0.4 / Allen 2006 / sigma of center gauss, degree
        ('sigma_s', 1.5),   # sigma_c*1.5+0.4 / Allen 2006 / sigma of surround gauss, degree
        # kernel dims — spatial
        ('size', 10.),             # degree
        ('degree_per_pixel', 0.1133),
        # temporal
        ('dt', 1.0),       # ms
        ('duration', 200.),  # ms
    ]
    p = ParameterSpace({})
    for attr, value in defaults:
        setattr(p, attr, value)
    return p
def default_parameters():
    """Return a ParameterSpace with the default receptive-field and
    kernel parameters (amplitudes, time constants, Gaussian widths,
    spatio-temporal discretization)."""
    # receptive field parameters
    p = ParameterSpace({})
    # amplitudes and kernel-shape constants
    p.Ac = 1.
    p.As = 1./3.
    p.K1 = 1.05
    p.K2 = 0.7
    p.c1 = 0.14
    p.c2 = 0.12
    p.n1 = 7.
    p.n2 = 8.
    p.t1 = -6. # ms
    p.t2 = -6. # ms
    p.td = 6.0 # time difference between ON-OFF
    p.sigma_c = 0.3#0.4 # Allen 2006 # sigma of center gauss degree
    p.sigma_s = 1.5#p.sigma_c*1.5+0.4 # Allen 2006 # sigma of surround gauss degree
    # Kernel dims
    # temporal
    p.size = 10. # degree
    p.degree_per_pixel = 0.1133
    # spatial
    p.dt = 1.0 # ms
    p.duration = 200. # ms
    return p
# Parameter space for the 'simres_RS' simulation: a morphologically detailed
# L4 stellate cell driven by a distributed synapse population.  Each entry is
# wrapped in a single-element ParameterRange so iter_inner() can expand it.
PS['simres_RS'] = ParameterSpace({
    'morphology': os.path.join('morphologies', 'C120398A-P1.CNG_sansAxon.hoc'), #L4 Stellate Cell, rat
    # passive membrane properties
    'rm': ParameterRange([11250]),
    'Ra': ParameterRange([150]),
    'cm': ParameterRange([0.9]),
    'custom_code': ParameterRange([['add_spines.hoc']]),
    'active': ParameterRange([False]),
    'v_init': ParameterRange([-66.]), #Beierlein
    # synapse population: size and spatial extent
    'pop_params_n': ParameterRange([4000]),
    'pop_geom': ParameterRange([[500, -250, 250]]),
    # synaptic kinetics (ms) and strength
    'tau1': ParameterRange([0.2]),
    'tau2': ParameterRange([2.0]), #was 2, [1.75]),
    'weight': ParameterRange([0.0004]),
    'e': ParameterRange([0]),
    'spiketime': ParameterRange([2.4]),
    'disttype': ParameterRange(['hard_sphere']), #type of synaptic distribution probability
    'sigma': ParameterRange([[165]]), #spread of distribution
    'my': ParameterRange([35]), #offset
    'mean_n_syn': ParameterRange([7]),
    'section_syn': ParameterRange([['apic', 'dend']]),
    # recording / analysis geometry
    'icsd_diam': ParameterRange([400E-6]),
    'electrode_r': ParameterRange([15]),
    # NOTE(review): plain bool, not a ParameterRange, unlike every other
    # entry — presumably a status flag rather than a swept parameter; confirm.
    'simulated': False,
    'randomseed': ParameterRange(
        [474472279, 3495581941, 2563836960, 400995787, 3077891106, 123456789]),
})
- all the cc's are plotted

Performed at the NeuroTools demo session, INCF booth,
SfN annual meeting 2008, Washington, DC.
"""
import numpy, pylab
import NeuroTools.stgen as stgen

# spike-train generator used later in the demo
sg = stgen.StGen()
from NeuroTools.parameters import ParameterSpace
from NeuroTools.parameters import ParameterRange
from NeuroTools.sandbox import make_name

# creating a ParameterSpace
p = ParameterSpace({})

# adding fixed parameters
p.nu = 20. # rate [Hz]
p.duration = 1000.

# adding ParameterRanges: correlation coefficients and jitter values to sweep
p.c = ParameterRange([0.0, 0.01, 0.1, 0.5])
p.jitter = ParameterRange([0.0, 1.0, 5.0,])

# calculation of the ParameterSpace dimension and the labels of the parameters
# containing a range
                  help="Provide a hostfile")

(options, args) = parser.parse_args()

if len(args) < 2:
    parser.error("incorrect number of arguments")
# positional args: script to run, parameter-space URL; the rest is
# forwarded verbatim to the script
test_script, url = args[:2]
script_args = args[2:]
trials = options.trials
if hasattr(options, "host_list"):
    host_list = options.host_list
else:
    host_list = [socket.gethostname()] # by default, run just on the current host

# iterate over the parameter space, creating a job each time
parameter_space = ParameterSpace(url)
tempfiles = []
job_manager = JobManager(host_list, delay=0, quiet=False)
for sub_parameter_space in parameter_space.iter_inner(copy=True):
    # realize_dists draws `trials` concrete parameter sets per sub-space
    for parameter_set in sub_parameter_space.realize_dists(n=trials, copy=True):
        ##print parameter_set.pretty()
        # save each parameter set to its own temp file; the worker job
        # loads it back from this URL
        fd, tmp_url = tempfile.mkstemp(dir=os.getcwd())
        os.close(fd)
        tempfiles.append(tmp_url)
        parameter_set.save(tmp_url)
        job_manager.run(test_script, parameter_set._url, *script_args)

# wait until all jobs have finished
job_manager.wait()
- all the cc's are plotted

Performed at the NeuroTools demo session, INCF booth,
SfN annual meeting 2008, Washington, DC.
"""
import numpy, pylab
import NeuroTools.stgen as stgen

# spike-train generator used later in the demo
sg = stgen.StGen()
from NeuroTools.parameters import ParameterSpace
from NeuroTools.parameters import ParameterRange
from NeuroTools.sandbox import make_name

# creating a ParameterSpace
p = ParameterSpace({})

# adding fixed parameters
p.nu = 20. # rate [Hz]
p.duration = 1000.

# adding ParameterRanges: correlation coefficients and jitter values to sweep
p.c = ParameterRange([0.0,0.01,0.1,0.5])
p.jitter = ParameterRange([0.0,1.0,5.0,])

# calculation of the ParameterSpace dimension and the labels of the parameters
# containing a range
# (Python 2 print statements — this demo predates Python 3)
dims, labels = p.parameter_space_dimension_labels()
print "dimensions: ", dims
print ' labels: ', labels
Having fixed the background noise we are just studying now how different
signal-to-noise ratios are integrated by the neurons.

Laurent Perrinet, INCM, CNRS

$ Id $
"""
import os, sys, numpy, pylab, shelve

# experiment size: number of neurons and number of SNR conditions
N, N_exp = 1000, 6
t_smooth = 100. # width (in ms) of the integration window
from NeuroTools.parameters import ParameterSpace, ParameterRange
# SNR values to sweep: 6 points over 0.2 .. 4.0
snr = 2.0 * numpy.linspace(0.1, 2.0, N_exp)
p = ParameterSpace({'snr': ParameterRange(list(snr))})
name = sys.argv[0].split('.')[0] # name of the current script without the '.py' part
# persistent cache of previous results, keyed by this script's name
results = shelve.open('results/mat-' + name)
try:
    # reuse cached results when present
    temporal_ON = results['temporal_ON']
    temporal_OFF = results['temporal_OFF']
    lower_edges = results['lower_edges']
    params = results['params']
    #if (params == retina.params): raise('Parameters have changed')
except:
    # NOTE(review): bare except — any cache miss or error falls through to a
    # fresh simulation; consider `except KeyError:` to avoid hiding real bugs.
    from retina import *
    retina = Retina(N)