import os
from NeuroTools.parameters import ParameterSpace


def get_md5s(PS_prefix):
    '''return a list of md5 hash keys for the parameter space named by PS_prefix'''
    PS = ParameterSpace(os.path.join('parameters', PS_prefix + '.pspace'))
    md5s = []
    for PSet in PS.iter_inner():
        md5s.append(get_md5(PSet))  # get_md5 hashes one ParameterSet (not shown in this snippet)
    return md5s
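The get_md5 helper is not shown in this snippet. A minimal sketch of one possible implementation, assuming the key is an md5 digest of a stable string form of the ParameterSet's dictionary (the hashing scheme here is illustrative, not the original project's code):

import hashlib

def get_md5(PSet):
    '''illustrative sketch: hash a ParameterSet into a reproducible md5 key'''
    # sort the flattened items so the digest does not depend on dictionary ordering
    flat = sorted(PSet.as_dict().items())
    return hashlib.md5(repr(flat).encode('utf-8')).hexdigest()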
def make_param_dict_list():
    """
    create a list of parameter dictionaries for the model network.
    """
    # there is certainly a way to do this with NeuroTools. 
    import numpy
    rates = numpy.linspace(start=10., stop=100., num=5)
    weights = numpy.linspace(start=0.1, stop=1.0, num=5)
    from NeuroTools.parameters import ParameterSet, ParameterSpace, ParameterRange
    params = ParameterSpace(ParameterSet({'rate':ParameterRange(rates), 
                                          'weight': ParameterRange(weights)}))
    dictlist = [p.as_dict() for p in params.iter_inner()]
    return dictlist
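For reference, iter_inner() walks the full Cartesian product of every ParameterRange in the space, so with 5 rates and 5 weights the function above yields 25 dictionaries. A small check, assuming the function is importable as written:

dictlist = make_param_dict_list()
assert len(dictlist) == 25                            # 5 rates x 5 weights
assert set(dictlist[0].keys()) == {'rate', 'weight'}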
Example 3
def make_param_dict_list(N):
    """
    create a list of parameter dictionaries for the model network.
    """
    N_snr, N_seeds = 5, 10
    from NeuroTools.parameters import ParameterSpace, ParameterRange
    import numpy
    params = ParameterSpace({
        'N': N,
        'snr': ParameterRange(list(numpy.linspace(0.1, 2.0, N_snr))),
        'kernelseed': ParameterRange([12345 + k for k in range(N_seeds)])})

    dictlist = [p.as_dict() for p in params.iter_inner()]
    return dictlist
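Note that only ParameterRange entries are expanded by iter_inner(); the plain value N is carried unchanged into every dictionary, so the call below produces 5 x 10 = 50 parameter sets. A minimal sketch, assuming the function above is importable:

dictlist = make_param_dict_list(N=1000)
assert len(dictlist) == 50                     # 5 snr values x 10 kernel seeds
assert all(d['N'] == 1000 for d in dictlist)   # the fixed entry is replicated, not swept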
def make_param_dict_list():
    """
    create a list of parameter dictionaries for the model network.
    """
    # there is certainly a way to do this with NeuroTools.
    import numpy
    rates = numpy.linspace(start=10., stop=100., num=5)
    weights = numpy.linspace(start=0.1, stop=1.0, num=5)
    from NeuroTools.parameters import ParameterSet, ParameterSpace, ParameterRange
    params = ParameterSpace(
        ParameterSet({
            'rate': ParameterRange(rates),
            'weight': ParameterRange(weights)
        }))
    dictlist = [p.as_dict() for p in params.iter_inner()]
    return dictlist
Example 5
def make_param_dict_list(N):
    """
    create a list of parameter dictionaries for the model network.
    """
    N_snr, N_seeds = 5, 10
    from NeuroTools.parameters import ParameterSpace, ParameterRange
    import numpy
    params = ParameterSpace({
        'N': N,
        'snr': ParameterRange(list(numpy.linspace(0.1, 2.0, N_snr))),
        'kernelseed': ParameterRange([12345 + k for k in range(N_seeds)])
    })

    dictlist = [p.as_dict() for p in params.iter_inner()]
    return dictlist
Example 6
    retina.params['amplitude'] = numpy.ones(retina.params['amplitude'].shape)

    # calculates the dimensions and labels of the parameter space
    results_dim, results_label = p.parameter_space_dimension_labels()

    # run the model once to get the time axis used for the smoothed responses
    data = retina.run(retina.params, verbose=False)
    lower_edges = data['out_ON_DATA'].time_axis(t_smooth)
    N_smooth = len(lower_edges)

    temporal_ON, temporal_OFF = [], []
    import progressbar  # see http://projects.scipy.org/pipermail/scipy-dev/2008-January/008200.html
    pbar = progressbar.ProgressBar(widgets=[name, " ", progressbar.Percentage(), ' ',
                                            progressbar.Bar(), ' ', progressbar.ETA()],
                                   maxval=N_exp)
    for i_exp, experiment in enumerate(p.iter_inner()):
        params = retina.params
        params.update(experiment)  # updates what changed in the dictionary
        # simulate the experiment and get its data
        data = retina.run(params, verbose=False)
        # calculating the index in the parameter space
        index = p.parameter_space_index(experiment)
        # store the population-averaged temporal responses for this experiment
        temporal_ON.append(sum(data['out_ON_DATA'].firing_rate(t_smooth)) / N)
        temporal_OFF.append(sum(data['out_OFF_DATA'].firing_rate(t_smooth)) / N)
        pbar.update(i_exp)

    results['lower_edges'] = lower_edges
    results['temporal_ON'] = temporal_ON
    results['temporal_OFF'] = temporal_OFF
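results_dim is not actually used in this fragment (the responses are appended to plain lists), but it is exactly what is needed to preallocate a results array that parameter_space_index can fill, as the cross-correlation example below does. A minimal sketch of that pattern, reusing the names from this fragment and assuming one scalar value per experiment:

import numpy

results_dim, results_label = p.parameter_space_dimension_labels()
scalar_results = numpy.empty(results_dim)        # one cell per grid point of the space
for experiment in p.iter_inner():
    index = p.parameter_space_index(experiment)  # tuple position along each ParameterRange
    scalar_results[index] = 0.0                  # placeholder for the measured quantity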
Example 7
    parser.error("incorrect number of arguments")

test_script, url = args[:2]
script_args = args[2:]
trials = options.trials
if hasattr(options, "host_list"):
    host_list = options.host_list
else:
    host_list = [socket.gethostname()] # by default, run just on the current host

# iterate over the parameter space, creating a job each time
parameter_space = ParameterSpace(url)
tempfiles = []
job_manager = JobManager(host_list, delay=0, quiet=False)

for sub_parameter_space in parameter_space.iter_inner(copy=True):
    for parameter_set in sub_parameter_space.realize_dists(n=trials, copy=True):
        ##print parameter_set.pretty()
        fd, tmp_url = tempfile.mkstemp(dir=os.getcwd())
        os.close(fd)
        tempfiles.append(tmp_url)
        parameter_set.save(tmp_url)
        job_manager.run(test_script, parameter_set._url, *script_args)

# wait until all jobs have finished    
job_manager.wait()

# retrieve results stored by the jobs. We use the NeuroTools datastore, so
# the test module itself is the storage key.
test_module = __import__(test_script.replace(".py",""))
ds = datastore.ShelveDataStore(root_dir=parameter_space.results_dir,
Example 8
    cc = numpy.correlate(st1.time_histogram(time_bin=1.0),
                         st2.time_histogram(time_bin=1.),
                         mode='same')
    corrcoef = numpy.corrcoef(st1.time_histogram(time_bin=1.0),
                              st2.time_histogram(time_bin=1.))
    time_axis_cc = numpy.linspace(-cc.shape[0] / 2., cc.shape[0] / 2.,
                                  cc.shape[0])
    return cc, time_axis_cc, corrcoef[0][1]


# creating a results array, with the dimensions of the ParameterSpace
corrcoef_results = numpy.empty(dims)

# scanning the ParameterSpace
for experiment in p.iter_inner():
    # calculation of the index in the space
    index = p.parameter_space_index(experiment)
    # performing the experiment
    cc, time_axis_cc, corrcoef = calc_cc(experiment)
    corrcoef_results[index] = corrcoef
    # plotting the cc's
    subplot_index = (dims[1] * index[0]) + index[1]
    pylab.subplot(dims[0], dims[1], subplot_index + 1)
    pylab.plot(time_axis_cc, cc)
    pylab.title(make_name(experiment, p.range_keys()))
    pylab.xlim(-30, 30.)
    pylab.ylim(0, 10.)

# plot the results
pylab.matshow(corrcoef_results)
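numpy.corrcoef on two equal-length vectors returns the 2x2 Pearson correlation matrix, so corrcoef[0][1] above picks out the single coefficient between the two time histograms (st1 vs st2); the diagonal entries are always 1. A quick standalone illustration:

import numpy

a = numpy.array([0., 1., 2., 3., 4.])
b = numpy.array([0., 1., 2., 3., 8.])
c = numpy.corrcoef(a, b)   # 2x2 matrix [[1, r], [r, 1]]
r = c[0][1]                # off-diagonal Pearson coefficient, roughly 0.91 here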
Example 9
test_script, url = args[:2]
script_args = args[2:]
trials = options.trials
if hasattr(options, "host_list"):
    host_list = options.host_list
else:
    host_list = [socket.gethostname()]  # by default, run just on the current host

# iterate over the parameter space, creating a job each time
parameter_space = ParameterSpace(url)
tempfiles = []
job_manager = JobManager(host_list, delay=0, quiet=False)

for sub_parameter_space in parameter_space.iter_inner(copy=True):
    for parameter_set in sub_parameter_space.realize_dists(n=trials,
                                                           copy=True):
        ##print parameter_set.pretty()
        fd, tmp_url = tempfile.mkstemp(dir=os.getcwd())
        os.close(fd)
        tempfiles.append(tmp_url)
        parameter_set.save(tmp_url)
        job_manager.run(test_script, parameter_set._url, *script_args)

# wait until all jobs have finished
job_manager.wait()

# retrieve results stored by the jobs. We use the NeuroTools datastore, so
# the test module itself is the storage key.
test_module = __import__(test_script.replace(".py", ""))
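Unlike iter_inner(), which enumerates explicit ParameterRange values, realize_dists(n=trials) draws n concrete ParameterSets wherever the space holds a parameter distribution, which is why each sub-space above fans out into trials jobs. A minimal sketch of that behaviour, assuming NeuroTools.parameters provides a NormalDist distribution class taking mean/std arguments (names and values below are illustrative):

from NeuroTools.parameters import ParameterSpace, NormalDist

# one fixed parameter and one distribution-valued parameter (illustrative names)
space = ParameterSpace({'weight': 0.5,
                        'noise_rate': NormalDist(mean=10.0, std=2.0)})
realizations = list(space.realize_dists(n=5, copy=True))
# 5 ParameterSets, each with weight == 0.5 and a freshly sampled noise_rate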
Example 10

        st1.merge(st3.jitter(p.jitter))
        st2.merge(st3.jitter(p.jitter))
    
    cc = numpy.correlate(st1.time_histogram(time_bin=1.0),
                         st2.time_histogram(time_bin=1.0),
                         mode='same')
    corrcoef = numpy.corrcoef(st1.time_histogram(time_bin=1.0),
                              st2.time_histogram(time_bin=1.0))
    time_axis_cc = numpy.linspace(-cc.shape[0] / 2., cc.shape[0] / 2., cc.shape[0])
    return cc, time_axis_cc, corrcoef[0][1]


# creating a results array, with the dimensions of the ParameterSpace
corrcoef_results = numpy.empty(dims)

# scanning the ParameterSpace
for experiment in p.iter_inner():
    # calculation of the index in the space
    index = p.parameter_space_index(experiment)
    # performing the experiment
    cc, time_axis_cc, corrcoef = calc_cc(experiment)
    corrcoef_results[index] = corrcoef
    # plotting the cc's
    subplot_index = (dims[1] * index[0]) + index[1]
    pylab.subplot(dims[0], dims[1], subplot_index + 1)
    pylab.plot(time_axis_cc, cc)
    pylab.title(make_name(experiment, p.range_keys()))
    pylab.xlim(-30., 30.)
    pylab.ylim(0., 10.)


# plot the results
pylab.matshow(corrcoef_results)
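The subplot position used above is just the row-major flattening of the grid index, dims[1] * index[0] + index[1]; numpy.ravel_multi_index computes the same thing and generalises beyond two swept parameters. A small check:

import numpy

dims = (3, 4)                                          # e.g. a 3 x 4 parameter grid
index = (2, 1)
flat = dims[1] * index[0] + index[1]                   # 9, as passed to pylab.subplot (+1)
assert flat == numpy.ravel_multi_index(index, dims)    # row-major (C-order) flattening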
Example 11
    # run the model once to get the time axis used for the smoothed responses
    data = retina.run(retina.params, verbose=False)
    lower_edges = data['out_ON_DATA'].time_axis(t_smooth)
    N_smooth = len(lower_edges)

    temporal_ON, temporal_OFF = [], []
    import progressbar  # see http://projects.scipy.org/pipermail/scipy-dev/2008-January/008200.html
    pbar = progressbar.ProgressBar(widgets=[name, " ", progressbar.Percentage(), ' ',
                                            progressbar.Bar(), ' ', progressbar.ETA()],
                                   maxval=N_exp)
    for i_exp, experiment in enumerate(p.iter_inner()):
        params = retina.params
        params.update(experiment)  # updates what changed in the dictionary
        # simulate the experiment and get its data
        data = retina.run(params, verbose=False)
        # calculating the index in the parameter space
        index = p.parameter_space_index(experiment)
        # store the population-averaged temporal responses for this experiment
        temporal_ON.append(sum(data['out_ON_DATA'].firing_rate(t_smooth)) / N)
        temporal_OFF.append(sum(data['out_OFF_DATA'].firing_rate(t_smooth)) / N)
        pbar.update(i_exp)

    results['lower_edges'] = lower_edges
    results['temporal_ON'] = temporal_ON