Example #1
def parameter_search_run_script( simulation_name, master_results_dir, run_script, core_number ):
    """
    Executes *run_script* once for each parameter combination of an existing parameter search run.
    Each execution receives as its first command-line argument the directory in which the results for the given
    parameter combination were stored.
    
    Parameters
    ----------
    simulation_name : str
                    The name of the simulation.
    master_results_dir : str
                    The directory where the parameter search results are stored.
    run_script : str
                    The name of the script to be run. The directory name of the given parameter combination datastore will be passed to it as the first command line argument.
    core_number : int
                How many cores to reserve per process.
    """
    with open(master_results_dir + '/parameter_combinations', 'rb') as f:
        combinations = pickle.load(f)
    
    # first check whether all parameter combinations contain the same parameter names
    assert len({tuple(sorted(comb.keys())) for comb in combinations}) == 1, \
        "The parameter search didn't occur over a fixed set of parameters"

    # The original imported Popen/PIPE/STDOUT but called subprocess.call on an
    # unbound name; import the module itself instead.
    import subprocess
    for i, combination in enumerate(combinations):
        rdn = master_results_dir + '/' + result_directory_name('ParameterSearch', simulation_name, combination)
        # Run the analysis script serially over this combination's result
        # directory, redirecting its output into OUTFILE_analysis<timestamp>.
        subprocess.call(' '.join(["python", run_script, "'" + rdn + "'", '>', "'" + rdn + '/OUTFILE_analysis' + str(time.time()) + "'"]), shell=True)
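
A minimal usage sketch (not part of the original example; the simulation name, results directory, and analysis script below are hypothetical):

if __name__ == '__main__':
    # Re-run 'analysis.py' over every parameter combination stored under
    # './ParamSearchResults'. Note that core_number is accepted for interface
    # compatibility but is unused by this serial variant.
    parameter_search_run_script('MySim', './ParamSearchResults',
                                'analysis.py', core_number=4)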
Example #2
def parameter_search_run_script_distributed_slurm(simulation_name,
                                                  master_results_dir,
                                                  run_script, core_number):
    """
    Schedules the execution of *run_script* once for each parameter combination of an existing parameter search run.
    Each execution receives as its first command-line argument the directory in which the results for the given
    parameter combination were stored.
    
    Parameters
    ----------
    simulation_name : str
                    The name of the simulation.
    master_results_dir : str
                    The directory where the parameter search results are stored.
    run_script : str
                    The name of the script to be run. The directory name of the given parameter combination datastore will be passed to it as the first command line argument.
    core_number : int
                How many cores to reserve per process.
    """
    with open(master_results_dir + '/parameter_combinations', 'rb') as f:
        combinations = pickle.load(f)

    # first check whether all parameter combinations contain the same parameter names
    assert len({tuple(sorted(comb.keys())) for comb in combinations}) == 1, \
        "The parameter search didn't occur over a fixed set of parameters"

    from subprocess import Popen, PIPE
    for i, combination in enumerate(combinations):
        rdn = master_results_dir + '/' + result_directory_name(
            'ParameterSearch', simulation_name, combination)
        p = Popen(['sbatch', '-o', master_results_dir + "/slurm_analysis-%j.out"],
                  stdin=PIPE, stdout=PIPE, stderr=PIPE, text=True)

        # THIS IS A BIT OF A HACK, have to add customization for other people ...
        data = '\n'.join([
            '#!/bin/bash',
            '#SBATCH -J MozaikParamSearchAnalysis',
            '#SBATCH -c ' + str(core_number),
            'source /opt/software/mpi/openmpi-1.6.3-gcc/env',
            'source /home/antolikjan/env/mozaiknew/bin/activate',
            'cd ' + os.getcwd(),
            'echo "DSADSA"',
            ' '.join(["mpirun", "--mca mtl ^psm", "python", run_script,
                      "'" + rdn + "'", '>',
                      "'" + rdn + '/OUTFILE_analysis' + str(time.time()) + "'"]),
        ])
        # Submit the job script on sbatch's stdin and echo the scheduler's reply.
        # communicate() closes stdin itself, so no explicit close is needed.
        print(p.communicate(input=data)[0])
        print(data)
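
A usage sketch (hypothetical names; assumes a SLURM cluster where the hard-coded environment paths above exist):

if __name__ == '__main__':
    # Submit one sbatch job per stored parameter combination, each reserving
    # 8 cores and running 'analysis.py' under mpirun.
    parameter_search_run_script_distributed_slurm('MySim',
                                                  './ParamSearchResults',
                                                  'analysis.py',
                                                  core_number=8)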
Example #3
def load_fixed_parameter_set_parameter_search(simulation_name,
                                              master_results_dir,
                                              filter=None):
    """
    Loads all datastores of parameter search over a fixed set of parameters. 
    
    Parameters
    ----------
    simulation_name : str
                    The name of the simulation.
    master_results_dir : str
                       The directory where the parameter search results are stored.
    
    Returns
    -------
    A tuple (parameters, datastores, number_of_unloadable_datastores). `parameters` is a list of the parameters over which
    the search was performed. `datastores` is a list of tuples (values, datastore), where `values` are the parameter values
    (in the same order as in `parameters`) and `datastore` is the DataStore holding the results recorded for that combination.
    `number_of_unloadable_datastores` is the count of result directories that could not be loaded.
    """
    with open(master_results_dir + '/parameter_combinations', 'rb') as f:
        combinations = pickle.load(f)

    # first check whether all parameter combinations contain the same parameter names
    assert len({tuple(sorted(comb.keys())) for comb in combinations}) == 1, \
        "The parameter search didn't occur over a fixed set of parameters"

    parameters = list(combinations[0].keys())  # fix the parameter order for indexing below

    datastore = []
    number_of_unloadable_datastores = 0
    for i, combination in enumerate(combinations):
        print(i)
        rdn = result_directory_name('ParameterSearch', simulation_name,
                                    combination)
        try:
            data_store = PickledDataStore(
                load=True,
                parameters=ParameterSet({
                    'root_directory': master_results_dir + '/' + rdn,
                    'store_stimuli': False,
                }),
                replace=False)
            if filter is not None:
                filter.query(data_store).remove_ads_outside_of_dsv()

            datastore.append(
                ([combination[k] for k in parameters], data_store))
        except IOError:
            number_of_unloadable_datastores += 1
            print("Error loading datastore: " + rdn)

    return (parameters, datastore, number_of_unloadable_datastores)
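
A consumption sketch for the return value (the results directory is hypothetical; no filter is applied):

parameters, datastores, n_failed = load_fixed_parameter_set_parameter_search(
    'MySim', './ParamSearchResults')
print("Search performed over parameters:", parameters)
print("Unloadable datastores:", n_failed)
for values, data_store in datastores:
    # `values` is aligned with `parameters`; `data_store` holds the results
    # recorded for this parameter combination.
    print(dict(zip(parameters, values)))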
Example #4
def load_fixed_parameter_set_parameter_search(simulation_name,master_results_dir,filter=None):
    """
    Loads all datastores of parameter search over a fixed set of parameters. 
    
    Parameters
    ----------
    simulation_name : str
                    The name of the simulation.
    master_results_dir : str
                       The directory where the parameter search results are stored.
    
    Returns
    -------
    A tuple (parameters, datastores, number_of_unloadable_datastores). `parameters` is a list of the parameters over which
    the search was performed. `datastores` is a list of tuples (values, datastore), where `values` are the parameter values
    (in the same order as in `parameters`) and `datastore` is the DataStore holding the results recorded for that combination.
    `number_of_unloadable_datastores` is the count of result directories that could not be loaded.
    """
    with open(master_results_dir + '/parameter_combinations', 'rb') as f:
        combinations = pickle.load(f)
    
    # first check whether all parameter combinations contain the same parameter names
    assert len({tuple(sorted(comb.keys())) for comb in combinations}) == 1, \
        "The parameter search didn't occur over a fixed set of parameters"
    
    parameters = list(combinations[0].keys())  # fix the parameter order for indexing below
    
    datastore = []
    number_of_unloadable_datastores = 0
    for i, combination in enumerate(combinations):
        print(i)
        rdn = result_directory_name('ParameterSearch', simulation_name, combination)
        try:
            data_store = PickledDataStore(load=True, parameters=ParameterSet({'root_directory': master_results_dir + '/' + rdn, 'store_stimuli': False}), replace=False)
            if filter is not None:
                filter.query(data_store).remove_ads_outside_of_dsv()

            datastore.append(([combination[k] for k in parameters], data_store))
        # The three originally separate, identical handlers are merged here.
        except (IOError, ValueError, EOFError):
            number_of_unloadable_datastores += 1
            print("Error loading datastore: " + rdn)
    return (parameters, datastore, number_of_unloadable_datastores)
Example #5
def parameter_search_run_script_distributed_slurm(simulation_name,master_results_dir,run_script,core_number):
    """
    Schedules the execution of *run_script* once for each parameter combination of an existing parameter search run.
    Each execution receives as its first command-line argument the directory in which the results for the given
    parameter combination were stored.
    
    Parameters
    ----------
    simulation_name : str
                    The name of the simulation.
    master_results_dir : str
                    The directory where the parameter search results are stored.
    run_script : str
                    The name of the script to be run. The directory name of the given parameter combination datastore will be passed to it as the first command line argument.
    core_number : int
                How many cores to reserve per process.
    """
    with open(master_results_dir + '/parameter_combinations', 'rb') as f:
        combinations = pickle.load(f)
    
    # first check whether all parameter combinations contain the same parameter names
    assert len({tuple(sorted(comb.keys())) for comb in combinations}) == 1, \
        "The parameter search didn't occur over a fixed set of parameters"
    
    from subprocess import Popen, PIPE
    for i,combination in enumerate(combinations):
        rdn = master_results_dir+'/'+result_directory_name('ParameterSearch',simulation_name,combination)    
        p = Popen(['sbatch', '-o', master_results_dir + "/slurm_analysis-%j.out"], stdin=PIPE, stdout=PIPE, stderr=PIPE, text=True)
         
        # THIS IS A BIT OF A HACK, have to add customization for other people ...            
        data = '\n'.join([
            '#!/bin/bash',
            '#SBATCH -J MozaikParamSearchAnalysis',
            '#SBATCH -c ' + str(core_number),
            'source /opt/software/mpi/openmpi-1.6.3-gcc/env',
            'source /home/antolikjan/env/mozaik/bin/activate',
            'cd ' + os.getcwd(),
            'echo "DSADSA"',
            ' '.join(["mpirun", "--mca mtl ^psm", "python", run_script,
                      "'" + rdn + "'", '>',
                      "'" + rdn + '/OUTFILE_analysis' + str(time.time()) + "'"]),
        ])
        # Submit the job script on sbatch's stdin and echo the scheduler's reply.
        # communicate() closes stdin itself, so no explicit close is needed.
        print(p.communicate(input=data)[0])
        print(data)
Example #6
def run_workflow(simulation_name, model_class, create_experiments):
    """
    This is the main function that executes a workflow.

    It expects the simulation name, the class of the model, and a function that creates the experiments.
    The create_experiments function gets an instance of the model as its only parameter and is expected to return
    a list of Experiment instances to be executed over the model.

    run_workflow automatically parses the command line to determine the simulator to be used and the path to the root parameter file.
    It also accepts dot-delimited paths to parameters in the configuration tree, together with corresponding values, and replaces each such
    parameter's value with the one provided on the command line.
    
    Parameters
    ----------
    simulation_name : str
                    The name of the simulation.
    
    model_class : class
                The class from which the model instance will be created from.
    
    create_experiments : func
                       The function that returns the list of experiments that will be executed on the model.
    
    Examples
    --------
    The intended syntax of the commandline is as follows (note that the simulation run name is the last argument):
    
    >>> python userscript simulator_name num_threads parameter_file_path modified_parameter_path_1 modified_parameter_value_1 ... modified_parameter_path_n modified_parameter_value_n simulation_run_name
    """
    (
        simulation_run_name,
        simulator_name,
        num_threads,
        parameters_url,
        modified_parameters,
    ) = parse_workflow_args()

    print "Loading parameters"
    parameters = load_parameters(parameters_url, modified_parameters)
    print "Finished loading parameters"

    p = {}
    if parameters.has_key('mozaik_seed'):
        p['mozaik_seed'] = parameters['mozaik_seed']
    if parameters.has_key('pynn_seed'):
        p['pynn_seed'] = parameters['pynn_seed']

    print "START MPI"

    mozaik.setup_mpi(**p)
    # Import the simulator backend directly; the original exec-based import
    # does not bind `sim` in function scope under Python 3.
    import pyNN.nest as sim

    # Create results directory
    timestamp = datetime.now().strftime('%Y%m%d-%H%M%S')

    ddir = result_directory_name(simulation_run_name, simulation_name,
                                 modified_parameters)

    if mozaik.mpi_comm and mozaik.mpi_comm.rank != 0:
        Global.root_directory = parameters.results_dir + ddir + '/' + str(
            mozaik.mpi_comm.rank) + '/'
        mozaik.mpi_comm.barrier()
    else:
        Global.root_directory = parameters.results_dir + ddir + '/'

    os.makedirs(Global.root_directory)
    if mozaik.mpi_comm and mozaik.mpi_comm.rank == 0:
        mozaik.mpi_comm.barrier()

    if mozaik.mpi_comm.rank == 0:
        # Store the full and modified parameters, since we are the rank 0 process
        parameters.save(Global.root_directory + "parameters", expand_urls=True)
        import pickle
        # pickle requires a binary-mode file handle
        with open(Global.root_directory + "modified_parameters", "wb") as f:
            pickle.dump(modified_parameters, f)

    setup_logging()

    model = model_class(sim, num_threads, parameters)

    if mozaik.mpi_comm.rank == 0:
        # Store some basic info about the simulation run
        with open(Global.root_directory + "info", "w") as f:
            f.write(
                str({
                    'model_class': str(model_class),
                    'model_docstring': model_class.__doc__,
                    'simulation_run_name': simulation_run_name,
                    'model_name': simulation_name,
                    'creation_data': datetime.now().strftime('%d/%m/%Y-%H:%M:%S')
                }))

    #import cProfile
    #cProfile.run('run_experiments(model,create_experiments(model),parameters)','stats_new')

    data_store = run_experiments(model, create_experiments(model), parameters)

    if mozaik.mpi_comm.rank == 0:
        data_store.save()

    import resource
    print("Final memory usage: %iMB" %
          (resource.getrusage(resource.RUSAGE_SELF).ru_maxrss // 1024))
    return (data_store, model)
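
A minimal driver-script sketch (MyModel and create_experiments are hypothetical; they would come from the user's own package):

from my_model import MyModel, create_experiments  # hypothetical module

data_store, model = run_workflow('MySim', MyModel, create_experiments)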
Example #7
from parameters import ParameterSet

#mpi_comm = MPI.COMM_WORLD
logger = mozaik.getMozaikLogger()
simulation_name = "VogelsAbbott2005"
simulation_run_name, _, _, _, modified_parameters = parse_workflow_args()

if True:
    data_store,model = run_workflow(simulation_name,VogelsAbbott,create_experiments)
    model.connectors['ExcExcConnection'].store_connections(data_store)    
else: 
    setup_logging()
    data_store = PickledDataStore(
        load=True,
        parameters=ParameterSet(
            {
                "root_directory": result_directory_name(
                    simulation_run_name, simulation_name, modified_parameters
                ),
                "store_stimuli": False,
            }
        ),
        replace=True,
    )
    logger.info('Loaded data store')

#if mpi_comm.rank == 0:
print("Starting visualization")
perform_analysis_and_visualization(data_store)
data_store.save() 
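
Following the command-line syntax documented for run_workflow above, a driver script like this would be launched roughly as follows (script name, parameter file path, and the modified-parameter pair are hypothetical):

# python run_va.py nest 8 param/defaults sheets.exc.density 100 TrialRun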
Example #8
    # first check whether all parameter combinations contain the same parameter names
    assert len({tuple(sorted(comb.keys())) for comb in combinations}) == 1, \
        "The parameter search didn't occur over a fixed set of parameters"

    simulation_runs = []
    working_combinations = []
    for i, combination in enumerate(combinations):
        # Un-escape string-valued parameters and strip quotation marks.
        # Note: the 'string_escape' codec exists only under Python 2.
        combination = {
            x: (y.decode('string_escape').decode('string_escape').replace("'", '')
                if type(y) == str else y)
            for x, y in combination.items()
        }

        rdn = result_directory_name('ParameterSearch', sys.argv[2],
                                    combination)
        print(rdn)
        try:
            simulation_runs.append(
                createSimulationRunDocumentAndUploadImages(
                    os.path.join(master_results_dir, rdn), gfs))
            working_combinations.append(combination)
        except Exception as e:
            print("WARRNING, error in: >> " + rdn + ".\n Error:" + str(e))

    document = {
        'submission_date':
        datetime.datetime.now().strftime('%d/%m/%Y-%H:%M:%S'),
        'name': simulation_name,
        #'name' : master_results_dir,
        'simulation_runs': simulation_runs,
Example #9
def run_workflow(simulation_name, model_class, create_experiments):
    """
    This is the main function that executes a workflow.

    It expects the simulation name, the class of the model, and a function that creates the experiments.
    The create_experiments function gets an instance of the model as its only parameter and is expected to return
    a list of Experiment instances to be executed over the model.

    run_workflow automatically parses the command line to determine the simulator to be used and the path to the root parameter file.
    It also accepts dot-delimited paths to parameters in the configuration tree, together with corresponding values, and replaces each such
    parameter's value with the one provided on the command line.
    
    Parameters
    ----------
    simulation_name : str
                    The name of the simulation.
    
    model_class : class
                The class from which the model instance will be created from.
    
    create_experiments : func
                       The function that returns the list of experiments that will be executed on the model.
    
    Examples
    --------
    The intended syntax of the commandline is as follows (note that the simulation run name is the last argument):
    
    >>> python userscript simulator_name num_threads parameter_file_path modified_parameter_path_1 modified_parameter_value_1 ... modified_parameter_path_n modified_parameter_value_n simulation_run_name
    """
    mozaik.setup_mpi()
    # Import the simulator backend directly; the original exec-based import
    # does not bind `sim` in function scope under Python 3.
    import pyNN.nest as sim
    
    if len(sys.argv) > 4 and len(sys.argv) % 2 == 1:
        simulation_run_name = sys.argv[-1]
        simulator_name = sys.argv[1]
        num_threads = sys.argv[2]
        parameters_url = sys.argv[3]
        modified_parameters = {sys.argv[i * 2 + 4]: eval(sys.argv[i * 2 + 5])
                               for i in range((len(sys.argv) - 5) // 2)}
    else:
        raise ValueError("Usage: runscript simulator_name num_threads parameter_file_path modified_parameter_path_1 modified_parameter_value_1 ... modified_parameter_path_n modified_parameter_value_n simulation_run_name")
    parameters = load_parameters(parameters_url,modified_parameters)
    
    # Create results directory
    timestamp = datetime.now().strftime('%Y%m%d-%H%M%S')
    
    ddir  = result_directory_name(simulation_run_name,simulation_name,modified_parameters)
    
    if mozaik.mpi_comm and mozaik.mpi_comm.rank != 0:
        Global.root_directory = parameters.results_dir + ddir + '/' + str(mozaik.mpi_comm.rank) + '/'
        mozaik.mpi_comm.barrier()                                  
    else:
        Global.root_directory = parameters.results_dir + ddir + '/'
    
    
    os.makedirs(Global.root_directory)
    if mozaik.mpi_comm and mozaik.mpi_comm.rank == 0:
        mozaik.mpi_comm.barrier()
    
    # Store the full and modified parameters, if we are the rank 0 process
    if mozaik.mpi_comm.rank == 0:
        parameters.save(Global.root_directory + "parameters", expand_urls=True)
        import pickle
        # pickle requires a binary-mode file handle
        with open(Global.root_directory + "modified_parameters", "wb") as f:
            pickle.dump(modified_parameters, f)

    setup_logging()
    
    model = model_class(sim,num_threads,parameters)
    data_store = run_experiments(model,create_experiments(model),parameters)

    if mozaik.mpi_comm.rank == 0:
        data_store.save()

    import resource
    print("Final memory usage: %iMB" %
          (resource.getrusage(resource.RUSAGE_SELF).ru_maxrss // 1024))
    return (data_store, model)
Example #10
def prepare_workflow(simulation_name, model_class):
    """
    Executes the following preparatory steps for simulation workflow:
        - Load simulation parameters
        - Initialize random seeds
        - Create directory for results
        - Store loaded parameters
        - Setup logging
        - Store some initial info about the simulation

    Returns
    -------
    sim : module
          The pyNN.nest simulator module to use for the simulation
    num_threads : int
                 Number of threads to use for the simulation
    parameters : dict
                 Loaded parameters to initialize the simulation and model with
    """
    (
        simulation_run_name,
        simulator_name,
        num_threads,
        parameters_url,
        modified_parameters,
    ) = parse_workflow_args()

    # First we load the parameters just to retrieve seeds. We will throw them away, because at this stage the PyNNDistribution values were not yet initialized correctly.
    parameters = load_parameters(parameters_url, modified_parameters)
    p = {}
    if 'mozaik_seed' in parameters:
        p['mozaik_seed'] = parameters['mozaik_seed']
    if 'pynn_seed' in parameters:
        p['pynn_seed'] = parameters['pynn_seed']

    # Now initialize mpi with the seeds
    print "START MPI"
    mozaik.setup_mpi(**p)

    # Now really load parameters
    print "Loading parameters"
    parameters = load_parameters(parameters_url, modified_parameters)
    print "Finished loading parameters"

    exec "import pyNN.nest as sim" in globals(), locals()

    # Create results directory
    timestamp = datetime.now().strftime('%Y%m%d-%H%M%S')

    ddir = result_directory_name(simulation_run_name, simulation_name,
                                 modified_parameters)

    if mozaik.mpi_comm and mozaik.mpi_comm.rank != 0:
        Global.root_directory = parameters.results_dir + ddir + '/' + str(
            mozaik.mpi_comm.rank) + '/'
        mozaik.mpi_comm.barrier()
    else:
        Global.root_directory = parameters.results_dir + ddir + '/'

    os.makedirs(Global.root_directory)
    if mozaik.mpi_comm and mozaik.mpi_comm.rank == 0:
        mozaik.mpi_comm.barrier()

    if mozaik.mpi_comm.rank == 0:
        # Let's store the full and modified parameters, if we are the 0 rank process
        parameters.save(Global.root_directory + "parameters", expand_urls=True)
        import pickle
        # pickle requires a binary-mode file handle
        with open(Global.root_directory + "modified_parameters", "wb") as f:
            pickle.dump(modified_parameters, f)

    setup_logging()

    if mozaik.mpi_comm.rank == 0:
        # Let's store some basic info about the simulation run
        with open(Global.root_directory + "info", "w") as f:
            f.write(
                str({
                    'model_class': str(model_class),
                    'model_docstring': model_class.__doc__,
                    'simulation_run_name': simulation_run_name,
                    'model_name': simulation_name,
                    'creation_data': datetime.now().strftime('%d/%m/%Y-%H:%M:%S')
                }))
    return sim, num_threads, parameters
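
A usage sketch showing how the return values would feed a model, mirroring run_workflow above (MyModel is hypothetical):

sim, num_threads, parameters = prepare_workflow('MySim', MyModel)
model = MyModel(sim, num_threads, parameters)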