import os
import sys
from os import path

from sumatra.parameters import build_parameters


def setup(param_file=None):
    if param_file:
        p = build_parameters(param_file)
        base_path = path.join(path.dirname(__file__), 'test', 'data')
    else:
        p = build_parameters(sys.argv[1])
        base_path = p['base_path']
    output_dir = path.join(base_path, p['result_path'], p['sumatra_label'])
    if not path.exists(output_dir):
        os.mkdir(output_dir)
    return p, base_path, output_dir
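# A minimal sketch (assumed, not taken from the project above) of a
# SimpleParameterSet-style file that setup() could consume; the
# 'sumatra_label' entry is appended by Sumatra to the parameter file it
# generates at run time.
example_param_file = """\
base_path = "/home/me/project"   # hypothetical location
result_path = "results"
"""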
def smt_run(self, line):
    args = parse_argstring(self.smt_run, line)

    global parameters
    if args.flush:
        parameters = build_parameters(args.parameters)
    else:
        parameters = globals().get('parameters', build_parameters(args.parameters))

    global save
    save = args.save

    if getattr(args, 'print'):  # 'print' is a keyword, so the attribute needs getattr()
        print(12 * "-" + " Script " + 12 * "-")
        with open(args.main_file, 'r') as f:
            script = f.readlines()
        print(''.join(script), end='')
        print(32 * "-", end="\n\n")
        print(10 * "-" + " Parameters " + 10 * "-")
        print(parameters)
        print(32 * "-", end="\n\n")

    if args.record:
        global record
        project = load_project()
        record = project.new_record(main_file=os.path.relpath(args.main_file),
                                    parameters=parameters)
        print("Record label for this run: '%s'" % record.label)

    start_time = time.time()
    # exec() replaces the Python 2-only execfile()
    exec(open(args.main_file).read(), globals(), parameters.as_dict())
    duration = time.time() - start_time

    if args.record:
        fname = "%s" % record.label
        # dict.has_key() was removed in Python 3; use the 'in' operator
        if 'data' in globals():
            np.savetxt("Data/%s.dat" % fname, data)
        if 'fig' in globals():
            fig.savefig("Data/%s.png" % fname)
        record.duration = duration
        record.output_data = record.datastore.find_new_data(record.timestamp)
        project.add_record(record)
        project.save()
        print("Data keys are [%s (%s [%s])]" % (record.label, record.version,
                                                record.timestamp))
    elif save is True:
        fname = "%s_%s" % (time.strftime("%y%m%d-%H%M%S", time.gmtime(start_time)),
                           os.path.splitext(os.path.basename(args.main_file))[0])
        if 'data' in globals():
            np.savetxt("%s.dat" % fname, data)  # Save data
        if 'fig' in globals():
            fig.savefig("%s.png" % fname)

    print("Duration: %.2fs" % duration)
def create_functional_network(cellParamName, nwParamName):
    '''
    Public interface: used for creating fixed functional connectivity.
    cellParamName - parameter file of postsynaptic neuron
    nwParamName - parameter file of anatomical network
    '''
    preParam = build_parameters(cellParamName)
    neuronParam = preParam.neuron
    nwParam = build_parameters(nwParamName)
    for mech in nwParam.NMODL_mechanisms.values():
        neuron.load_mechanisms(mech)
    parser = cell_parser.CellParser(neuronParam.filename)
    parser.spatialgraph_to_cell()
    nwMap = NetworkMapper(parser.cell, nwParam)
    nwMap.create_functional_realization()
def register_record(label, reason=None, tag=None):
    """
    Register a simulation with the Sumatra project.
    Loads the Sumatra project in the current repository.

    Parameters
    ----------
    label : str
        Simulation label
    reason : str, optional
        Reason for the simulation run, stored in the Sumatra database
    tag : str, optional
        Tag for the simulation run, stored in the Sumatra database
    """
    project = load_project()
    para_fn = os.path.join(data_path, label, '_'.join(('custom_params', label)))
    parameters = build_parameters(para_fn)
    record = project.new_record(parameters=parameters,
                                main_file='nest_simulation.py',
                                reason=reason,
                                label=label)
    record.duration = 0.  # Add 0 for now and update later
    project.add_record(record)
    project.save()
    if tag is not None:
        project.add_tag(label, tag)
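# Hypothetical usage of register_record(); the label and tag are invented and
# assume an existing Sumatra project plus a 'custom_params_<label>' file under
# data_path:
#
#     register_record('run-001', reason='baseline simulation', tag='baseline')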
def create_synapse_realization(pname):
    parameters = build_parameters(pname)
    cellParam = parameters.network.post
    preParam = parameters.network.pre
    parser = cell_parser.CellParser(cellParam.filename)
    parser.spatialgraph_to_cell()
    cell = parser.cell
    for preType in preParam.keys():
        synapseFName = preParam[preType].synapses.distributionFile
        synDist = reader.read_scalar_field(synapseFName)
        mapper = SynapseMapper(cell, synDist)
        mapper.create_synapses(preType)

    for synType in cell.synapses.keys():
        name = parameters.info.outputname
        name += '_'
        name += synType
        name += '_syn_realization'
        uniqueID = str(os.getpid())
        timeStamp = time.strftime('%Y%m%d-%H%M')
        name += '_' + timeStamp + '_' + uniqueID
        synapseList = []
        for syn in cell.synapses[synType]:
            synapseList.append(syn.coordinates)
        writer.write_landmark_file(name, synapseList)
        tmpSyns = {}
        tmpSyns[synType] = cell.synapses[synType]
        writer.write_cell_synapse_locations(name + '.syn', tmpSyns, cell.id)
def parse_arguments(args, input_datastore, stdin=None, stdout=None,
                    allow_command_line_parameters=True):
    cmdline_parameters = []
    script_args = []
    parameter_sets = []
    input_data = []
    for arg in args:
        have_parameters = False
        if os.path.isfile(arg):  # could be a parameter file or a data file
            parameters = build_parameters(arg)
            if parameters is not None:
                parameter_sets.append(parameters)
                script_args.append("<parameters>")
                have_parameters = True
        if not have_parameters:
            if input_datastore.contains_path(arg):
                data_key = input_datastore.generate_keys(arg)
                input_data.extend(data_key)
                script_args.append(arg)
            elif allow_command_line_parameters and "=" in arg:  # command-line parameter
                cmdline_parameters.append(arg)
            else:  # a flag or something, passed on unchanged
                script_args.append(arg)
    if stdin:
        script_args.append("< %s" % stdin)
        if input_datastore.contains_path(stdin):
            data_key = input_datastore.generate_keys(stdin)
            input_data.extend(data_key)
        else:
            raise IOError("File does not exist: %s" % stdin)
    if stdout:
        script_args.append("> %s" % stdout)
    assert len(parameter_sets) < 2, "No more than one parameter file may be supplied."  # temporary restriction
    if cmdline_parameters:
        if parameter_sets:
            ps = parameter_sets[0]
            for cl in cmdline_parameters:
                try:
                    ps.update(ps.parse_command_line_parameter(cl))
                except ValueError as v:
                    message, name, value = v.args
                    warnings.warn(message)
                    warnings.warn("'{0}={1}' not defined in the parameter file".format(name, value))
                    ps.update({name: value})  # for now, add the command-line parameter anyway
        else:
            # ought really to have a more specific Exception and to catch it
            # so as to give a helpful error message to the user
            raise Exception("Command-line parameters supplied but without a "
                            "parameter file to put them into.")
    return parameter_sets, input_data, " ".join(script_args)
def parse_arguments(args, input_datastore, stdin=None, stdout=None,
                    allow_command_line_parameters=True):
    cmdline_parameters = []
    script_args = []
    parameter_sets = []
    input_data = []
    for arg in args:
        have_parameters = False
        if os.path.isfile(arg):  # could be a parameter file or a data file
            parameters = build_parameters(arg)
            if parameters is not None:
                parameter_sets.append(parameters)
                script_args.append("<parameters>")
                have_parameters = True
        if not have_parameters:
            if arg[0] == "/":
                path = arg
            else:
                path = os.path.relpath(arg, input_datastore.root)
            if input_datastore.contains_path(path):
                data_key = input_datastore.generate_keys(path)
                input_data.extend(data_key)
                script_args.append(arg)
            elif allow_command_line_parameters and "=" in arg:  # command-line parameter
                cmdline_parameters.append(arg)
            else:  # a flag or something, passed on unchanged
                script_args.append(arg)
    if stdin:
        script_args.append("< %s" % stdin)
        if input_datastore.contains_path(stdin):
            data_key = input_datastore.generate_keys(stdin)
            input_data.extend(data_key)
        else:
            raise IOError("File does not exist: %s" % stdin)
    if stdout:
        script_args.append("> %s" % stdout)
    assert len(parameter_sets) < 2, "No more than one parameter file may be supplied."  # temporary restriction
    if cmdline_parameters:
        if parameter_sets:
            ps = parameter_sets[0]
            for cl in cmdline_parameters:
                try:
                    ps.update(ps.parse_command_line_parameter(cl))
                except ValueError as v:
                    message, name, value = v.args
                    warnings.warn(message)
                    warnings.warn("'{0}={1}' not defined in the parameter file".format(name, value))
                    ps.update({name: value})  # for now, add the command-line parameter anyway
        else:
            # ought really to have a more specific Exception and to catch it
            # so as to give a helpful error message to the user
            raise Exception("Command-line parameters supplied but without a "
                            "parameter file to put them into.")
    return parameter_sets, input_data, " ".join(script_args)
def test__build_parameters_config(self):
    P = build_parameters("test_file.config")
    self.assertEqual(P.as_dict(),
                     {'sectionA': {'a': '2', 'b': '3'},
                      'sectionB': {'c': 'hello', 'd': 'world'}})
    self.assertIsInstance(P, ConfigParserParameterSet)
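# A sketch of what "test_file.config" plausibly contains for the assertion
# above to hold; ConfigParserParameterSet reads INI syntax and returns all
# values as strings:
config_contents = """\
[sectionA]
a = 2
b = 3

[sectionB]
c = hello
d = world
"""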
def sumatra_start(repository, sumatra_db_path, results_path, working_dir,
                  hg_username, sumatra_run_name, parameters):
    '''Clones the Omics Pipe repository from Bitbucket, creates a Sumatra
    project, and creates a Sumatra record for the current run'''
    print("sumatra_db_path is " + sumatra_db_path)
    print(type(sumatra_db_path))
    check_create_dir(sumatra_db_path)
    os.chdir(sumatra_db_path)
    repo1 = hgapi.Repo(repository)
    repo_path = sumatra_db_path + "/omics_pipe"
    repo = {"url": repo_path,
            "type": "sumatra.versioncontrol._mercurial.MercurialRepository",
            "upstream": repository}
    executable = {"path": "", "version": "",
                  "type": "sumatra.programs.PythonExecutable",
                  "options": "", "name": "Python"}
    sumatra_launch_mode = {"working_directory": working_dir,
                           "type": "sumatra.launch.SerialLaunchMode"}
    data_store1 = {"root": results_path,
                   "type": "sumatra.datastore.filesystem.FileSystemDataStore"}
    database_path = sumatra_db_path + "/records/recordstore.db"
    record_store1 = {"db_file": database_path,
                     "type": "sumatra.recordstore.django_store.DjangoRecordStore"}
    input_datastore1 = {"root": results_path,
                        "type": "sumatra.datastore.filesystem.FileSystemDataStore"}
    try:
        repo1.hg_clone(url=repository, path=repo_path)
        with open(repo_path + "/.hg/hgrc", "a") as myfile:
            # The value written here was masked in the original source; writing
            # hg_username is an inference from the function signature.
            myfile.write("[ui]\nusername = " + hg_username + "\n")
        print("Omics pipe repository cloned to: " + repo_path)
    except hgapi.hgapi.HgException:
        print("Omics pipe repository already exists.")
    try:
        Project(sumatra_run_name,
                default_repository=repo,
                default_executable=executable,
                default_launch_mode=sumatra_launch_mode,
                on_changed='store-diff',
                data_store=data_store1,
                record_store=record_store1,
                input_datastore=input_datastore1)
        print("Sumatra project created: " + sumatra_run_name +
              " in directory: " + sumatra_db_path)
    except Exception:
        print("Sumatra project already exists, loading project: " + sumatra_run_name)
    project = load_project(path=sumatra_db_path)
    print(project)
    sumatra_params = build_parameters(parameters)
    print(sumatra_params)
    os.chdir(repo_path)
    repo_main = "omics_pipe/main.py"
    record = project.new_record(parameters=sumatra_params, main_file=repo_main)
    print(record)
    return record, project
def parse_arguments(args, input_datastore, stdin=None, stdout=None,
                    allow_command_line_parameters=True):
    cmdline_parameters = {}
    script_args = []
    parameter_sets = []
    input_data = []
    for arg in args:
        have_parameters = False
        if os.path.isfile(arg):  # could be a parameter file or a data file
            try:
                parameter_sets.append(build_parameters(arg))
                script_args.append("<parameters>")
                have_parameters = True
            except SyntaxError:
                pass
        if not have_parameters:
            if input_datastore.contains_path(arg):
                data_key = input_datastore.generate_keys(arg)
                input_data.extend(data_key)
                script_args.append(arg)
            elif allow_command_line_parameters and "=" in arg:  # command-line parameter
                cmdline_parameters.update(parse_command_line_parameter(arg))
            else:  # a flag or something, passed on unchanged
                script_args.append(arg)
    if stdin:
        script_args.append("< %s" % stdin)
        if input_datastore.contains_path(stdin):
            data_key = input_datastore.generate_keys(stdin)
            input_data.extend(data_key)
        else:
            raise IOError("File does not exist: %s" % stdin)
    if stdout:
        script_args.append("> %s" % stdout)
    assert len(parameter_sets) < 2, "No more than one parameter file may be supplied."  # temporary restriction
    if cmdline_parameters:
        if parameter_sets:
            parameter_sets[0].update(cmdline_parameters)
        else:
            # ought really to have a more specific Exception and to catch it
            # so as to give a helpful error message to the user
            raise Exception("Command-line parameters supplied but without a "
                            "parameter file to put them into.")
    return parameter_sets, input_data, " ".join(script_args)
def test__build_parameters_yaml(self):
    P = build_parameters("test_file.yaml")
    self.assertEqual(P.as_dict(), {'x': 2, 'y': {'a': 3, 'b': 4}})
    self.assertIsInstance(P, YAMLParameterSet)
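# A plausible "test_file.yaml" for the assertion above; YAML nests mappings
# by indentation and parses the numbers as integers (contents assumed):
yaml_contents = """\
x: 2
y:
  a: 3
  b: 4
"""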
#!/usr/bin/env python
import os
import sys
from sumatra.parameters import build_parameters

print("Hello world.")

# It is important not to hard-code the name of the parameter file because
# Sumatra creates a parameter file 'on the fly' and passes its name to the
# script, thus we read its name from the command line arguments.
paramsfile = sys.argv[1]
parameters = build_parameters(paramsfile)
print("Parameters: {}".format(parameters))

# Change into the datastore directory to run the simulation there
wrkdir = os.path.join('Data', parameters['sumatra_label'])
os.chdir(wrkdir)

with open('sample_output.txt', 'w') as f:
    f.write('Hello world!\n')
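# Sketch of how such a script is typically launched (assuming the project was
# configured beforehand, e.g. `smt configure --executable=python --main=script.py`):
#
#     smt run default.param
#
# Sumatra copies the parameter file, adds the generated 'sumatra_label' entry,
# and passes the name of the copy to the script as sys.argv[1].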
def test__build_parameters_simple(self):
    P = build_parameters("test_file.simple")
    self.assertEqual(P.as_dict(), {'x': 2, 'y': 3})
    self.assertIsInstance(P, SimpleParameterSet)
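# A plausible "test_file.simple" for the assertion above; SimpleParameterSet
# expects one "name = value" assignment per line (contents assumed):
simple_contents = """\
x = 2
y = 3
"""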
import glob
import os

from sumatra.parameters import build_parameters, NTParameterSet

installationDirectory = os.path.abspath(os.path.dirname(__file__))
controlFolder = os.path.join(installationDirectory, 'evoked_activity/control')
manipulation1Folder = os.path.join(installationDirectory, 'evoked_activity/manipulation1')
if not os.path.exists(manipulation1Folder):
    os.makedirs(manipulation1Folder)

for fname in glob.glob(os.path.join(controlFolder, '*')):
    if fname.endswith('.param'):
        outName = fname.replace('control', 'manipulation1')
        # load control file as ParameterSet
        controlParameters = build_parameters(fname)
        # pop population (manipulation 1: L5/6BS in E2)
        newParameters = controlParameters.tree_copy()
        # iterate over a list copy, since the mapping is mutated inside the loop
        for population in list(newParameters.network.keys()):
            if 'L6cc_E2' == population:
                newParameters.network.pop(population)
        # save ParameterSet as new file
        newParameters.save(outName)
    else:
        continue

header = '#!/bin/bash\n'
header += '\n'
header += 'cd '
header += manipulation1Folder
header += '\n'
def _merge_params_and_taskinputs(cls, params, taskinputs):
    """
    params: arguments passed as a dictionary to constructor
        As a special case, if a task has only one input, it does not need
        to be wrapped in a dict (i.e. `params` can be the value itself).
    taskinputs: arguments passed directly as keywords to constructor

    This function does the following:
      + Merge dictionary and keyword arguments. Keyword arguments take
        precedence.
      + Check that all arguments required by task `run()` signature are
        provided.
      + Retrieve any missing argument values from the defaults in `run()`
        signature.
      + Cast every input to its expected type. If an input defines multiple
        allowable types, the left-most one takes precedence.
    """
    if params is None:
        params = {}
    elif isinstance(params, str):
        params = build_parameters(params)
    elif isinstance(params, dict):
        params = ParameterSet(params)
    else:
        if len(cls.inputs) == 1:
            # For tasks with only one input, don't require a dict:
            # `params` is taken to be the value of that single input.
            θname, θtype = next(iter(cls.inputs.items()))
            if len(taskinputs) > 0:
                raise TypeError(f"Argument given by name {θname} "
                                "and position.")
            # if not isinstance(taskinputs, θtype):
            #     # Cast to correct type
            #     taskinputs = cast(taskinputs, θtype)
            taskinputs = ParameterSet({θname: params})
            params = {}
        else:
            raise ValueError("`params` should be either a dictionary "
                             "or a path to a parameter file, however it "
                             "is of type {}.".format(type(params)))
    taskinputs = {**params, **taskinputs}
    sigparams = inspect.signature(cls._run).parameters
    required_inputs = [p.name for p in sigparams.values()
                       if p.default is inspect._empty]
    default_inputs = {p.name: p.default for p in sigparams.values()
                      if p.default is not inspect._empty}
    if type(cls.__dict__['_run']) is not staticmethod:
        # instance and class methods already provide 'self' or 'cls'
        firstarg = required_inputs.pop(0)
        # Only allowing 'self' and 'cls' ensures we don't accidentally
        # remove true input arguments
        assert firstarg in ("self", "cls")
    if not all((p in taskinputs) for p in required_inputs):
        raise TypeError("Missing required inputs '{}'.".format(
            set(required_inputs).difference(taskinputs)))
    # Add default inputs so they are recorded as task arguments
    taskinputs = {**default_inputs, **taskinputs}
    # Finally, cast all task inputs
    for name, θ in taskinputs.items():
        θtype = cls.inputs[name]
        if isinstance(θ, LazyCastTypes):
            # Can't cast e.g. tasks: they haven't been executed yet
            continue
        elif not isinstance(θ, θtype):
            taskinputs[name] = cast(θ, θtype, 'input')
    return taskinputs
def test__build_parameters_hierarchical(self):
    P = build_parameters("test_file.hierarchical")
    self.assertEqual(P.as_dict(), {'x': 2, 'y': {'a': 3, 'b': 4}})
    self.assertIsInstance(P, (JSONParameterSet, YAMLParameterSet, NTParameterSet))
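# A plausible "test_file.hierarchical" (contents assumed): a nested dict
# literal like the one below is valid JSON, YAML and NeuroTools (NT) syntax
# at once, which is why the test accepts any of the three ParameterSet classes:
hierarchical_contents = """\
{"x": 2, "y": {"a": 3, "b": 4}}
"""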
import sys

import numpy as np
from scipy.optimize import curve_fit

from sumatra.parameters import build_parameters


# Method of the Analyse class instantiated below; the rest of the class
# definition (constructor, plot()) is elided in this excerpt.
def save(self):
    with open(self.data_file, 'w+') as ifile:
        ifile.write(str(self.result))


def main(parameters, label):
    res = {}
    x = np.linspace(0, 4, 100)
    for i, params in parameters.items():
        a, b, c, d, e = params.a, params.b, params.c, params.d, params.e
        y = func(x, a, b, c, d, e)
        yn = y + 0.2 * np.random.normal(size=len(x))
        try:
            popt, pcov = curve_fit(func, x, yn)
            res[i] = {'popt': list(popt), 'actual': [a, b, c, d, e]}
        except RuntimeError:
            pass
    analyseOb = Analyse(result=res, label=label)
    analyseOb.save()
    analyseOb.plot()


def func(x, a, b, c, d, e):
    return a * np.exp(-b * x) + (c + d + e - 2 * c) * pow(x, 0.21)


if __name__ == '__main__':
    options = sys.argv[1:]
    label = options[-1]  # label is added to the end of the command line
    parameter_file = sys.argv[1]
    parameters = build_parameters(parameter_file)  # Sumatra builds parameters here
    main(parameters, label)
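# A sketch (hypothetical) of a hierarchical parameter file this script could
# consume: each top-level section supplies one set of coefficients for func():
fit_parameters = """\
{"run1": {"a": 2.5, "b": 1.3, "c": 0.5, "d": 0.1, "e": 1.0}}
"""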
def test__build_parameters_json(self):
    P = build_parameters("test_file.json")
    self.assertEqual(P.as_dict(), {'x': 2, 'y': {'a': 3, 'b': 4}})
    self.assertIsInstance(P, JSONParameterSet)
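# A plausible "test_file.json" for the assertion above (contents assumed):
json_contents = """\
{"x": 2, "y": {"a": 3, "b": 4}}
"""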
"""
Usage:
-----
python run_tvb_sims_main.py <parameter_file>
"""
import sys

from sumatra.parameters import build_parameters
from sumatra.decorators import capture

sys.path.append("/media/sf_SharedFolder/Code/git_repos_of_mine/tvb-scripting")
from tvb_scripting.Runner import Sim


@capture
def main(pset_smt):
    Ps = pset_smt.as_dict()
    S = Sim(Ps)
    S.run()


# Read parameter file
parameter_file = sys.argv[1]
pset_smt = build_parameters(parameter_file)

# Run
main(pset_smt)
import sys

from sumatra.parameters import build_parameters


def main(parameters):
    # (excerpt: the code creating the quantum walk object `walk` and the
    # propagation time `t` is elided upstream of this fragment)

    # set the eigensolver properties.
    walk.EigSolver.setEigSolver(tol=1.e-3)

    # create a handle to watch the probability at nodes -5, 0, 1:
    walk.watch([0, 1, -5])

    # Propagate the CTQW using the Chebyshev method
    # for t=100s in timesteps of dt=0.01.
    # Note that psiToInit() is being used rather than global timesteps.
    for i in range(int(t / 0.01)):
        walk.propagate(0.01, method='chebyshev')
        walk.psiToInit()

    # plot the marginal probabilities
    # after propagation over all nodes
    walk.plot('out/{}-plot.png'.format(parameters["sumatra_label"]))

    # plot the probability over time for the watched nodes
    walk.plotNodes('out/{}-nodes.png'.format(parameters["sumatra_label"]))

    # export final state
    walk.exportState("out/{}-state.txt".format(parameters["sumatra_label"]), "txt")

    # destroy the quantum walk
    walk.destroy()


parameter_file = sys.argv[1]
parameters = build_parameters(parameter_file)
main(parameters)
import configparser
import csv
import sys
import time
from os import popen

from numpy import array
from numpy.random import multinomial

from sumatra.projects import load_project
from sumatra.parameters import build_parameters

parameter_file = sys.argv[1]
parameters = build_parameters(parameter_file)
parameters.update({"parameter_file": parameter_file})

project = load_project()
record = project.new_record(parameters=parameters,
                            main_file=__file__,
                            reason="reason for running this simulation")
parameters.update({"sumatra_label": record.label})
start_time = time.time()

cmd = r"/c/program files/R/R-2.15.0/bin/Rscript.exe main.r %s %s" % (parameter_file, record.label)
print("Running command", cmd)
fin = popen(cmd)
print(fin.read())
fin.close()

record.duration = time.time() - start_time
record.output_data = record.datastore.find_new_data(record.timestamp)
project.add_record(record)
import os
import pickle
import sys
from os import path

# assumed: plt is used below but was not imported in this excerpt
import matplotlib.pyplot as plt
from hcluster import pdist, linkage, dendrogram

from sumatra.parameters import build_parameters
# `tools` is a project-local module providing get_logger() (import assumed)
import tools


# define what should happen when a point is picked
def onpick(event):
    plt.subplot(2, 1, 1)
    event.artist.figure.axes[0].texts = []
    plt.annotate(event.artist.name, (event.artist._x, event.artist._y))


# setup
p = build_parameters(sys.argv[1])
result_path = path.join(p['base_path'], p['result_path'])
output_dir = path.join(result_path, p['sumatra_label'])
if not path.exists(output_dir):
    os.mkdir(output_dir)

logger = tools.get_logger('gensim', path.join(output_dir, "run.log"))
logger.info("running %s" % ' '.join(sys.argv))

# open in binary mode for pickle under Python 3
with open(path.join(result_path, p['data_label'], 'data.pickle'), 'rb') as f:
    data = pickle.load(f)

for key, val in data.items():
    # for bla in [1]:
    #     key, val = 'eagle', data['eagle']
import os
import sys
from sumatra.parameters import build_parameters

# The following line is important because Sumatra creates a parameter file
# 'on the fly' and passes its name to the script, so we should *not* use a
# hard-coded filename here.
paramsfile = sys.argv[1]
parameters = build_parameters(paramsfile)

# I like printing the sumatra label of this run:
smt_label = parameters['sumatra_label']
print("Sumatra label for this run: {}".format(smt_label))
sys.stdout.flush()

# Change into the datastore directory to run the simulation there.
# Note that this has to happen *after* reading the parameter above,
# otherwise it won't find the parameter file.
os.chdir(os.path.join('Data', smt_label))

# The variable 'parameters' defined above is a dictionary associating
# each parameter name with its value, so we can use this neat trick to
# make the parameters available as global variables:
globals().update(parameters)

# Alternatively, if we don't want to resort to "black magic", we can
# assign each parameter value separately to a variable:
Msat = parameters['Msat']
H_ext = parameters['H_ext']
A = parameters['A']
# ... etc. ...