def get(self, param):
    """
    Get a value from the sqlite database for this loader's experiment id.

    The caller does not need to know which table the value is stored in
    (sim_var, output_params, ...): each candidate source is tried in turn.

    :param param: name of the parameter to look up.
    :return: an array of time-dependent samples if ``param`` is a recorded
        timeseries, otherwise a single numerical or string value; ``None``
        (after printing a message) if the parameter is not found.
    """
    variable = None

    # First try time-correlated samples (time-varying data) for the param.
    try:
        self.data = sqldbutils.sim_data(self.db_path, self.experiment_id, param)
        samples = self.data.samples
        if len(samples) > 0:
            variable = samples
    except Exception:
        pass  # best-effort: fall through to the scalar tables

    connection = sqldbutils.db_connect(self.db_path)

    # Then try each scalar table/column in the original lookup order; a
    # later non-NULL hit overrides an earlier one (same precedence as the
    # original code, minus its NULL-clobbering bug).
    # NOTE(review): the query is built by string concatenation, so a quote
    # in `param` breaks the SQL -- switch to a parameterized query if
    # sqldbutils.execute_query supports it.
    for table in ('output_params', 'sim_var'):
        for column in ('numerical_value', 'string_value'):
            try:
                query = ('SELECT ' + column + ' FROM ' + table +
                         ' WHERE parameter=\'' + param +
                         '\' AND experiment_id =' + str(self.experiment_id))
                value = sqldbutils.execute_query(connection,
                                                 query).fetchall()[0][0]
                # Bug fix: original tested `type(value) != None`, which is
                # always true, so a NULL result could overwrite a good one.
                if value is not None:
                    variable = value
            except Exception:
                pass  # no matching row in this table/column

    # Bug fix: original did `try: return variable` which raised NameError
    # when no source ever assigned it.
    if variable is not None:
        return variable  # single value or time-varying array
    print('Parameter not found in database')
def __condor_run(self, candidates, parameters):
    """
    Run one generation of simulations on the Condor grid, then evaluate
    fitness locally.

    Submit/run files are built for every candidate, the remote working
    directory is cleared, job files and portable software are copied over,
    and the jobs are submitted via the message host. The method then polls
    the remote host until each job's sqlite database exists, pulls every
    database exactly once, and finally reads the pre-computed "fitness"
    value for each candidate out of its database.

    :param candidates: candidate chromosomes for this generation.
    :param parameters: parameter names matching the chromosome positions.
    :return: list of fitness values, one per candidate.
    """
    import time
    import ssh_utils

    # Build submit and runx.sh files; exp_id corresponds to position in
    # the chromosome and fitness arrays.
    # NOTE(review): this double-underscore attribute is name-mangled to
    # *this* class -- confirm self.context really exposes it.
    self.context.__build_condor_files(candidates, parameters,
                                      candidates_per_job=self.cpj)

    # File handling: clear the remote directory, then push the local job
    # files and the portable software over.
    self.__delete_remote_files__()
    self.__put_multiple_files(os.listdir(self.tmpdir), localdir=self.tmpdir)
    self.__put_multiple_files(os.listdir(self.portableswdir),
                              localdir=self.portableswdir)

    # Submit the jobs to the grid via the message host.
    # Bug fix: was `context.messagehost` -- `context` is an undefined
    # global (NameError); the rest of this method uses self.messagehost.
    ssh_utils.issue_command(
        self.messagehost,
        'export PATH=/opt/Condor/release/bin:$PATH\ncondor_submit submitfile.submit')

    # Names of the per-job databases the grid jobs will produce.
    self.jobdbnames = ['outputdb' + str(job_num) + '.sqlite'
                       for job_num in range(self.num_jobs)]

    # Poll until every job database exists remotely, pulling each one
    # exactly once as it appears. (Also fixes the original's infinite
    # loop when num_jobs == 0.)
    pulled_dbs = []  # databases already fetched from the remote server
    while len(pulled_dbs) != len(self.jobdbnames):
        print('waiting..')
        time.sleep(20)
        print('checking if dbs created:')
        remote_filelist = ssh_utils.issue_command(self.messagehost, 'ls')
        for jobdbname in self.jobdbnames:
            if jobdbname + '\n' not in remote_filelist:
                print(jobdbname + ' has not been generated')
            elif jobdbname not in pulled_dbs:
                print(jobdbname + ' has been generated')
                remotefile = optimizer_params.remotedir + jobdbname
                localpath = os.path.join(self.datadir,
                                         str(self.generation) + jobdbname)
                ssh_utils.get_file(self.messagehost, remotefile, localpath)
                pulled_dbs.append(jobdbname)  # never extract twice

    # Fitness evaluation: read the value each job stored in its database.
    fitness = []
    for cand in self.CandidateData_list:
        dbname = (str(self.generation) + 'outputdb' +
                  str(cand.job_num) + '.sqlite')
        connection = sqldbutils.db_connect(os.path.join(self.datadir, dbname))
        query = ('SELECT numerical_value FROM output_params'
                 ' WHERE experiment_id=' + str(cand.exp_id) +
                 ' AND parameter="fitness"')
        exp_fitness = sqldbutils.execute_query(connection,
                                               query).fetchall()[0][0]
        print('Fitness:')
        print(exp_fitness)
        fitness.append(exp_fitness)

    self.generation += 1
    return fitness
def __condor_run(self, candidates, parameters):
    """
    Run one generation of simulations on the Condor grid, then evaluate
    fitness locally.

    Builds submit/run files for the candidates, clears the remote working
    directory, copies the job files and portable software over, and
    submits the jobs via the message host. It then polls the remote host
    until every job's sqlite database exists, pulls each database exactly
    once, and reads the pre-computed "fitness" value for each candidate
    out of its database.

    :param candidates: candidate chromosomes for this generation.
    :param parameters: parameter names matching the chromosome positions.
    :return: list of fitness values, one per candidate.
    """
    import time
    import ssh_utils

    # Build submit and runx.sh files; exp_id corresponds to position in
    # the chromosome and fitness arrays.
    # NOTE(review): double-underscore attribute is name-mangled to *this*
    # class -- confirm self.context really exposes it.
    self.context.__build_condor_files(candidates, parameters,
                                      candidates_per_job=self.cpj)

    # File handling: wipe the remote directory, then push local job files
    # and the portable software.
    self.__delete_remote_files__()
    filelist = os.listdir(self.tmpdir)
    self.__put_multiple_files(filelist, localdir=self.tmpdir)
    filelist = os.listdir(self.portableswdir)
    self.__put_multiple_files(filelist, localdir=self.portableswdir)

    # Submit the jobs to the grid via the message host.
    # Bug fix: was `context.messagehost` -- `context` is an undefined
    # global (NameError); everywhere else this method uses
    # self.messagehost.
    ssh_utils.issue_command(
        self.messagehost,
        'export PATH=/opt/Condor/release/bin:$PATH\ncondor_submit submitfile.submit')

    # Names of the per-job databases the grid jobs will produce.
    self.jobdbnames = []
    for job_num in range(self.num_jobs):
        self.jobdbnames.append('outputdb' + str(job_num) + '.sqlite')

    # Poll the remote host, pulling each database exactly once as it
    # appears. (Also fixes the original's infinite loop for num_jobs==0.)
    pulled_dbs = []  # databases already fetched from the remote server
    while len(pulled_dbs) != len(self.jobdbnames):
        print('waiting..')
        time.sleep(20)
        print('checking if dbs created:')
        remote_filelist = ssh_utils.issue_command(self.messagehost, 'ls')
        for jobdbname in self.jobdbnames:
            if jobdbname + '\n' not in remote_filelist:
                print(jobdbname + ' has not been generated')
            elif jobdbname not in pulled_dbs:
                print(jobdbname + ' has been generated')
                remotefile = optimizer_params.remotedir + jobdbname
                localpath = os.path.join(self.datadir,
                                         str(self.generation) + jobdbname)
                ssh_utils.get_file(self.messagehost, remotefile, localpath)
                pulled_dbs.append(jobdbname)  # never extract twice

    # Fitness evaluation: read the value each job stored in its database.
    fitness = []
    for cand in self.CandidateData_list:
        dbname = (str(self.generation) + 'outputdb' +
                  str(cand.job_num) + '.sqlite')
        dbpath = os.path.join(self.datadir, dbname)
        connection = sqldbutils.db_connect(dbpath)
        query = ('SELECT numerical_value FROM output_params'
                 ' WHERE experiment_id=' + str(cand.exp_id) +
                 ' AND parameter="fitness"')
        exp_fitness = sqldbutils.execute_query(connection,
                                               query).fetchall()[0][0]
        print('Fitness:')
        print(exp_fitness)
        fitness.append(exp_fitness)

    self.generation += 1
    return fitness
# NOTE(review): the next line is the tail of a loader construction that
# begins before this chunk (presumably something like
# `loader = ...loader(<db_path>, 'sqlite', experiment_id=1)`) -- confirm
# against the full file.
'sqlite',experiment_id=1)

# Example usage, part 1: reading values back through the loader object.
# (Python 2 print statements.)
print loader.get('simulation_name')

# Example of getting an item which was the result of a calculation,
# saved using the writer.write method of a writer object at runtime.
print loader.get('example value')

# Loading the voltage-vector of a recording:
soma_voltage_2=loader.get('soma_voltage_2_mV')

# Get the corresponding time-vector:
t=loader.get_timeseries('soma_voltage_2_mV')

### PART 2 - using sqldbutils.py ###
import sqldbutils

# note: REPLACE FILEPATH with your own database location
db_path='/home/mike/dev/nrnproject/sims/outputdb.sqlite'

# Create an exp_data object for experiment id 1:
exp_data=sqldbutils.sim_data('/home/mike/dev/nrnproject/sims/output.sqlite',1)

# Collect raw data; currently only loads the voltage_mV timeseries.
t= exp_data.t
samples=exp_data.samples

# Examples of how to extract other query-dependent data (sqlite database):
connection=sqldbutils.db_connect('/home/mike/dev/nrnproject/sims/output.sqlite')
query_output=sqldbutils.execute_query(connection,'SELECT experiment_id FROM output_params WHERE parameter=\'example_param_2\' AND numerical_value>1')
exp_ids=query_output.fetchall()