# Example 1
    def __condor_evaluate(self,candidates,args):
        """
        Run simulations on the grid (Condor) and analyse the data locally.

        NOTE(review): the original docstring warned that the method name may
        not match this description; what is documented below is what the
        code visibly does. The ``args`` parameter is not used in this body.

            Once each generation has finished, all data is pulled to the
            local workstation in the form of sqlite databases (1 database
            per job); these are analysed and the fitness estimated
            sequentially, and the fitness array is then returned.
        """
        
        import time
        import ssh_utils
        
        # Reset per-generation bookkeeping; __build_condor_files repopulates it.
        self.CandidateData_list=[]
       
        self.__build_condor_files(candidates) #Build submit and runx.sh files, exp_id now corresponds to position in chromosome and fitness arrays
        
        # Connection details for the remote submission host.
        messagehost=ssh_utils.host(optimizer_params.host,optimizer_params.username,optimizer_params.password,optimizer_params.port)
        
        self.__delete_remote_files__(messagehost)#delete everything in the remote directory you're about to put files in
        filelist=os.listdir(self.tmpdir)
        self.__put_multiple_files(messagehost,filelist,localdir=self.tmpdir,remotedir=optimizer_params.remotedir)#copy local job files over
        filelist=os.listdir(self.portableswdir)
        self.__put_multiple_files(messagehost,filelist,localdir=self.portableswdir,remotedir=optimizer_params.remotedir)#copy portable software over
        
        # Submit all jobs to the Condor pool through the message host.
        ssh_utils.issue_command(messagehost,'export PATH=/opt/Condor/release/bin:$PATH\ncondor_submit submitfile.submit')
        
        self.jobdbnames=[]
        for job_num in range(self.num_jobs):        #make a list of the databases we need:
            jobdbname='outputdb'+str(job_num)+'.sqlite'
            self.jobdbnames.append(jobdbname)
           
        # Poll the remote host until every job database exists, pulling each
        # one exactly once as it appears.
        dbs_created=False
        pulled_dbs=[] # list of databases which have been extracted from remote server
        while (dbs_created==False):
            print 'waiting..'
            time.sleep(20)
            print 'checking if dbs created:'
            command='ls'
            remote_filelist=ssh_utils.issue_command(messagehost, command)

            for jobdbname in self.jobdbnames:

                # `ls` output is newline-separated, hence the '\n' suffix.
                db_exists=jobdbname+'\n' in remote_filelist
                
                if (db_exists==False):
                    print jobdbname,' has not been generated'
                    dbs_created=False

                elif db_exists==True and jobdbname not in pulled_dbs:
                    print jobdbname,' has been generated'
                    remotefile=optimizer_params.remotedir+jobdbname
                    # Prefix with the generation number so successive
                    # generations do not overwrite each other locally.
                    localpath=os.path.join(self.datadir,str(self.generation)+jobdbname)
                    ssh_utils.get_file(messagehost,remotefile,localpath)
                    pulled_dbs.append(jobdbname) #so that it is not extracted more than once
                    #here pop-in the fitness evaluation

                if len(pulled_dbs)==len(self.jobdbnames):
                    dbs_created=True
            
        # Extract the fitness of every candidate from its job database.
        fitness=[]
        for CandidateData in self.CandidateData_list:
            job_num = CandidateData.job_num
            
            dbname=str(self.generation)+'outputdb'+str(job_num)+'.sqlite'
            dbpath=os.path.join(self.datadir,dbname)
            exp_id=CandidateData.exp_id
            
            connection=sqldbutils.db_connect(dbpath) #establish a database connection
            # NOTE(review): query built by string concatenation; exp_id is
            # internally generated, but a parameterized query would be safer.
            query='SELECT numerical_value\
                    FROM output_params WHERE experiment_id=\
                    '+str(exp_id)+' AND parameter="fitness"'       

            exp_fitness=sqldbutils.execute_query(connection,query)
            exp_fitness=exp_fitness.fetchall()
            exp_fitness=exp_fitness[0][0]
            
            print 'fitness:'
            print exp_fitness
    
            fitness.append(exp_fitness)

        # Advance the generation counter so the next call pulls into fresh files.
        self.generation+=1
        return fitness
# Example 2
    def __condor_run(self, candidates, parameters):
        """
        Run simulations on the grid (Condor) and analyse the data locally.

        Once each generation has finished, all data is pulled to the local
        workstation in the form of sqlite databases (1 database per job);
        these are analysed and the fitness estimated sequentially, and the
        fitness array is then returned.

        :param candidates: chromosome list for this generation
        :param parameters: evaluation parameters forwarded to the job builder
        :return: list of fitness values, one per candidate
        """

        import time
        import ssh_utils

        #Build submit and runx.sh files, exp_id now corresponds
        #to position in chromosome and fitness arrays
        # NOTE(review): accessing a double-underscore method on another
        # object name-mangles to _<ThisClass>__build_condor_files here --
        # confirm self.context actually exposes that attribute.
        self.context.__build_condor_files(candidates,
                                          parameters,
                                          candidates_per_job=self.cpj)

        # File handling: clear the remote working directory, then copy the
        # generated job files and the portable software over.
        self.__delete_remote_files__()
        filelist = os.listdir(self.tmpdir)
        self.__put_multiple_files(filelist, localdir=self.tmpdir)
        filelist = os.listdir(self.portableswdir)
        self.__put_multiple_files(filelist, localdir=self.portableswdir)

        # Submit the jobs to the grid through the message host.
        # BUGFIX: was `context.messagehost` -- `context` is an undefined
        # name here; every other use in this method is `self.messagehost`.
        ssh_utils.issue_command(
            self.messagehost,
            'export PATH=/opt/Condor/release/bin:$PATH\ncondor_submit submitfile.submit'
        )

        #make a list of the database files we need:
        self.jobdbnames = []
        for job_num in range(self.num_jobs):
            self.jobdbnames.append('outputdb' + str(job_num) + '.sqlite')

        # Poll the remote host until every job database exists, pulling each
        # one exactly once as it appears.
        dbs_created = False
        pulled_dbs = []  # databases already copied from the remote server
        while not dbs_created:
            print('waiting..')
            time.sleep(20)
            print('checking if dbs created:')
            command = 'ls'
            remote_filelist = ssh_utils.issue_command(self.messagehost,
                                                      command)
            for jobdbname in self.jobdbnames:
                # `ls` output is newline-separated, hence the '\n' suffix.
                db_exists = jobdbname + '\n' in remote_filelist
                if not db_exists:
                    print(jobdbname + ' has not been generated')
                    dbs_created = False
                elif jobdbname not in pulled_dbs:
                    print(jobdbname + ' has been generated')
                    remotefile = optimizer_params.remotedir + jobdbname
                    # Prefix with the generation number so successive
                    # generations do not overwrite each other locally.
                    localpath = os.path.join(self.datadir,
                                             str(self.generation) + jobdbname)
                    ssh_utils.get_file(self.messagehost, remotefile, localpath)
                    pulled_dbs.append(
                        jobdbname)  #so that it is not extracted more than once
                    #here pop-in the fitness evaluation
                if len(pulled_dbs) == len(self.jobdbnames):
                    dbs_created = True

        # Extract the fitness of every candidate from its job database.
        #this block can be simplified, it need simply return exp_data containers
        fitness = []
        for CandidateData in self.CandidateData_list:
            job_num = CandidateData.job_num

            dbname = str(
                self.generation) + 'outputdb' + str(job_num) + '.sqlite'
            dbpath = os.path.join(self.datadir, dbname)
            exp_id = CandidateData.exp_id

            connection = sqldbutils.db_connect(
                dbpath)  #establish a database connection
            # NOTE(review): query built by string concatenation; exp_id is
            # internally generated, but a parameterized query would be safer.
            query = 'SELECT numerical_value\
                    FROM output_params WHERE experiment_id=\
                    ' + str(exp_id) + ' AND parameter="fitness"'

            exp_fitness = sqldbutils.execute_query(connection, query)
            exp_fitness = exp_fitness.fetchall()
            exp_fitness = exp_fitness[0][0]

            print('Fitness:')
            print(exp_fitness)

            fitness.append(exp_fitness)

        # Advance the generation counter so the next call pulls fresh files.
        self.generation += 1
        return fitness
# Example 3
 def __delete_remote_files(self,host):
     """Wipe the contents of the remote working directory on *host*."""
     import ssh_utils
     # Single destructive command; host connection details come from caller.
     ssh_utils.issue_command(host, 'rm -rf ./*')
# Example 4
 def __delete_remote_files(self, host):
     """Remove everything in the remote working directory.

     :param host: ssh_utils host object the removal command is issued on
     """
     import ssh_utils
     remote_wipe = 'rm -rf ./*'
     ssh_utils.issue_command(host, remote_wipe)