def __init__(self, config, maxCores, maxMemory):
    """Set up the LSF batch system: results file, job bookkeeping, and worker thread.

    :param config: Toil configuration object; only ``config.jobStore`` is read here,
        to derive the results-file path.
    :param maxCores: maximum cores, forwarded to AbstractBatchSystem.
    :param maxMemory: maximum memory, forwarded to AbstractBatchSystem.
    """
    AbstractBatchSystem.__init__(self, config, maxCores, maxMemory)  # Call the parent constructor
    self.lsfResultsFile = getParasolResultsFileName(config.jobStore)
    # Reset the job queue and results (initially, we do this again once we've
    # killed the jobs). Opening in 'w' mode and immediately closing discards any
    # previous state in this file and ensures the file exists.
    self.lsfResultsFileHandle = open(self.lsfResultsFile, 'w')
    self.lsfResultsFileHandle.close()
    # NOTE(review): sibling GridEngine constructor spells this attribute
    # ``currentJobs``; kept as-is here since other methods of this class
    # (not visible in this chunk) may rely on the lowercase name.
    self.currentjobs = set()
    self.obtainSystemConstants()
    self.jobIDs = dict()     # maps batch-system job -> internal ID
    self.lsfJobIDs = dict()  # maps internal ID -> LSF job ID
    self.nextJobID = 0
    self.newJobsQueue = Queue()
    self.updatedJobsQueue = Queue()
    self.worker = Worker(self.newJobsQueue, self.updatedJobsQueue, self)
    # FIX: Thread.setDaemon() is a deprecated accessor; assign the ``daemon``
    # attribute directly (identical behavior: the worker dies with the main thread).
    self.worker.daemon = True
    self.worker.start()
def __init__(self, config, maxCores, maxMemory, maxDisk):
    """Initialize the GridEngine batch system.

    Forwards resource limits to the parent class, truncates (or creates) the
    results file so no stale state survives, records system capacity, and
    launches the background worker that services the job queues.

    :param config: Toil configuration; ``config.jobStore`` locates the results file.
    :param maxCores: maximum cores, forwarded to AbstractBatchSystem.
    :param maxMemory: maximum memory, forwarded to AbstractBatchSystem.
    :param maxDisk: maximum disk, forwarded to AbstractBatchSystem.
    """
    AbstractBatchSystem.__init__(self, config, maxCores, maxMemory, maxDisk)

    # Reset the job queue and results (initially, we do this again once we've
    # killed the jobs). Opening for write and closing at once both wipes any
    # previous contents and guarantees the file exists.
    self.gridengineResultsFile = getParasolResultsFileName(config.jobStore)
    self.gridengineResultsFileHandle = open(self.gridengineResultsFile, 'w')
    self.gridengineResultsFileHandle.close()

    # Job bookkeeping: the live-job set, cluster capacity, and the ID counter.
    self.currentJobs = set()
    self.maxCPU, self.maxMEM = self.obtainSystemConstants()
    self.nextJobID = 0

    # Queues shared with the worker thread: submissions, status updates,
    # kill requests, and kill confirmations.
    self.newJobsQueue = Queue()
    self.updatedJobsQueue = Queue()
    self.killQueue = Queue()
    self.killedJobsQueue = Queue()
    self.worker = Worker(self.newJobsQueue, self.updatedJobsQueue,
                         self.killQueue, self.killedJobsQueue, self)
    self.worker.start()