def testJob(self):
    """
    Tests functions of a job.
    """
    command = "by your command"
    memory = 2 ** 32
    disk = 2 ** 32
    cores = 1
    preemptable = 1
    jobStoreID = 100
    remainingRetryCount = 5
    predecessorNumber = 0
    j = JobGraph(command=command, memory=memory, cores=cores, disk=disk,
                 preemptable=preemptable, jobStoreID=jobStoreID,
                 remainingRetryCount=remainingRetryCount,
                 predecessorNumber=predecessorNumber,
                 jobName='testJobGraph', unitName='noName')

    # Check attributes
    self.assertEquals(j.command, command)
    self.assertEquals(j.memory, memory)
    self.assertEquals(j.disk, disk)
    self.assertEquals(j.cores, cores)
    self.assertEquals(j.preemptable, preemptable)
    self.assertEquals(j.jobStoreID, jobStoreID)
    self.assertEquals(j.remainingRetryCount, remainingRetryCount)
    self.assertEquals(j.predecessorNumber, predecessorNumber)
    self.assertEquals(j.stack, [])
    self.assertEquals(j.predecessorsFinished, set())
    self.assertEquals(j.logJobStoreFileID, None)

    # Check equals function
    j2 = JobGraph(command=command, memory=memory, cores=cores, disk=disk,
                  preemptable=preemptable, jobStoreID=jobStoreID,
                  remainingRetryCount=remainingRetryCount,
                  predecessorNumber=predecessorNumber,
                  jobName='testJobGraph', unitName='noName')
    self.assertEquals(j, j2)

    # Change an attribute and check not equal
    j.predecessorsFinished = {"1", "2"}
    self.assertNotEquals(j, j2)
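# Note on the fix above: in Python, ^ is bitwise XOR, not exponentiation, so
# the original "2 ^ 32" evaluated to 34 rather than the intended 4 GiB value.
# A minimal demonstration:
assert 2 ^ 32 == 34           # XOR: 0b000010 ^ 0b100000 == 0b100010 == 34
assert 2 ** 32 == 4294967296  # exponentiation: the intended 4 GiB value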
def __jobGraphFromRecord(self, record):
    # Rebuild a JobGraph from a stored record, restoring the native types
    # that were flattened for storage.
    attrs = self.__sanitize_record(record)
    attrs['predecessorsFinished'] = set(attrs['predecessorsFinished'])  # stored as a list
    attrs['memory'] = int(attrs['memory'])
    attrs['disk'] = int(attrs['disk'])
    attrs['stack'] = cPickle.loads(attrs['stack'])  # stored pickled
    attrs['chainedJobs'] = cPickle.loads(attrs['chainedJobs'])
    return JobGraph(**attrs)
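# A hypothetical sketch of the inverse transform implied by
# __jobGraphFromRecord, assuming the same record layout; this helper and its
# name are illustrative and do not appear in the original. Sets are flattened
# to lists and the stack/chainedJobs fields are pickled before being stored.
def __jobGraphToRecord(self, job):
    attrs = dict(job.__dict__)
    attrs['predecessorsFinished'] = list(attrs['predecessorsFinished'])
    attrs['stack'] = cPickle.dumps(attrs['stack'])
    attrs['chainedJobs'] = cPickle.dumps(attrs['chainedJobs'])
    return attrs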
def create(self, jobNode):
    jobStoreID = self._newJobID()
    log.debug("Creating job %s for '%s'", jobStoreID,
              '<no command>' if jobNode.command is None else jobNode.command)
    job = JobGraph.fromJobNode(jobNode, jobStoreID=jobStoreID,
                               tryCount=self._defaultTryCount())
    if hasattr(self, "_batchedJobGraphs") and self._batchedJobGraphs is not None:
        # A batch is open: defer the write until the batch is flushed.
        self._batchedJobGraphs.append(job)
    else:
        # No batch open: pickle the job and write it to the store immediately.
        self._writeString(jobStoreID, pickle.dumps(job, protocol=pickle.HIGHEST_PROTOCOL))
    return job
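# A minimal read-side sketch matching the create() above, assuming a
# _readString helper symmetric to _writeString (that helper name is an
# assumption, not shown in the original): loading a job is just unpickling
# the stored string.
def load(self, jobStoreID):
    return pickle.loads(self._readString(jobStoreID))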
def create(self, jobNode):
    # The absolute path to the job directory.
    absJobDir = tempfile.mkdtemp(prefix="job", dir=self._getTempSharedDir())
    # Sub-directory to put temporary files associated with the job in.
    os.mkdir(os.path.join(absJobDir, "g"))
    # Make the job.
    job = JobGraph.fromJobNode(jobNode, jobStoreID=self._getRelativePath(absJobDir),
                               tryCount=self._defaultTryCount())
    # Write the job file to disk.
    self.update(job)
    return job
def create(self, jobNode):
    jobStoreID = self._newID()
    job = JobGraph(jobStoreID=jobStoreID,
                   unitName=jobNode.name, jobName=jobNode.job,
                   command=jobNode.command,
                   remainingRetryCount=self._defaultTryCount(),
                   logJobStoreFileID=None,
                   predecessorNumber=jobNode.predecessorNumber,
                   **jobNode._requirements)
    self._writeString(jobStoreID, cPickle.dumps(job))
    return job
def create(self, jobNode):
    # The absolute path to the job directory.
    absJobDir = tempfile.mkdtemp(prefix="job", dir=self._getTempSharedDir())
    # Sub-directory to put temporary files associated with the job in.
    os.mkdir(os.path.join(absJobDir, "g"))
    # Make the job.
    job = JobGraph.fromJobNode(jobNode, jobStoreID=self._getRelativePath(absJobDir),
                               tryCount=self._defaultTryCount())
    if hasattr(self, "_batchedJobGraphs") and self._batchedJobGraphs is not None:
        # A batch is open: save the job later, when the batch is flushed.
        self._batchedJobGraphs.append(job)
    else:
        # Save it now.
        self.update(job)
    return job
def create(self, jobNode):
    job = JobGraph.fromJobNode(jobNode, jobStoreID=self._newJobID(),
                               tryCount=self._defaultTryCount())
    try:
        with self.conn.cursor() as cur:
            attrs = job.__dict__
            # Sets cannot be stored directly; flatten to a list for the row.
            attrs['predecessorsFinished'] = list(attrs['predecessorsFinished'])
            # The config is runtime state and is not persisted.
            attrs.pop('_config')
            self._insert_row('job_store', **attrs)
        self.conn.commit()
    except RuntimeError:
        # Roll back the open transaction, then re-raise with the original
        # traceback (bare raise rather than raise e).
        self.conn.rollback()
        raise
    return job
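# A hypothetical sketch of the _insert_row helper called above; its body and
# the DB-API-style connection are assumptions, not shown in the original. It
# builds a parameterized INSERT from the attribute names and values.
def _insert_row(self, table, **attrs):
    columns = ', '.join(attrs)
    placeholders = ', '.join(['%s'] * len(attrs))
    # Table and column names come from trusted code; values are parameterized.
    query = 'INSERT INTO %s (%s) VALUES (%s)' % (table, columns, placeholders)
    with self.conn.cursor() as cur:
        cur.execute(query, list(attrs.values()))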
def createJobGraph(memory, cores, disk, preemptable, checkpoint):
    """Create a fake-ish Job and JobGraph pair, and return the jobGraph."""
    # Nested test helper: ``self`` is presumably captured from the enclosing
    # test method's scope.
    name = 'jobGraph%d' % self.jobGraphNumber
    self.jobGraphNumber += 1
    job = Job()
    job.checkpoint = checkpoint
    # Pickle the Job into the job store and build a command that refers to it.
    with self.jobStore.writeFileStream() as (f, fileStoreID):
        pickle.dump(job, f, pickle.HIGHEST_PROTOCOL)
    command = '_toil %s fooCommand toil True' % fileStoreID
    jobGraph = JobGraph(command=command, memory=memory, cores=cores, disk=disk,
                        unitName=name, jobName=name, preemptable=preemptable,
                        jobStoreID=name, remainingRetryCount=1,
                        predecessorNumber=1)
    return self.jobStore.create(jobGraph)
def create(self, jobNode):
    # Get the job's name. We want to group jobs with the same name together.
    # This will be e.g. the function name for wrapped-function jobs.
    # Make sure to render it filename-safe.
    usefulFilename = self._makeStringFilenameSafe(jobNode.jobName)
    # Make a unique temp directory under a directory for this job name,
    # possibly sprayed across multiple levels of subdirectories.
    absJobDir = tempfile.mkdtemp(prefix=self.JOB_DIR_PREFIX,
                                 dir=self._getArbitraryJobsDirForName(usefulFilename))
    # Make the job to save.
    job = JobGraph.fromJobNode(jobNode, jobStoreID=self._getJobIdFromDir(absJobDir),
                               tryCount=self._defaultTryCount())
    if hasattr(self, "_batchedJobGraphs") and self._batchedJobGraphs is not None:
        # Save it later.
        self._batchedJobGraphs.append(job)
    else:
        # Save it now.
        self.update(job)
    return job
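# Three of the create() variants above defer writes to a _batchedJobGraphs
# list when one is present. Below is a minimal sketch of a context manager
# that could drive that pattern; the name batch() and the flush-via-update
# loop are assumptions, since only the _batchedJobGraphs attribute appears
# in the code above.
from contextlib import contextmanager

@contextmanager
def batch(self):
    self._batchedJobGraphs = []
    try:
        yield
        # Flush every deferred job in one pass once the block exits cleanly.
        for job in self._batchedJobGraphs:
            self.update(job)
    finally:
        self._batchedJobGraphs = None

# Usage sketch:
#     with jobStore.batch():
#         jobStore.create(nodeA)
#         jobStore.create(nodeB)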