コード例 #1
0
 def create(self, jobNode):
     """Build a JobGraph for *jobNode*, persist (or batch) it, and return it.

     A new job-store ID is allocated via self._newJobID(). If this instance
     has a non-None _batchedJobGraphs list, the job is queued there for a
     later bulk write; otherwise it is pickled and written immediately with
     self._writeString().
     """
     jobStoreID = self._newJobID()
     log.debug("Creating job %s for '%s'",
               jobStoreID, '<no command>' if jobNode.command is None else jobNode.command)
     job = JobGraph.fromJobNode(jobNode, jobStoreID=jobStoreID, tryCount=self._defaultTryCount())
     # Batching support may not exist on older/other subclasses, hence hasattr.
     if hasattr(self, "_batchedJobGraphs") and self._batchedJobGraphs is not None:
         self._batchedJobGraphs.append(job)
     else:
         # NOTE(review): a truncated leftover comment here read "UPDATE: bz2.compress(",
         # suggesting the payload was once (or should be) bz2-compressed before
         # writing — confirm against the corresponding read path before changing.
         self._writeString(jobStoreID, pickle.dumps(job, protocol=pickle.HIGHEST_PROTOCOL))
     return job
コード例 #2
0
ファイル: fileJobStore.py プロジェクト: Duke-GCB/toil
 def create(self, jobNode):
     """Allocate an on-disk directory for *jobNode*, build its JobGraph,
     write it to disk, and return it."""
     # Fresh per-job directory under the shared temp area; its relative
     # path doubles as the job-store ID.
     jobDir = tempfile.mkdtemp(prefix="job", dir=self._getTempSharedDir())
     # The "g" subdirectory holds temporary files associated with the job.
     os.mkdir(os.path.join(jobDir, "g"))
     newJob = JobGraph.fromJobNode(
         jobNode,
         jobStoreID=self._getRelativePath(jobDir),
         tryCount=self._defaultTryCount())
     # Persist the new job to disk before handing it back.
     self.update(newJob)
     return newJob
コード例 #3
0
 def create(self, jobNode):
     """Create and persist a JobGraph for *jobNode*, returning the new job.

     The job gets its own freshly-made temp directory, whose relative path
     serves as the job-store ID.
     """
     jobDirectory = tempfile.mkdtemp(prefix="job", dir=self._getTempSharedDir())
     # Subdirectory for the job's temporary files.
     tempSubDir = os.path.join(jobDirectory, "g")
     os.mkdir(tempSubDir)
     storeId = self._getRelativePath(jobDirectory)
     createdJob = JobGraph.fromJobNode(jobNode,
                                       jobStoreID=storeId,
                                       tryCount=self._defaultTryCount())
     # Flush the job file to disk immediately.
     self.update(createdJob)
     return createdJob
コード例 #4
0
ファイル: fileJobStore.py プロジェクト: theferrit32/toil
 def create(self, jobNode):
     """Create a JobGraph for *jobNode* in its own job directory and return it.

     If batching is active (self._batchedJobGraphs is a list), the job is
     queued for a later bulk write instead of being written immediately.
     """
     # Per-job directory; its relative path is the job-store ID.
     jobDir = tempfile.mkdtemp(prefix="job", dir=self._getTempSharedDir())
     # Temporary files associated with the job live under "g".
     os.mkdir(os.path.join(jobDir, "g"))
     job = JobGraph.fromJobNode(jobNode,
                                jobStoreID=self._getRelativePath(jobDir),
                                tryCount=self._defaultTryCount())
     # getattr covers both "attribute missing" and "attribute is None".
     pending = getattr(self, "_batchedJobGraphs", None)
     if pending is None:
         # No batching: write the job file to disk now.
         self.update(job)
     else:
         pending.append(job)
     return job
コード例 #5
0
ファイル: fileJobStore.py プロジェクト: chapmanb/toil
 def create(self, jobNode):
     """Make a JobGraph for *jobNode*, store it (now, or batched for later),
     and return the new job."""
     # Unique directory for this job under the shared temp area.
     newJobDir = tempfile.mkdtemp(prefix="job", dir=self._getTempSharedDir())
     # "g" = the job's temporary-file subdirectory.
     os.mkdir(os.path.join(newJobDir, "g"))
     relativeId = self._getRelativePath(newJobDir)
     job = JobGraph.fromJobNode(jobNode, jobStoreID=relativeId,
                                tryCount=self._defaultTryCount())
     batchQueue = getattr(self, "_batchedJobGraphs", None)
     if batchQueue is not None:
         # Batching enabled: defer the disk write.
         batchQueue.append(job)
     else:
         self.update(job)
     return job
コード例 #6
0
 def create(self, jobNode):
     """Create a JobGraph for *jobNode* and insert it as a row of the
     'job_store' table, committing on success.

     :param jobNode: the node describing the job to create.
     :return: the new JobGraph (unmodified — see note below).
     :raises RuntimeError: re-raised after rolling back the transaction.
     """
     job = JobGraph.fromJobNode(jobNode, jobStoreID=self._newJobID(), tryCount=self._defaultTryCount())
     try:
         with self.conn.cursor():
             # BUG FIX: copy the attribute dict. The previous code aliased
             # job.__dict__ directly, so popping '_config' and rewriting
             # 'predecessorsFinished' as a list mutated the very job object
             # we return to the caller.
             attrs = dict(job.__dict__)
             # Sets aren't serializable by the DB layer; store as a list.
             attrs['predecessorsFinished'] = list(attrs['predecessorsFinished'])
             # '_config' is not a persisted column; tolerate its absence.
             attrs.pop('_config', None)
             self._insert_row('job_store', **attrs)
             self.conn.commit()
     except RuntimeError:
         # Known failure mode: undo the partial transaction, then re-raise
         # with the original traceback (bare raise, not `raise e`).
         self.conn.rollback()
         raise
     return job
コード例 #7
0
ファイル: fileJobStore.py プロジェクト: glmxndr/toil
    def create(self, jobNode):
        """Create a JobGraph for *jobNode*, save it (or queue it when batching
        is active), and return it."""
        # Jobs sharing a name (e.g. the wrapped function's name) are grouped
        # under one directory tree; the name must be rendered filename-safe.
        safeName = self._makeStringFilenameSafe(jobNode.jobName)

        # Unique temp directory for this job under the per-name directory,
        # which may be sprayed across several levels of subdirectories.
        jobDir = tempfile.mkdtemp(prefix=self.JOB_DIR_PREFIX,
                                  dir=self._getArbitraryJobsDirForName(safeName))
        job = JobGraph.fromJobNode(jobNode,
                                   jobStoreID=self._getJobIdFromDir(jobDir),
                                   tryCount=self._defaultTryCount())
        batchQueue = getattr(self, "_batchedJobGraphs", None)
        if batchQueue is not None:
            # Batching active: save later, in bulk.
            batchQueue.append(job)
        else:
            # Save the job now.
            self.update(job)
        return job