def run(self, fileStore):
    """Merge the sequence chunks back into a single sequence file.

    Reads every chunk referenced by ``self.chunkIDList`` out of the job
    store, passes the chunk file names on stdin to the
    ``cactus_batch_mergeChunks`` tool, and uploads the merged output.

    :return: file-store ID of the merged sequence file
    """
    local_paths = []
    for chunk_id in self.chunkIDList:
        local_paths.append(readGlobalFileWithoutCache(fileStore, chunk_id))
    # Docker expects paths relative to the work dir, so strip the directories.
    relative_names = [os.path.basename(path) for path in local_paths]
    merged_path = fileStore.getLocalTempFile()
    cactus_call(outfile=merged_path,
                stdin_string=" ".join(relative_names),
                parameters=["cactus_batch_mergeChunks"])
    return fileStore.writeGlobalFile(merged_path)
def run(self, fileStore):
    """Concatenate the per-job alignment results into a single file.

    Downloads every results file in ``self.resultsFileIDs``, concatenates
    them into one local temp file, uploads the collated file to the job
    store, and then deletes the now-redundant per-job results files.

    :return: file-store ID of the collated results file
    """
    # Lazy %-args: the (potentially long) ID list is only formatted when
    # INFO logging is actually enabled — and this now matches the style
    # of the second logger.info call in this method.
    logger.info("Results IDs: %s", self.resultsFileIDs)
    resultsFiles = [readGlobalFileWithoutCache(fileStore, fileID)
                    for fileID in self.resultsFileIDs]
    collatedResultsFile = fileStore.getLocalTempFile()
    catFiles(resultsFiles, collatedResultsFile)
    logger.info("Collated the alignments to the file: %s", collatedResultsFile)
    collatedResultsID = fileStore.writeGlobalFile(collatedResultsFile)
    # The individual results are subsumed by the collated file, so free
    # the job-store space they occupy.
    for resultsFileID in self.resultsFileIDs:
        fileStore.deleteGlobalFile(resultsFileID)
    return collatedResultsID
def run(self, fileStore):
    """Concatenate the per-job alignment results and schedule cleanup.

    Downloads every results file in ``self.resultsFileIDs``, concatenates
    them into one local temp file, uploads the collated file to the job
    store, and adds ``DeleteFileIDs`` child jobs — one per batch of
    ``self.delete_batch_size`` IDs — to remove the per-job results files.

    :return: file-store ID of the collated results file
    """
    # Lazy %-args: the (potentially long) ID list is only formatted when
    # INFO logging is actually enabled — and this now matches the style
    # of the second logger.info call in this method.
    logger.info("Results IDs: %s", self.resultsFileIDs)
    resultsFiles = [
        readGlobalFileWithoutCache(fileStore, fileID)
        for fileID in self.resultsFileIDs
    ]
    collatedResultsFile = fileStore.getLocalTempFile()
    catFiles(resultsFiles, collatedResultsFile)
    logger.info("Collated the alignments to the file: %s", collatedResultsFile)
    collatedResultsID = fileStore.writeGlobalFile(collatedResultsFile)
    # Hand deletion off to child jobs in fixed-size slices of the ID
    # list; presumably DeleteFileIDs removes each batch from the job
    # store — TODO(review) confirm against its definition.
    for i in range(0, len(self.resultsFileIDs), self.delete_batch_size):
        self.addChild(
            DeleteFileIDs(self.resultsFileIDs[i:i + self.delete_batch_size]))
    return collatedResultsID