class RunCactusPreprocessorThenProgressiveDown2(RoundedJob):
    """Second stage of the progressive pipeline, run after preprocessing.

    Exports/logs the preprocessed assemblies, launches the recursive
    ProgressiveDown alignment rooted at *event*, and finally schedules
    exportHal as a follow-on to combine the per-experiment HAL files.
    """

    def __init__(self, options, project, event, schedule, memory=None, cores=None):
        """Store pipeline state.

        :param options: command-line options (reads ``intermediateResultsUrl``)
        :param project: multi-cactus project wrapper (config ID, sequence ID map)
        :param event: name of the root event to align down from
        :param schedule: experiment schedule passed through to ProgressiveDown
        :param memory: optional Toil memory requirement for this job
        :param cores: optional Toil cores requirement for this job
        """
        RoundedJob.__init__(self, memory=memory, cores=cores, preemptable=True)
        self.options = options
        self.project = project
        self.event = event
        self.schedule = schedule

    def run(self, fileStore):
        """Run the post-preprocessing stage; returns a promise for the HAL export."""
        self.configNode = ET.parse(fileStore.readGlobalFile(self.project.getConfigID())).getroot()
        self.configWrapper = ConfigWrapper(self.configNode)
        self.configWrapper.substituteAllPredefinedConstantsWithLiterals()

        # Fetch the genome -> preprocessed-sequence file-ID map once; it is
        # needed both for the optional export and for the stats logging below.
        preprocessedSequences = self.project.getOutputSequenceIDMap()

        # Save preprocessed sequences alongside the intermediate results, if requested
        if self.options.intermediateResultsUrl is not None:
            for genome, seqID in preprocessedSequences.items():
                fileStore.exportFile(seqID, self.options.intermediateResultsUrl + '-preprocessed-' + genome)

        # Log the stats for the preprocessed assemblies
        for name, sequence in preprocessedSequences.items():
            self.addChildJobFn(logAssemblyStats, "After preprocessing", name, sequence)

        # Recursively align down the tree from the root event; .rv() is a
        # promise for the updated project produced by ProgressiveDown.
        project = self.addChild(ProgressiveDown(options=self.options,
                                                project=self.project,
                                                event=self.event,
                                                schedule=self.schedule,
                                                memory=self.configWrapper.getDefaultMemory())).rv()

        # Combine the smaller HAL files from each experiment
        return self.addFollowOnJobFn(exportHal, project=project,
                                     memory=self.configWrapper.getDefaultMemory(),
                                     disk=self.configWrapper.getExportHalDisk(),
                                     preemptable=False).rv()