def _initialize(self):
    """Decide whether any output-data upload should be attempted.

    Gracefully terminates the module when the workflow/step status
    check says there is nothing to do.

    :raises GracefulTermination: when the workflow or step status
        rules out uploading the outputs.
    """
    # NOTE(review): earlier draft considered resolving LFNs here:
    # lfnsList = self.__getOutputLFNs(self.outputData) or outputList?
    okToUpload = self._checkWFAndStepStatus()
    if not okToUpload:
        raise GracefulTermination('No output data upload attempted')
def _resolveInputVariables(self):
    """Resolve the module parameters from ``self.workflow_commons``.

    Populates, when present: ``outputData`` (from 'OutputData', normalized
    to a list), ``outputList`` (from 'outputList'), ``outputDataStep``,
    ``outputSE`` and ``outputPath``.

    :raises GracefulTermination: when neither 'OutputData' nor 'outputList'
        is present — there is nothing to upload.
    """
    super(UploadOutputs, self)._resolveInputVariables()

    # this comes from Job().setOutputData(). Typical for user jobs
    if "OutputData" in self.workflow_commons:
        self.outputData = self.workflow_commons["OutputData"]
        if isinstance(self.outputData, six.string_types):
            self.outputData = [i.strip() for i in self.outputData.split(";")]
    # if not present, we use the outputList, which is instead incrementally
    # created based on the single step outputs. This is more typical for
    # production jobs, that can have many steps linked one after the other
    elif "outputList" in self.workflow_commons:
        self.outputList = self.workflow_commons["outputList"]
    else:
        raise GracefulTermination("Nothing to upload")

    # in case you want to put a mask on the steps
    # TODO: add it to the DIRAC API
    if "outputDataStep" in self.workflow_commons:
        self.outputDataStep = self.workflow_commons["outputDataStep"]

    # this comes from Job().setOutputData(). Typical for user jobs
    if "OutputSE" in self.workflow_commons:
        specifiedSE = self.workflow_commons["OutputSE"]
        if isinstance(specifiedSE, list):
            # BUGFIX: a list-valued OutputSE used to be silently ignored
            # (only the string branch assigned self.outputSE)
            self.outputSE = specifiedSE
        else:
            self.outputSE = [i.strip() for i in specifiedSE.split(";")]
    else:
        self.log.verbose("No OutputSE specified, using default value: %s" % (", ".join(self.defaultOutputSE)))

    # this comes from Job().setOutputData(). Typical for user jobs
    if "OutputPath" in self.workflow_commons:
        self.outputPath = self.workflow_commons["OutputPath"]
def _initialize(self):
    """Check whether the module is enabled, then stamp the failover request.

    Fills ``RequestName``, ``JobID`` and ``SourceComponent`` on
    ``self.request`` from the current job identifier.

    :raises GracefulTermination: when the module is disabled for this job.
    """
    enabled = self._enableModule()
    if not enabled:
        raise GracefulTermination("Skipping FailoverRequest module")

    jid = self.jobID
    self.request.RequestName = "job_%d_request.xml" % jid
    self.request.JobID = jid
    self.request.SourceComponent = "Job_%d" % jid
def _resolveInputVariables(self):
    """Resolve the module parameters from ``self.workflow_commons``.

    Populates, when present: ``outputData`` (from 'OutputData', normalized
    to a list), ``outputList`` (from 'outputList'), ``outputDataStep``,
    ``outputSE`` and ``outputPath``.

    :raises GracefulTermination: when neither 'OutputData' nor 'outputList'
        is present — there is nothing to upload.
    """
    super(UploadOutputs, self)._resolveInputVariables()

    # this comes from Job().setOutputData(). Typical for user jobs
    # NOTE: dict.has_key() is Python-2-only; use 'in' for py2/py3 compat
    if 'OutputData' in self.workflow_commons:
        self.outputData = self.workflow_commons['OutputData']
        if not isinstance(self.outputData, list):
            self.outputData = [i.strip() for i in self.outputData.split(';')]
    # if not present, we use the outputList, which is instead incrementally
    # created based on the single step outputs. This is more typical for
    # production jobs, that can have many steps linked one after the other
    elif 'outputList' in self.workflow_commons:
        self.outputList = self.workflow_commons['outputList']
    else:
        raise GracefulTermination('Nothing to upload')

    # in case you want to put a mask on the steps
    # TODO: add it to the DIRAC API
    if 'outputDataStep' in self.workflow_commons:
        self.outputDataStep = self.workflow_commons['outputDataStep']

    # this comes from Job().setOutputData(). Typical for user jobs
    if 'OutputSE' in self.workflow_commons:
        specifiedSE = self.workflow_commons['OutputSE']
        if isinstance(specifiedSE, list):
            # BUGFIX: a list-valued OutputSE used to be silently ignored
            self.outputSE = specifiedSE
        else:
            # BUGFIX: was 'self.utputSE' — the specified SE was never stored
            self.outputSE = [i.strip() for i in specifiedSE.split(';')]
    else:
        self.log.verbose('No OutputSE specified, using default value: %s' % (', '.join(self.defaultOutputSE)))
        # presumably empty here means "fall back to defaultOutputSE"
        # downstream — TODO confirm against the upload step
        self.outputSE = []

    # this comes from Job().setOutputData(). Typical for user jobs
    if 'OutputPath' in self.workflow_commons:
        self.outputPath = self.workflow_commons['OutputPath']