def getDatasets(self):
    """ Get the datasets for the output files """

    # Get the default dataset
    if self.__job.destinationDblock and self.__job.destinationDblock[0] != 'NULL' and self.__job.destinationDblock[0] != ' ':
        dsname = self.__job.destinationDblock[0]
    else:
        dsname = "%s-%s-%s" % (time.localtime()[0:3])  # fall back to a date-based name (year-month-day)

    # Create the dataset dictionary
    # (if None, the dsname above will be used for all output files)
    datasetDict = getDatasetDict(self.__job.outFiles, self.__job.destinationDblock, self.__job.logFile, self.__job.logDblock)
    if datasetDict:
        tolog("Dataset dictionary has been verified: %s" % str(datasetDict))
    else:
        tolog("Dataset dictionary could not be verified, output files will go to: %s" % (dsname))

    return dsname, datasetDict
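# A minimal sketch, not pilot code: it shows the two outcomes of the logic
# above as a standalone function. The fallback name built from
# time.localtime()[0:3] is just the (year, month, day) tuple rendered as a
# string, e.g. "2025-5-17"; the shape of the dataset dictionary is an
# assumption, since getDatasetDict() itself is defined elsewhere.

import time

def _sketchGetDatasets(outFiles, destinationDblock, logFile, logDblock):
    """ Hypothetical standalone version of getDatasets() above """
    if destinationDblock and destinationDblock[0] not in ('NULL', ' '):
        dsname = destinationDblock[0]
    else:
        dsname = "%s-%s-%s" % (time.localtime()[0:3])  # e.g. "2025-5-17"

    # assumed shape of the dictionary getDatasetDict() builds:
    # one entry per output file plus one for the log file
    if len(outFiles) == len(destinationDblock):
        datasetDict = dict(zip(outFiles, destinationDblock))
        datasetDict[logFile] = logDblock
    else:
        datasetDict = None  # caller falls back to dsname for all files

    return dsname, datasetDict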
def TransferFiles(job_state, datadir, files, **kwargs):
    """ Transfers files from list 'files'
    May change CWD with pUtil.chdir (several times)

    :param job_state:
    :param datadir: job data dir
    :param files: list of filenames
    :param kwargs: specific arguments for other purposes
    :return:
    """
    job = job_state.job
    pUtil.chdir(datadir)

    XMLMetadata = pUtil.getMetadata(job_state.site.workdir, job.jobId)
    thisSite = DorE(kwargs, 'thisSite')

    if not setGuids(job_state, files, **kwargs):
        job.result[2] = PilotErrors().ERR_LOSTJOBPFC
        return ReturnCode.FailedJob

    outPFC = updateOutPFC(job, **kwargs)
    if not outPFC:
        return ReturnCode.FailedJob

    dsname = defaultDSname(job.destinationDblock)

    datasetDict = pUtil.getDatasetDict(job.outFiles, job.destinationDblock, job.logFile, job.logDblock)
    if not datasetDict:
        log("Output files will go to default dataset: %s" % (dsname))

    # the cmtconfig is needed by at least the xrdcp site mover
    cmtconfig = pUtil.getCmtconfig(job.cmtconfig)

    tin_0 = os.times()

    rf = None
    _state = ReturnCode.OK
    _msg = ""
    ec = -1
    try:
        # Note: alt stage-out numbers are not saved in recovery mode (job object not returned from this function)
        rc, pilotErrorDiag, rf, rs, job.filesNormalStageOut, job.filesAltStageOut, os_bucket_id = Mover.mover_put_data(
            "xmlcatalog_file:%s" % outPFC, dsname, thisSite.sitename,
            thisSite.computingElement,
            analysisJob=pUtil.isAnalysisJob(job.trf.split(",")[0]),
            proxycheck=DorE(kwargs, 'proxycheckFlag'),
            pinitdir=DorE(kwargs, 'pilot_initdir'),
            datasetDict=datasetDict,
            stageoutTries=DorE(kwargs, 'stageoutretry'),
            cmtconfig=cmtconfig,
            recoveryWorkDir=thisSite.workdir,
            job=job)
    except Exception, e:
        pilotErrorDiag = "Put function can not be called for staging out: %s" % str(e)
        log("!!%s!!1105!! %s" % (env['errorLabel'], pilotErrorDiag))
        ec = PilotErrors().ERR_PUTFUNCNOCALL
        _state = ReturnCode.Holding
        _msg = env['errorLabel']
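# DorE() is used above to resolve settings either from the caller's kwargs or
# from the global pilot environment. A minimal sketch of that behaviour,
# assuming 'env' is the module-level settings dictionary already referenced in
# the except branch above; the real helper may differ in detail.

def _sketchDorE(kwargs, name):
    """ Hypothetical 'Dictionary or Environment' lookup: kwargs first, env second """
    return kwargs[name] if name in kwargs else env[name]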
def getDatasets(job):
    """ get the datasets for the output files """

    # get the default dataset
    if job.destinationDblock and job.destinationDblock[0] != 'NULL' and job.destinationDblock[0] != ' ':
        dsname = job.destinationDblock[0]
    else:
        dsname = "%s-%s-%s" % (time.localtime()[0:3])  # fall back to a date-based name (year-month-day)

    # create the dataset dictionary
    # (if None, the dsname above will be used for all output files)
    datasetDict = getDatasetDict(job.outFiles, job.destinationDblock, job.logFile, job.logDblock)
    if datasetDict:
        tolog("Dataset dictionary has been verified")
    else:
        tolog("Dataset dictionary could not be verified, output files will go to: %s" % (dsname))

    return dsname, datasetDict
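# A hedged usage sketch with a mock job object; the attribute names follow the
# function above, but the class itself and the expected results are assumptions
# for illustration (they depend on what getDatasetDict() actually returns).

class _MockJob(object):
    """ Hypothetical stand-in carrying just the attributes getDatasets() reads """
    outFiles = ['out1.root', 'out2.root']
    destinationDblock = ['mc.dataset.A', 'mc.dataset.B']
    logFile = 'job.log.tgz'
    logDblock = 'mc.dataset.log'

# dsname, datasetDict = getDatasets(_MockJob())
# -> dsname == 'mc.dataset.A', and datasetDict is assumed to map each output
#    file (and the log) to its destination dataset, or None on mismatch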