def DataRemovalByQueryTSExample(args=None):
    from DIRAC.TransformationSystem.Client.Transformation import Transformation
    from DIRAC.TransformationSystem.Client.TransformationClient import TransformationClient

    t = Transformation()
    tc = TransformationClient()
    t.setTransformationName("DM_RemovalQuery1")  # Must be unique
    # t.setTransformationGroup("Group1")
    t.setType("Removal")
    t.setPlugin("Standard")  # Not needed. The default is 'Standard'
    t.setDescription("corsika Removal")
    t.setLongDescription("corsika Removal")  # Mandatory
    t.setGroupSize(2)  # Here you specify how many files should be grouped within the same request, e.g. 100
    t.setBody("Removal;RemoveFile")  # Mandatory (the default is a ReplicateAndRegister operation)

    t.addTransformation()  # Transformation is created here
    t.setStatus("Active")
    t.setAgentType("Automatic")

    transID = t.getTransformationID()
    # Add files to Transformation based on Catalog Query
    tc.createTransformationInputDataQuery(transID['Value'],
                                          {'particle': 'proton', 'prodName': 'ConfigTestTS9', 'outputType': 'Data'})
def DataReplicationByQueryTSExample(args=None):
    from DIRAC.TransformationSystem.Client.Transformation import Transformation
    from DIRAC.TransformationSystem.Client.TransformationClient import TransformationClient

    t = Transformation()
    tc = TransformationClient()
    t.setTransformationName("DM_ReplicationByQuery1")  # This must vary between transformations
    # t.setTransformationGroup("Group1")
    t.setType("Replication")
    t.setSourceSE(['CYF-STORM-Disk', 'DESY-ZN-Disk'])  # A list of source SEs; at least one must hold the files
    t.setTargetSE(['CEA-Disk'])
    t.setDescription("data Replication")
    t.setLongDescription("data Replication")  # Mandatory
    t.setGroupSize(1)
    t.setPlugin("Broadcast")

    t.addTransformation()  # Transformation is created here
    t.setStatus("Active")
    t.setAgentType("Automatic")

    transID = t.getTransformationID()
    # Add files to Transformation based on Catalog Query
    tc.createTransformationInputDataQuery(transID['Value'],
                                          {'particle': 'gamma',
                                           'prodName': 'Config_test300113',
                                           'outputType': 'Data',
                                           'simtelArrayProdVersion': 'prod-2_21122012_simtel',
                                           'runNumSeries': '0'})
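The two query-based examples above never check what the service returns. A small hypothetical follow-up sketch: the same client can confirm that the transformation was registered with getTransformationStats, which is the call the interactive scripts further down use for the same purpose.

# Hypothetical check, assuming `tc` from the example above is still in scope.
res = tc.getTransformationStats("DM_ReplicationByQuery1")
if res['OK']:
    print("Transformation registered, file counters: %s" % res['Value'])
else:
    print("Transformation not found: %s" % res['Message'])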
def _createReplication(targetSE, sourceSE, prodID, datatype, extraname=''):
    """Creates the replication transformation based on the given parameters"""
    from DIRAC.TransformationSystem.Client.Transformation import Transformation
    from DIRAC.TransformationSystem.Client.TransformationClient import TransformationClient

    metadata = {"Datatype": datatype, "ProdID": prodID}

    trans = Transformation()
    transName = 'replicate_%s_%s' % (str(prodID), ",".join(targetSE))
    if extraname:
        transName += "_%s" % extraname
    trans.setTransformationName(transName)
    description = 'Replicate files for prodID %s to %s' % (str(prodID), ",".join(targetSE))
    trans.setDescription(description)
    trans.setLongDescription(description)
    trans.setType('Replication')
    trans.setGroup('Replication')
    trans.setPlugin('Broadcast')

    res = trans.setSourceSE(sourceSE)
    if not res['OK']:
        exit(1)
    res = trans.setTargetSE(targetSE)
    if not res['OK']:
        exit(1)

    res = trans.addTransformation()
    if not res['OK']:
        gLogger.error(res['Message'])
        exit(1)
    gLogger.verbose(res)
    trans.setStatus('Active')
    trans.setAgentType('Automatic')

    currtrans = trans.getTransformationID()['Value']
    client = TransformationClient()
    res = client.createTransformationInputDataQuery(currtrans, metadata)
    if res['OK']:
        gLogger.always("Successfully created replication transformation")
        return S_OK()
    else:
        gLogger.error("Failure during replication creation", res['Message'])
        return S_ERROR("Failed to create transformation")
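A hypothetical call of the helper above; the SE names and production ID are placeholders, not values from the original code, and gLogger comes from the top-level DIRAC package as in the helper itself.

from DIRAC import gLogger

res = _createReplication(targetSE=['CERN-DST-EOS'],  # placeholder destination SE
                         sourceSE=['DESY-SRM'],      # placeholder source SE
                         prodID=1234,                # placeholder production ID
                         datatype='DST',
                         extraname='pass1')
if not res['OK']:
    gLogger.error("Replication transformation was not created", res['Message'])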
def createTransformation(self):
    ########################################
    # Transformation definition
    ########################################
    t = Transformation()
    t.setTransformationName(self.__stepName)
    t.setType(self.__transType)
    t.setDescription(self.__description)
    t.setLongDescription(self.__description)
    if self.__isGen:
        t.setMaxNumberOfTasks(self.__maxNumberOfTasks)
    else:
        t.setGroupSize(1)
    if self.__transGroup:
        t.setTransformationGroup(self.__transGroup)
    # set the job workflow to the transformation
    t.setBody(self.__job.workflow.toXML())

    ########################################
    # Transformation submission
    ########################################
    res = t.addTransformation()
    if not res['OK']:
        raise Exception('Add transformation error: {0}'.format(res['Message']))
    t.setStatus("Active")
    t.setAgentType("Automatic")
    currtrans = t.getTransformationID()['Value']

    if self.__inputMeta and not self.__isGen:
        client = TransformationClient()
        res = client.createTransformationInputDataQuery(currtrans, self.__inputMeta)
        if not res['OK']:
            raise Exception('Create transformation query error: {0}'.format(res['Message']))
    return str(currtrans)
def _createReplication(targetSE, sourceSE, prodID, datatype, extraname=''):
    """Creates the replication transformation based on the given parameters"""
    from DIRAC.TransformationSystem.Client.Transformation import Transformation
    from DIRAC.TransformationSystem.Client.TransformationClient import TransformationClient

    metadata = {"Datatype": datatype, "ProdID": prodID}

    trans = Transformation()
    transName = 'replicate_%s_%s' % (str(prodID), ",".join(targetSE))
    if extraname:
        transName += "_%s" % extraname
    trans.setTransformationName(transName)
    description = 'Replicate files for prodID %s to %s' % (str(prodID), ",".join(targetSE))
    trans.setDescription(description)
    trans.setLongDescription(description)
    trans.setType('Replication')
    trans.setPlugin('Broadcast')

    res = trans.setSourceSE(sourceSE)
    if not res['OK']:
        exit(1)
    res = trans.setTargetSE(targetSE)
    if not res['OK']:
        exit(1)

    res = trans.addTransformation()
    if not res['OK']:
        gLogger.error(res['Message'])
        exit(1)
    gLogger.verbose(res)
    trans.setStatus('Active')
    trans.setAgentType('Automatic')

    currtrans = trans.getTransformationID()['Value']
    client = TransformationClient()
    res = client.createTransformationInputDataQuery(currtrans, metadata)
    if res['OK']:
        gLogger.always("Successfully created replication transformation")
        return S_OK()
    else:
        gLogger.error("Failure during replication creation", res['Message'])
        return S_ERROR("Failed to create transformation")
def applyInputDataQuery(self, metadata=None, prodid=None):
    """ Tell the production to update itself using the metadata query specified,
    i.e. submit new jobs if new files are added corresponding to the same query.
    """
    if not self.transfid and self.currtrans:
        self.transfid = self.currtrans.getTransformationID()['Value']  # pylint: disable=E1101
    elif prodid:
        self.transfid = prodid
    if not self.transfid:
        print("No transformation was defined earlier")
        return S_ERROR("No transformation defined")
    if metadata:
        self.inputBKSelection = metadata

    client = TransformationClient()
    if not self.dryrun:
        res = client.createTransformationInputDataQuery(self.transfid, self.inputBKSelection)
        if not res['OK']:
            return res
    else:
        self.log.notice("Would use %s as metadata query for production" % str(self.inputBKSelection))
    return S_OK()
from DIRAC.TransformationSystem.Client.Transformation import Transformation
from DIRAC.TransformationSystem.Client.TransformationClient import TransformationClient
from DIRAC.Interfaces.API.Job import Job

j = Job()
tc = TransformationClient()
t = Transformation()

t.setTransformationName("An example")        # <-- must be unique
t.setTransformationGroup("A group")          # <-- for monitoring
t.setType("MCSimulation")                    # <-- the type must be among the known types
t.setDescription("This is an example")
t.setLongDescription("This is a nice example")
t.setBody(j.workflow._toXML())
t.setGroupSize(1)
t.setPlugin("Standard")

t.addTransformation()                        # <-- transformation is created here
t.setStatus("Active")                        # <-- make it start
t.setAgentType("Automatic")                  # <-- should be the default

transfid = t.getTransformationID()['Value']  # <-- unique transformation ID
tc.createTransformationInputDataQuery(transfid, {'meta1': val1, "meta2": {">": 34}})
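The minimal example above discards every return value. Each of these calls actually returns an S_OK/S_ERROR dictionary, so a slightly more defensive sketch of the last steps (same objects as above, nothing assumed beyond the 'OK'/'Message' keys used throughout these examples) would be:

res = t.addTransformation()                  # transformation is created here
if not res['OK']:
    print("Could not create the transformation: %s" % res['Message'])
else:
    t.setStatus("Active")
    t.setAgentType("Automatic")
    transfid = t.getTransformationID()['Value']
    res = tc.createTransformationInputDataQuery(transfid, {'meta1': val1, "meta2": {">": 34}})
    if not res['OK']:
        print("Could not attach the input data query: %s" % res['Message'])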
def createMovingTransformation(targetSE, sourceSE, prodID, datatype, extraname='', forceMoving=False):
    """Creates the replication transformation based on the given parameters

    :param targetSE: Destination for files
    :type targetSE: python:list or str
    :param str sourceSE: Origin of files. Files will be removed from this SE
    :param int prodID: Production ID of files to be moved
    :param str datatype: DataType of files to be moved
    :param str extraname: addition to the transformation name, only needed if the same transformation was already created
    :param bool forceMoving: Move always, even if GEN/SIM files don't have descendants
    :returns: S_OK, S_ERROR
    """
    metadata = {"Datatype": datatype, "ProdID": prodID}

    if isinstance(targetSE, basestring):
        targetSE = [targetSE]

    trans = Transformation()
    transName = 'Move_%s_%s_%s' % (datatype, str(prodID), ",".join(targetSE))
    if extraname:
        transName += "_%s" % extraname
    trans.setTransformationName(transName)
    description = 'Move files for prodID %s to %s' % (str(prodID), ",".join(targetSE))
    trans.setDescription(description)
    trans.setLongDescription(description)
    trans.setType('Replication')
    trans.setGroup('Moving')
    if datatype in ('GEN', 'SIM') and not forceMoving:
        trans.setPlugin('BroadcastProcessed')
    else:
        trans.setPlugin('Broadcast')

    transBody = [("ReplicateAndRegister", {"SourceSE": sourceSE, "TargetSE": targetSE}),
                 ("RemoveReplica", {"TargetSE": sourceSE}),
                ]
    trans.setBody(transBody)

    res = trans.setSourceSE(sourceSE)
    if not res['OK']:
        return S_ERROR("SourceSE not valid: %s" % res['Message'])
    res = trans.setTargetSE(targetSE)
    if not res['OK']:
        return S_ERROR("TargetSE not valid: %s" % res['Message'])

    res = trans.addTransformation()
    if not res['OK']:
        gLogger.error("Failed to create Transformation", res['Message'])
        return res
    gLogger.verbose(res)
    trans.setStatus('Active')
    trans.setAgentType('Automatic')

    currtrans = trans.getTransformationID()['Value']
    client = TransformationClient()
    res = client.createTransformationInputDataQuery(currtrans, metadata)
    if res['OK']:
        gLogger.always("Successfully created replication transformation")
        return S_OK()
    else:
        gLogger.error("Failure during replication creation", res['Message'])
        return S_ERROR("Failed to create transformation: %s" % res['Message'])
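A hypothetical invocation of createMovingTransformation; the SE names and production ID are placeholders, and gLogger again comes from the top-level DIRAC package.

from DIRAC import gLogger

res = createMovingTransformation(targetSE='VO-TAPE-SE',  # placeholder destination SE
                                 sourceSE='VO-DISK-SE',   # placeholder origin SE, replicas are removed from here
                                 prodID=4321,             # placeholder production ID
                                 datatype='REC')
if not res['OK']:
    gLogger.error("Moving transformation was not created", res['Message'])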
from DIRAC.TransformationSystem.Client.Transformation import Transformation
from DIRAC.TransformationSystem.Client.TransformationClient import TransformationClient

# Define transformation steps for the replication of the output data
if replicateFiles and meta:
    Trans = Transformation()
    Trans.setTransformationName('replicate_%s_%s_%s_%s' % (process, energy, polarisation, meta['Datatype']))
    description = 'Replicate %s %s %s %s to' % (process, energy, polarisation, meta['Datatype'])
    for replicaSRM in replicaSRMs:
        description += ' %s,' % (replicaSRM)
    description = description.rstrip(',')  # rstrip returns a new string, so the result must be assigned
    Trans.setDescription(description)
    Trans.setLongDescription(description)
    Trans.setType('Replication')
    Trans.setPlugin('Broadcast')
    Trans.setSourceSE(outputSRM)
    Trans.setTargetSE(replicaSRMs)

    res = Trans.addTransformation()
    if not res['OK']:
        print res
        sys.exit(0)
    print res

    Trans.setStatus('Active')
    Trans.setAgentType('Automatic')
    currtrans = Trans.getTransformationID()['Value']

    client = TransformationClient()
    res = client.createTransformationInputDataQuery(currtrans, meta)
    print res['OK']
print "Example file: %s" % lfns[0] answer = raw_input('Proceed and submit replication? (Y/N): ') if not answer.lower() in ('y', 'yes'): print "Canceled" exit(1) trc = TransformationClient() res = trc.getTransformationStats(name_of_replication) if res['OK']: print "Replication with name %s already exists! Cannot proceed." % name_of_replication exit(1) Trans = Transformation() Trans.setTransformationName(name_of_replication) Trans.setDescription(description) Trans.setLongDescription(description) Trans.setType('Replication') Trans.setPlugin('Broadcast') #Trans.setFileMask(fmask) Trans.setSourceSE(source) Trans.setTargetSE(destination) res = Trans.addTransformation() if not res['OK']: print "Failed to add Replication: %s" % res['Message'] exit(1) Trans.setStatus("Active") Trans.setAgentType("Automatic") currtrans = Trans.getTransformationID()['Value'] res = trc.createTransformationInputDataQuery(currtrans, meta)
class ProductionJob(Job): #pylint: disable=too-many-public-methods, too-many-instance-attributes """ Production job class. Suitable for CLIC studies. Need to sub class and overload for other clients. """ def __init__(self, script = None): super(ProductionJob, self).__init__( script ) self.prodVersion = __RCSID__ self.dryrun = False self.created = False self.checked = False self.call_finalization = False self.finalsdict = {} self.transfid = 0 self.type = 'Production' self.csSection = '/Production/Defaults' self.ops = Operations() self.fc = FileCatalogClient() self.trc = TransformationClient() self.defaultProdID = '12345' self.defaultProdJobID = '12345' self.jobFileGroupSize = 1 self.nbtasks = 1 self.slicesize =0 self.basename = '' self.basepath = self.ops.getValue('/Production/CLIC/BasePath','/ilc/prod/clic/') self.evttype = '' self.datatype = '' self.energycat = '' self.detector = '' self.currtrans = None self.description = '' self.finalpaths = [] self.finalMetaDict = defaultdict( dict ) self.prodMetaDict = {} self.finalMetaDictNonSearch = {} self.metadict_external = {} self.outputStorage = '' self.proxyinfo = getProxyInfo() self.inputdataquery = False self.inputBKSelection = {} self.plugin = 'Standard' self.prodGroup = '' self.prodTypes = ['MCGeneration', 'MCSimulation', 'Test', 'MCReconstruction', 'MCReconstruction_Overlay', 'Merge', 'Split', 'MCGeneration_ILD', 'MCSimulation_ILD', 'MCReconstruction_ILD', 'MCReconstruction_Overlay_ILD', 'Split_ILD' ] self.prodparameters = {} self.prodparameters['NbInputFiles'] = 1 self.prodparameters['nbevts'] = 0 #self.prodparameters["SWPackages"] = '' self._addParameter(self.workflow, "IS_PROD", 'JDL', True, "This job is a production job") if not script: self.__setDefaults() self._recBasePaths = {} self.maxFCFoldersToCheck = 100000 ############################################################################# def __setDefaults(self): """Sets some default parameters. """ self.setPlatform(self.ops.getValue('%s/Platform' % (self.csSection), 'x86_64-slc5-gcc43-opt')) self.setCPUTime('300000') self.setLogLevel('verbose') self.setJobGroup('@{PRODUCTION_ID}') #version control self._setParameter('productionVersion', 'string', self.prodVersion, 'ProdAPIVersion') #General workflow parameters self._setParameter('PRODUCTION_ID', 'string', self.defaultProdID.zfill(8), 'ProductionID') self._setParameter('JOB_ID', 'string', self.defaultProdJobID.zfill(8), 'ProductionJobID') self._setParameter('Priority', 'JDL', '1', 'Priority') self._setParameter('emailAddress', 'string', '*****@*****.**', 'CrashEmailAddress') def _setParameter(self, name, parameterType, parameterValue, description): """Set parameters checking in CS in case some defaults need to be changed. 
""" if self.ops.getValue('%s/%s' % (self.csSection, name), ''): LOG.debug('Setting %s from CS defaults = %s' % (name, self.ops.getValue('%s/%s' % (self.csSection, name)))) self._addParameter(self.workflow, name, parameterType, self.ops.getValue('%s/%s' % (self.csSection, name), 'default'), description) else: LOG.debug('Setting parameter %s = %s' % (name, parameterValue)) self._addParameter(self.workflow, name, parameterType, parameterValue, description) def setConfig(self,version): """ Define the Configuration package to obtain """ appName = 'ILDConfig' self._addSoftware(appName.lower(), version) self.prodparameters['ILDConfigVersion'] = version self._addParameter( self.workflow, 'ILDConfigPackage', 'JDL', appName+version, 'ILDConfig package' ) return S_OK() def setClicConfig(self, version): """Define the ClicConfig package to obtain.""" return self.setConfigPackage('ClicConfig', version) def setConfigPackage(self, appName, version): """Define the config package to obtain.""" self._addSoftware(appName.lower(), version) self._addParameter(self.workflow, appName + 'Package', 'JDL', appName + version, appName + 'package') self.prodparameters[appName + 'Version'] = version return S_OK() def setDryRun(self, run): """ In case one wants to get all the info as if the prod was being submitted """ self.dryrun = run ############################################################################# def setProdGroup(self, group): """ Sets a user defined tag for the production as appears on the monitoring page """ self.prodGroup = group ############################################################################# def setProdPlugin(self, plugin): """ Sets the plugin to be used to creating the production jobs """ self.plugin = plugin ############################################################################# def setJobFileGroupSize(self, files): """ Sets the number of files to be input to each job created. """ if self.checked: return self._reportError("This input is needed at the beginning of the production definition: it is \ needed for total number of evts.") self.jobFileGroupSize = files self.prodparameters['NbInputFiles'] = files def setNbEvtsPerSlice(self,nbevts): """ Define the number of events in a slice. """ self.slicesize = nbevts ############################################################################# def setProdType(self, prodType): """Set prod type. """ if prodType not in self.prodTypes: raise TypeError('Prod must be one of %s' % (', '.join(self.prodTypes))) self.setType(prodType) ############################################################################# def setWorkflowName(self, name): """Set workflow name. """ self.workflow.setName(name) self.name = name ############################################################################# def setWorkflowDescription(self, desc): """Set workflow name. """ self.workflow.setDescription(desc) ############################################################################# def createWorkflow(self): """ Create XML for local testing. """ name = '%s.xml' % self.name if os.path.exists(name): shutil.move(name,'%s.backup' % name) self.workflow.toXMLFile(name) ############################################################################# def setOutputSE(self, outputse): """ Define where the output file(s) will go. 
""" self.outputStorage = outputse return S_OK() ############################################################################# def setInputDataQuery(self, metadata): """ Define the input data query needed """ retMetaKey = self._checkMetaKeys( metadata.keys() ) if not retMetaKey['OK']: return retMetaKey if "ProdID" not in metadata: return self._reportError("Input metadata dictionary must contain at least a key 'ProdID' as reference") retDirs = self._checkFindDirectories( metadata ) if not retDirs['OK']: return retDirs dirs = retDirs['Value'].values() for mdir in dirs[:self.maxFCFoldersToCheck]: LOG.notice("Directory: %s" % mdir) res = self.fc.getDirectoryUserMetadata(mdir) if not res['OK']: return self._reportError("Error looking up the catalog for directory metadata") compatmeta = res['Value'] compatmeta.update(metadata) if 'EvtType' in compatmeta: self.evttype = JobHelpers.getValue( compatmeta['EvtType'], str, basestring ) else: return self._reportError("EvtType is not in the metadata, it has to be!") if 'NumberOfEvents' in compatmeta: self.nbevts = JobHelpers.getValue( compatmeta['NumberOfEvents'], int, None ) self.basename = self.evttype LOG.notice("MetaData: %s" % compatmeta) LOG.notice("MetaData: %s" % metadata) if "Energy" in compatmeta: self.energycat = JobHelpers.getValue( compatmeta["Energy"], str, (int, long, basestring) ) if self.energycat.count("tev"): self.energy = Decimal("1000.") * Decimal(self.energycat.split("tev")[0]) elif self.energycat.count("gev"): self.energy = Decimal("1.") * Decimal(self.energycat.split("gev")[0]) else: self.energy = Decimal("1.") * Decimal(self.energycat) gendata = False if 'Datatype' in compatmeta: self.datatype = JobHelpers.getValue( compatmeta['Datatype'], str, basestring ) if self.datatype == 'gen': gendata = True if "DetectorType" in compatmeta and not gendata: self.detector = JobHelpers.getValue( compatmeta["DetectorType"], str, basestring ) self.inputBKSelection = metadata self.inputdataquery = True self.prodparameters['nbevts'] = self.nbevts self.prodparameters["FCInputQuery"] = self.inputBKSelection return S_OK() def setDescription(self, desc): """ Set the production's description :param str desc: Description """ self.description = desc return S_OK() def getBasePath(self): """ Return the base path. Updated by :any:`setInputDataQuery`. 
""" return self.basepath def addFinalization(self, uploadData = False, registerData = False, uploadLog = False, sendFailover=False): """ Add finalization step :param bool uploadData: Upload or not the data to the storage :param bool uploadLog: Upload log file to storage (currently only available for admins, thus add them to OutputSandbox) :param bool sendFailover: Send Failover requests, and declare files as processed or unused in transfDB :param bool registerData: Register data in the file catalog """ #TODO: Do the registration only once, instead of once for each job self.call_finalization = True self.finalsdict['uploadData'] = uploadData self.finalsdict['registerData'] = registerData self.finalsdict['uploadLog'] = uploadLog self.finalsdict['sendFailover'] = sendFailover def _addRealFinalization(self): """ This is called at creation: now that the workflow is created at the last minute, we need to add this also at the last minute """ importLine = 'from ILCDIRAC.Workflow.Modules.<MODULE> import <MODULE>' dataUpload = ModuleDefinition('UploadOutputData') dataUpload.setDescription('Uploads the output data') self._addParameter(dataUpload, 'enable', 'bool', False, 'EnableFlag') body = importLine.replace('<MODULE>', 'UploadOutputData') dataUpload.setBody(body) failoverRequest = ModuleDefinition('FailoverRequest') failoverRequest.setDescription('Sends any failover requests') self._addParameter(failoverRequest, 'enable', 'bool', False, 'EnableFlag') body = importLine.replace('<MODULE>', 'FailoverRequest') failoverRequest.setBody(body) registerdata = ModuleDefinition('RegisterOutputData') registerdata.setDescription('Module to add in the metadata catalog the relevant info about the files') self._addParameter(registerdata, 'enable', 'bool', False, 'EnableFlag') body = importLine.replace('<MODULE>', 'RegisterOutputData') registerdata.setBody(body) logUpload = ModuleDefinition('UploadLogFile') logUpload.setDescription('Uploads the output log files') self._addParameter(logUpload, 'enable', 'bool', False, 'EnableFlag') body = importLine.replace('<MODULE>', 'UploadLogFile') logUpload.setBody(body) errorReport = ModuleDefinition('ReportErrors') errorReport.setDescription('Reports errors at the end') body = importLine.replace('<MODULE>', 'ReportErrors') errorReport.setBody(body) finalization = StepDefinition('Job_Finalization') finalization.addModule(dataUpload) up = finalization.createModuleInstance('UploadOutputData', 'dataUpload') up.setValue("enable", self.finalsdict['uploadData']) finalization.addModule(registerdata) ro = finalization.createModuleInstance('RegisterOutputData', 'RegisterOutputData') ro.setValue("enable", self.finalsdict['registerData']) finalization.addModule(logUpload) ul = finalization.createModuleInstance('UploadLogFile', 'logUpload') ul.setValue("enable", self.finalsdict['uploadLog']) finalization.addModule(failoverRequest) fr = finalization.createModuleInstance('FailoverRequest', 'failoverRequest') fr.setValue("enable", self.finalsdict['sendFailover']) finalization.addModule(errorReport) fr = finalization.createModuleInstance('ReportErrors', 'reportErrors') self.workflow.addStep(finalization) self.workflow.createStepInstance('Job_Finalization', 'finalization') return S_OK() def createProduction(self, name = None): """ Create production. 
""" if not self.proxyinfo['OK']: return S_ERROR("Not allowed to create production, you need a production proxy.") if 'groupProperties' not in self.proxyinfo['Value']: return S_ERROR("Could not determine groupProperties, you do not have the right proxy.") groupProperties = self.proxyinfo['Value']['groupProperties'] if 'ProductionManagement' not in groupProperties: return S_ERROR("Not allowed to create production, you need a production proxy.") if self.created: return S_ERROR("Production already created.") ###We need to add the applications to the workflow res = self._addToWorkflow() if not res['OK']: return res if self.call_finalization: self._addRealFinalization() workflowName = self.workflow.getName() fileName = '%s.xml' % workflowName LOG.verbose('Workflow XML file name is:', '%s' % fileName) try: self.createWorkflow() except Exception as x: LOG.error("Exception creating workflow", repr(x)) return S_ERROR('Could not create workflow') with open(fileName, 'r') as oFile: workflowXML = oFile.read() if not name: name = workflowName res = self.trc.getTransformationStats(name) if res['OK']: return self._reportError("Transformation with name %s already exists! Cannot proceed." % name) ###Create Tranformation Trans = Transformation() Trans.setTransformationName(name) Trans.setDescription(self.description) Trans.setLongDescription(self.description) Trans.setType(self.type) self.prodparameters['JobType'] = self.type Trans.setPlugin(self.plugin) if self.inputdataquery: Trans.setGroupSize(self.jobFileGroupSize) Trans.setTransformationGroup(self.prodGroup) Trans.setBody(workflowXML) if not self.slicesize: Trans.setEventsPerTask(self.jobFileGroupSize * self.nbevts) else: Trans.setEventsPerTask(self.slicesize) self.currtrans = Trans if self.dryrun: LOG.notice('Would create prod called', name) self.transfid = 12345 else: res = Trans.addTransformation() if not res['OK']: LOG.error(res['Message']) return res self.transfid = Trans.getTransformationID()['Value'] if self.inputBKSelection: res = self.applyInputDataQuery() if not self.dryrun: Trans.setAgentType("Automatic") Trans.setStatus("Active") finals = [] for finalpaths in self.finalpaths: finalpaths = finalpaths.rstrip("/") finalpaths += "/"+str(self.transfid).zfill(8) finals.append(finalpaths) self.finalMetaDict[finalpaths].update( { "ProdID": self.transfid } ) self.finalMetaDict[finalpaths].update( self.prodMetaDict ) # if 'ILDConfigVersion' in self.prodparameters: # self.finalMetaDict[finalpaths].update({"ILDConfig":self.prodparameters['ILDConfigVersion']}) if self.nbevts: self.finalMetaDict[finalpaths].update({'NumberOfEvents' : self.jobFileGroupSize * self.nbevts}) self.finalpaths = finals self.created = True return S_OK() def setNbOfTasks(self, nbtasks): """ Define the number of tasks you want. Useful for generation jobs. """ if not self.currtrans: LOG.error("Not transformation defined earlier") return S_ERROR("No transformation defined") if self.inputBKSelection and self.plugin not in ['Limited', 'SlicedLimited']: LOG.error('Metadata selection activated, should not specify the number of jobs') return S_ERROR() self.nbtasks = nbtasks self.currtrans.setMaxNumberOfTasks(self.nbtasks) #pylint: disable=E1101 return S_OK() def applyInputDataQuery(self, metadata = None, prodid = None): """ Tell the production to update itself using the metadata query specified, i.e. submit new jobs if new files are added corresponding to same query. 
""" if not self.transfid and self.currtrans: self.transfid = self.currtrans.getTransformationID()['Value'] #pylint: disable=E1101 elif prodid: self.transfid = prodid if not self.transfid: LOG.error("Not transformation defined earlier") return S_ERROR("No transformation defined") if metadata: self.inputBKSelection = metadata if not self.dryrun: res = self.trc.createTransformationInputDataQuery(self.transfid, self.inputBKSelection) if not res['OK']: return res else: LOG.notice("Would use %s as metadata query for production" % str(self.inputBKSelection)) return S_OK() def addMetadataToFinalFiles(self, metadict): """ Add additionnal non-query metadata """ self.metadict_external = metadict return S_OK() def finalizeProd(self, prodid = None, prodinfo = None): """ Finalize definition: submit to Transformation service and register metadata """ currtrans = 0 if self.currtrans: if not self.dryrun: currtrans = self.currtrans.getTransformationID()['Value'] #pylint: disable=E1101 else: currtrans = 12345 if prodid: currtrans = prodid if not currtrans: LOG.error("Not transformation defined earlier") return S_ERROR("No transformation defined") if prodinfo: self.prodparameters = prodinfo info = [] info.append('%s Production %s has following parameters:\n' % (self.prodparameters['JobType'], currtrans)) if "Process" in self.prodparameters: info.append('- Process %s' % self.prodparameters['Process']) if "Energy" in self.prodparameters: info.append('- Energy %s GeV' % self.prodparameters["Energy"]) if not self.slicesize: self.prodparameters['nbevts'] = self.jobFileGroupSize * self.nbevts else: self.prodparameters['nbevts'] = self.slicesize if self.prodparameters['nbevts']: info.append("- %s events per job" % (self.prodparameters['nbevts'])) if self.prodparameters.get('lumi', False): info.append(' corresponding to a luminosity %s fb' % (self.prodparameters['lumi'] * \ self.prodparameters['NbInputFiles'])) if 'FCInputQuery' in self.prodparameters: info.append('Using InputDataQuery :') for key, val in self.prodparameters['FCInputQuery'].iteritems(): info.append(' %s = %s' % (key, val)) if "SWPackages" in self.prodparameters: info.append('- SW packages %s' % self.prodparameters["SWPackages"]) if "SoftwareTag" in self.prodparameters: info.append('- SW tags %s' % self.prodparameters["SoftwareTag"]) if "ILDConfigVersion" in self.prodparameters: info.append('- ILDConfig %s' % self.prodparameters['ILDConfigVersion']) if 'ClicConfigVersion' in self.prodparameters: info.append('- ClicConfig %s' % self.prodparameters['ClicConfigVersion'] ) if 'extraCLIArguments' in self.prodparameters: info.append('- ExtraCLIArguments %s' % self.prodparameters['extraCLIArguments'] ) # as this is the very last call all applications are registered, so all software packages are known #add them the the metadata registration for finalpath in self.finalpaths: if finalpath not in self.finalMetaDictNonSearch: self.finalMetaDictNonSearch[finalpath] = {} if "SWPackages" in self.prodparameters: self.finalMetaDictNonSearch[finalpath]["SWPackages"] = self.prodparameters["SWPackages"] if self.metadict_external: self.finalMetaDictNonSearch[finalpath].update(self.metadict_external) info.append('- Registered metadata: ') for path, metadata in sorted( self.finalMetaDict.iteritems() ): info.append(' %s = %s' % (path, metadata)) info.append('- Registered non searchable metadata: ') for path, metadata in sorted( self.finalMetaDictNonSearch.iteritems() ): info.append(' %s = %s' % (path, metadata)) infoString = '\n'.join(info) 
self.prodparameters['DetailedInfo'] = infoString for name, val in self.prodparameters.iteritems(): result = self._setProdParameter(currtrans, name, val) if not result['OK']: LOG.error(result['Message']) res = self._registerMetadata() if not res['OK']: LOG.error('Could not register the following directories:', res['Message']) return res return S_OK() def _createDirectory(self, path, failed, mode=0o775): """Create the directory at path if it does not exist. :param str path: path to check :param list failed: list of failed paths :param int mode: mode to set for directory """ exists = returnSingleResult(self.fc.isDirectory(path)) if exists['OK'] and exists['Value']: LOG.verbose('Directory already exists:', path) return S_OK() result = returnSingleResult(self.fc.createDirectory(path)) if not result['OK']: LOG.error('Failed to create directory:', '%s: %s' % (path, result['Message'])) failed[path].append(result['Message']) return S_ERROR() LOG.verbose('Successfully created directory:', path) res = self.fc.changePathMode({path: mode}, False) if not res['OK']: LOG.error(res['Message']) failed[path].append(res['Message']) return S_ERROR() LOG.verbose('Successfully changed mode:', path) return S_OK() def _checkMetadata(self, path, metaCopy): """Get existing metadata, if it is the same do not set it again, otherwise return error.""" existingMetadata = self.fc.getDirectoryUserMetadata(path.rstrip('/')) if not existingMetadata['OK']: return S_OK() failure = False for key, value in existingMetadata['Value'].iteritems(): if key in metaCopy and metaCopy[key] != value: LOG.error('Metadata values for folder %s disagree for key %s: Existing(%r), new(%r)' % (path, key, value, metaCopy[key])) failure = True elif key in metaCopy and metaCopy[key] == value: LOG.verbose('Meta entry is unchanged', '%s = %s' % (key, value)) metaCopy.pop(key, None) if failure: return S_ERROR('Error when setting new metadata, already existing metadata disagrees!') return S_OK() def _registerMetadata(self): """Set metadata for given folders. Register path and metadata before the production actually runs. This allows for the definition of the full chain in 1 go. 
""" prevent_registration = self.ops.getValue('Production/PreventMetadataRegistration', False) if self.dryrun or prevent_registration: LOG.notice('Would have created and registered the following\n', '\n '.join([' * %s: %s' % (fPath, val) for fPath, val in self.finalMetaDict.iteritems()])) LOG.notice('Would have set this as non searchable metadata', str(self.finalMetaDictNonSearch)) return S_OK() failed = defaultdict(list) for path, meta in sorted(self.finalMetaDict.items()): res = self._createDirectory(path, failed) if not res['OK']: continue LOG.verbose('Checking to set metadata:', meta) metaCopy = dict(meta) res = self._checkMetadata(path, metaCopy) if not res['OK']: return res if not metaCopy: LOG.verbose('No new metadata to set') continue LOG.verbose('Setting metadata information: ', '%s: %s' % (path, metaCopy)) result = self.fc.setMetadata(path.rstrip('/'), metaCopy) if not result['OK']: LOG.error('Could not preset metadata', str(metaCopy)) LOG.error('Could not preset metadata', result['Message']) failed[path].append(result['Message']) for path, meta in sorted(self.finalMetaDictNonSearch.items()): res = self._createDirectory(path, failed) if not res['OK']: continue LOG.verbose('Setting non searchable metadata information: ', '%s: %s' % (path, meta)) result = self.fc.setMetadata(path.rstrip('/'), meta) if not result['OK']: LOG.error('Could not preset non searchable metadata', str(meta)) LOG.error('Could not preset non searchable metadata', result['Message']) failed[path].append(result['Message']) if failed: return S_ERROR('Failed to register some metadata: %s' % dict(failed)) return S_OK() def getMetadata(self): """ Return the corresponding metadata of the last step """ metadict = {} for meta in self.finalMetaDict.values(): metadict.update(meta) if 'NumberOfEvents' in metadict: del metadict['NumberOfEvents'] #As this is not supposed to be a searchable thing return metadict def _setProdParameter(self, prodID, pname, pvalue): """ Set a production parameter. """ if isinstance( pvalue, list ): pvalue = '\n'.join(pvalue) if isinstance( pvalue, (int, long) ): pvalue = str(pvalue) if not self.dryrun: result = self.trc.setTransformationParameter(int(prodID), str(pname), str(pvalue)) if not result['OK']: LOG.error('Problem setting parameter %s for production %s and value:\n%s' % (prodID, pname, pvalue)) else: LOG.notice("Adding %s=%s to transformation" % (str(pname), str(pvalue))) result = S_OK() return result def _jobSpecificParams(self, application): """ For production additional checks are needed: ask the user """ if self.created: return S_ERROR("The production was created, you cannot add new applications to the job.") if not application.logFile: logf = application.appname + "_" + application.version + "_@{STEP_ID}.log" res = application.setLogFile(logf) if not res['OK']: return res #in fact a bit more tricky as the log files have the prodID and jobID in them ### Retrieve from the application the essential info to build the prod info. 
if not self.nbevts and not self.slicesize: self.nbevts = application.numberOfEvents if not self.nbevts: return S_ERROR("Number of events to process is not defined.") elif not application.numberOfEvents: if not self.slicesize: res = application.setNumberOfEvents(self.jobFileGroupSize * self.nbevts) else: res = application.setNumberOfEvents(self.slicesize) if not res['OK']: return res if application.numberOfEvents > 0 and (self.jobFileGroupSize * self.nbevts > application.numberOfEvents or self.slicesize > application.numberOfEvents): self.nbevts = application.numberOfEvents if not self.energy: if application.energy: self.energy = Decimal((("%1.2f" % float(application.energy)).rstrip('0').rstrip('.'))) else: return S_ERROR("Could not find the energy defined, it is needed for the production definition.") elif not application.energy: res = application.setEnergy(float(self.energy)) if not res['OK']: return res if self.energy: self._setParameter( "Energy", "float", float(self.energy), "Energy used") self.prodparameters["Energy"] = float(self.energy) if not self.evttype: if hasattr(application, 'eventType'): self.evttype = application.eventType else: return S_ERROR("Event type not found nor specified, it's mandatory for the production paths.") self.prodparameters['Process'] = self.evttype if not self.outputStorage: return S_ERROR("You need to specify the Output storage element") curpackage = "%s.%s" % (application.appname, application.version) if "SWPackages" in self.prodparameters: if not self.prodparameters["SWPackages"].count(curpackage): self.prodparameters["SWPackages"] += ";%s" % ( curpackage ) else : self.prodparameters["SWPackages"] = "%s" % (curpackage) if not application.accountInProduction: res = self._updateProdParameters(application) if not res['OK']: return res self.checked = True return S_OK() res = application.setOutputSE(self.outputStorage) if not res['OK']: return res energypath = self.getEnergyPath() if not self.basename: self.basename = self.evttype evttypepath = '' if not self.evttype[-1] == '/': evttypepath = self.evttype + '/' path = self.basepath ###Need to resolve file names and paths if self.energy: self.finalMetaDict[self.basepath + energypath] = {"Energy":str(self.energy)} if hasattr(application, "setOutputRecFile") and not application.willBeCut: evtPath = self.basepath + energypath + evttypepath self.finalMetaDict[evtPath] = {'EvtType': self.evttype} detPath = evtPath + application.detectortype self.finalMetaDict[detPath] = {'DetectorType': application.detectortype} if application.keepRecFile: path = self.basepath + energypath + evttypepath + application.detectortype + '/REC' self.finalMetaDict[path] = {'Datatype': 'REC'} fname = self.basename + '_rec.slcio' application.setOutputRecFile(fname, path) LOG.info('Will store the files under', path) self.finalpaths.append(path) path = self.basepath + energypath + evttypepath + application.detectortype + '/DST' self.finalMetaDict[path] = {'Datatype': 'DST'} fname = self.basename + '_dst.slcio' application.setOutputDstFile(fname, path) LOG.info('Will store the files under', path) self.finalpaths.append(path) elif hasattr(application, "outputFile") and hasattr(application, 'datatype') and not application.outputFile and not application.willBeCut: path = self.basepath + energypath + evttypepath self.finalMetaDict[path] = {"EvtType" : self.evttype} if hasattr(application, "detectortype"): if application.detectortype: path += application.detectortype self.finalMetaDict[path] = {"DetectorType" : application.detectortype} path += 
'/' elif self.detector: path += self.detector self.finalMetaDict[path] = {"DetectorType" : self.detector} path += '/' if not application.datatype and self.datatype: application.datatype = self.datatype path += application.datatype self.finalMetaDict[path] = {'Datatype' : application.datatype} LOG.info("Will store the files under", "%s" % path) self.finalpaths.append(path) extension = 'stdhep' if application.datatype in ['SIM', 'REC']: extension = 'slcio' fname = self.basename + "_%s" % (application.datatype.lower()) + "." + extension application.setOutputFile(fname, path) self.basepath = path res = self._updateProdParameters(application) if not res['OK']: return res self.checked = True return S_OK() def _updateProdParameters(self, application): """ Update the prod parameters stored in the production parameters visible from the web """ try: self.prodparameters.update(application.prodparameters) except Exception as x: return S_ERROR("Exception: %r" % x ) if hasattr( application, 'extraCLIArguments' ) and application.extraCLIArguments: self.prodparameters['extraCLIArguments'] = repr(application.extraCLIArguments) return S_OK() def _jobSpecificModules(self, application, step): return application._prodjobmodules(step) def getEnergyPath(self): """returns the energy path 250gev or 3tev or 1.4tev etc.""" energy = Decimal(str(self.energy)) tD = Decimal('1000.0') unit = 'gev' if energy < tD else 'tev' energy = energy if energy < tD else energy/tD energyPath = ("%1.2f" % energy).rstrip('0').rstrip('.') energyPath = energyPath+unit+'/' LOG.info("Energy path is: ", energyPath) return energyPath def _checkMetaKeys( self, metakeys, extendFileMeta=False ): """ check if metadata keys are allowed to be metadata :param list metakeys: metadata keys for production metadata :param bool extendFileMeta: also use FileMetaFields for checking meta keys :returns: S_OK, S_ERROR """ res = self.fc.getMetadataFields() if not res['OK']: LOG.error("Could not contact File Catalog") return S_ERROR("Could not contact File Catalog") metaFCkeys = res['Value']['DirectoryMetaFields'].keys() if extendFileMeta: metaFCkeys.extend( res['Value']['FileMetaFields'].keys() ) for key in metakeys: for meta in metaFCkeys: if meta != key and meta.lower() == key.lower(): return self._reportError("Key syntax error %r, should be %r" % (key, meta), name = self.__class__.__name__) if key not in metaFCkeys: return self._reportError("Key %r not found in metadata keys, allowed are %r" % (key, metaFCkeys)) return S_OK() def _checkFindDirectories( self, metadata ): """ find directories by metadata and check that there are directories found :param dict metadata: metadata dictionary :returns: S_OK, S_ERROR """ res = self.fc.findDirectoriesByMetadata(metadata) if not res['OK']: return self._reportError("Error looking up the catalog for available directories") elif len(res['Value']) < 1: return self._reportError('Could not find any directories corresponding to the query issued') return res def setReconstructionBasePaths( self, recPath, dstPath ): """ set the output Base paths for the reconstruction REC and DST files """ self._recBasePaths['REC'] = recPath self._recBasePaths['DST'] = dstPath
def createDataTransformation(flavour, targetSE, sourceSE,
                             metaKey, metaValue,
                             extraData=None, extraname='',
                             groupSize=1,
                             plugin='Broadcast',
                             tGroup=None,
                             tBody=None,
                             enable=False,
                             ):
    """Creates the replication transformation based on the given parameters.

    :param str flavour: Flavour of replication to create: Replication or Moving
    :param targetSE: Destination for files
    :type targetSE: python:list or str
    :param str sourceSE: Origin of files.
    :param str metaKey: Meta key to identify input files
    :param metaValue: Meta value to identify input files
    :param dict extraData: Additional meta data to use to identify input files
    :param str extraname: addition to the transformation name, only needed if the same transformation was already created
    :param int groupSize: number of files per transformation task
    :param str plugin: plugin to use
    :param str tGroup: transformation group to set
    :param tBody: transformation body to set
    :param bool enable: if true submit the transformation, otherwise dry run
    :returns: S_OK (with the transformation object, if successfully added), S_ERROR
    """
    metadata = {metaKey: metaValue}
    if isinstance(extraData, dict):
        metadata.update(extraData)
    gLogger.debug("Using %r for metadata search" % metadata)

    if isinstance(targetSE, basestring):
        targetSE = [targetSE]

    if flavour not in ('Replication', 'Moving'):
        return S_ERROR('Unsupported flavour %s' % flavour)

    transVerb = {'Replication': 'Replicate', 'Moving': 'Move'}[flavour]
    transGroup = {'Replication': 'Replication', 'Moving': 'Moving'}[flavour] if not tGroup else tGroup

    trans = Transformation()
    transName = '%s_%s_%s' % (transVerb, str(metaValue), ",".join(targetSE))
    if extraname:
        transName += "_%s" % extraname
    trans.setTransformationName(transName)
    description = '%s files for %s %s to %s' % (transVerb, metaKey, str(metaValue), ",".join(targetSE))
    trans.setDescription(description)
    trans.setLongDescription(description)
    trans.setType('Replication')
    trans.setTransformationGroup(transGroup)
    trans.setGroupSize(groupSize)
    trans.setPlugin(plugin)

    transBody = {'Moving': [("ReplicateAndRegister", {"SourceSE": sourceSE, "TargetSE": targetSE}),
                            ("RemoveReplica", {"TargetSE": sourceSE})],
                 'Replication': '',  # empty body
                 }[flavour] if tBody is None else tBody
    trans.setBody(transBody)

    if sourceSE:
        res = trans.setSourceSE(sourceSE)
        if not res['OK']:
            return S_ERROR("SourceSE not valid: %s" % res['Message'])

    res = trans.setTargetSE(targetSE)
    if not res['OK']:
        return S_ERROR("TargetSE not valid: %s" % res['Message'])

    if not enable:
        gLogger.always("Dry run, not creating transformation")
        return S_OK()

    res = trans.addTransformation()
    if not res['OK']:
        return res
    gLogger.verbose(res)
    trans.setStatus('Active')
    trans.setAgentType('Automatic')

    currtrans = trans.getTransformationID()['Value']
    client = TransformationClient()
    res = client.createTransformationInputDataQuery(currtrans, metadata)
    if not res['OK']:
        return res
    gLogger.always("Successfully created replication transformation")
    return S_OK(trans)
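A hypothetical 'Replication' call of the helper above; the SE names and metadata value are placeholders, and with the default Broadcast plugin the source replicas are kept.

from DIRAC import gLogger

res = createDataTransformation(flavour='Replication',
                               targetSE=['VO-TAPE-SE'],           # placeholder destination SE
                               sourceSE='VO-DISK-SE',             # placeholder source SE
                               metaKey='ProdID', metaValue=5678,  # placeholder input data query
                               enable=True)
if not res['OK']:
    gLogger.error("Failed to create replication transformation", res['Message'])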
print("Example file: %s" % lfns[0]) answer = raw_input('Proceed and submit replication? (Y/N): ') if not answer.lower() in ('y', 'yes'): print("Canceled") exit(1) trc = TransformationClient() res = trc.getTransformationStats(name_of_replication) if res['OK']: print("Replication with name %s already exists! Cannot proceed." % name_of_replication) exit(1) Trans = Transformation() Trans.setTransformationName(name_of_replication) Trans.setDescription(description) Trans.setLongDescription(description) Trans.setType('Replication') Trans.setPlugin('Broadcast') #Trans.setFileMask(fmask) Trans.setSourceSE(source) Trans.setTargetSE(destination) res = Trans.addTransformation() if not res['OK']: print("Failed to add Replication: %s" % res['Message']) exit(1) Trans.setStatus("Active") Trans.setAgentType("Automatic") currtrans = Trans.getTransformationID()['Value'] res = trc.createTransformationInputDataQuery(currtrans,meta)
def createDataTransformation(flavour, targetSE, sourceSE,
                             metaKey, metaValue,
                             extraData=None, extraname='',
                             groupSize=1,
                             plugin=None,
                             tGroup=None,
                             tBody=None,
                             enable=False,
                             ):
    """Creates the replication transformation based on the given parameters.

    :param str flavour: Flavour of replication to create: Replication or Moving
    :param targetSE: Destination for files
    :type targetSE: python:list or str
    :param str sourceSE: Origin of files.
    :param str metaKey: Meta key to identify input files
    :param metaValue: Meta value to identify input files
    :param dict extraData: Additional meta data to use to identify input files
    :param str extraname: addition to the transformation name, only needed if the same transformation was already created
    :param int groupSize: number of files per transformation task
    :param str plugin: plugin to use
    :param str tGroup: transformation group to set
    :param tBody: transformation body to set
    :param bool enable: if true submit the transformation, otherwise dry run
    :returns: S_OK (with the transformation object, if successfully added), S_ERROR
    """
    metadata = {metaKey: metaValue}
    if isinstance(extraData, dict):
        metadata.update(extraData)
    gLogger.debug("Using %r for metadata search" % metadata)

    if isinstance(targetSE, basestring):
        targetSE = [targetSE]

    if sourceSE and plugin is None:
        plugin = 'Broadcast'
    if plugin is None:
        plugin = 'Standard'
    gLogger.debug('Using plugin: %r' % plugin)

    if flavour not in ('Replication', 'Moving'):
        return S_ERROR('Unsupported flavour %s' % flavour)

    transVerb = {'Replication': 'Replicate', 'Moving': 'Move'}[flavour]
    transGroup = {'Replication': 'Replication', 'Moving': 'Moving'}[flavour] if not tGroup else tGroup

    trans = Transformation()
    transName = '%s_%s_%s' % (transVerb, str(metaValue), ",".join(targetSE))
    if extraname:
        transName += "_%s" % extraname
    trans.setTransformationName(transName)
    description = '%s files for %s %s to %s' % (transVerb, metaKey, str(metaValue), ",".join(targetSE))
    trans.setDescription(description)
    trans.setLongDescription(description)
    trans.setType('Replication')
    trans.setTransformationGroup(transGroup)
    trans.setGroupSize(groupSize)
    trans.setPlugin(plugin)

    transBody = {'Moving': [("ReplicateAndRegister", {"SourceSE": sourceSE, "TargetSE": targetSE}),
                            ("RemoveReplica", {"TargetSE": sourceSE})],
                 'Replication': '',  # empty body
                 }[flavour] if tBody is None else tBody
    trans.setBody(transBody)

    if sourceSE:
        res = trans.setSourceSE(sourceSE)
        if not res['OK']:
            return S_ERROR("SourceSE not valid: %s" % res['Message'])

    res = trans.setTargetSE(targetSE)
    if not res['OK']:
        return S_ERROR("TargetSE not valid: %s" % res['Message'])

    if not enable:
        gLogger.always("Dry run, not creating transformation")
        return S_OK()

    res = trans.addTransformation()
    if not res['OK']:
        return res
    gLogger.verbose(res)
    trans.setStatus('Active')
    trans.setAgentType('Automatic')

    currtrans = trans.getTransformationID()['Value']
    client = TransformationClient()
    res = client.createTransformationInputDataQuery(currtrans, metadata)
    if not res['OK']:
        return res
    gLogger.always("Successfully created replication transformation")
    return S_OK(trans)
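And a hypothetical 'Moving' dry run with this variant (again with placeholder SE names and metadata); with enable=False nothing is submitted, which is useful for checking the generated name and body first.

from DIRAC import gLogger

res = createDataTransformation(flavour='Moving',
                               targetSE='VO-TAPE-SE',             # placeholder destination SE
                               sourceSE='VO-DISK-SE',             # placeholder source SE, replicas are removed from here
                               metaKey='ProdID', metaValue=5678,  # placeholder input data query
                               extraname='try1',
                               enable=False)                      # dry run only
if not res['OK']:
    gLogger.error("Failed to create moving transformation", res['Message'])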
# Define transformation steps for the replication of the output data
if replicateFiles and meta:
    Trans = Transformation()
    Trans.setTransformationName('replicate_%s_%s_%s_%s' % (process, energy, polarisation, meta['Datatype']))
    description = 'Replicate %s %s %s %s to' % (process, energy, polarisation, meta['Datatype'])
    for replicaSRM in replicaSRMs:
        description += ' %s,' % (replicaSRM)
    description = description.rstrip(',')  # rstrip returns a new string, so the result must be assigned
    Trans.setDescription(description)
    Trans.setLongDescription(description)
    Trans.setType('Replication')
    Trans.setPlugin('Broadcast')
    Trans.setSourceSE(outputSRM)
    Trans.setTargetSE(replicaSRMs)

    res = Trans.addTransformation()
    if not res['OK']:
        print res
        sys.exit(0)
    print res

    Trans.setStatus('Active')
    Trans.setAgentType('Automatic')
    currtrans = Trans.getTransformationID()['Value']

    client = TransformationClient()
    res = client.createTransformationInputDataQuery(currtrans, meta)
    print res['OK']
)  # Here you specify how many files should be grouped within the same request, e.g. 100
transBody = ''
t.setBody(transBody)

if sourceSE:
    res = t.setSourceSE(sourceSE)
    if not res['OK']:
        gLogger.error("SourceSE not valid: %s" % res['Message'])
        exit(1)

res = t.setTargetSE(targetSE)
if not res['OK']:
    gLogger.error("TargetSE not valid: %s" % res['Message'])
    exit(1)

result = t.addTransformation()  # Transformation is created here
if not result['OK']:
    gLogger.error('Cannot add transformation: %s' % result['Message'])
    exit(2)

t.setStatus("Active")
t.setAgentType("Automatic")

transID = t.getTransformationID()
result = tc.createTransformationInputDataQuery(transID['Value'], query)
if not result['OK']:
    gLogger.error('Cannot create query to transformation: %s' % result['Message'])
    exit(2)
def createTransformation(self):
    ########################################
    # Transformation definition
    ########################################
    t = Transformation()
    t.setTransformationName(self.__transName)
    t.setType(self.__transType)
    t.setDescription(self.__description)
    t.setLongDescription(self.__description)
    t.setGroupSize(self.__groupSize)
    if self.__transGroup:
        t.setTransformationGroup(self.__transGroup)
    t.setPlugin(self.__plugin)
    # t.setSourceSE(self.__sourceSE)
    t.setTargetSE(self.__targetSE)

    transBody = []
    # transBody.append(
    #     ("ReplicateAndRegister", {"TargetSE": ','.join(self.__targetSE)}))
    # for tse in self.__targetSE:
    #     sse = list(set(self.__sourceSE) - set([tse]))
    #     transBody.append(("ReplicateAndRegister", {"SourceSE": ','.join(sse), "TargetSE": ','.join(tse)}))
    #
    # if self.__flavour == 'Moving':
    #     for sse in self.__sourceSE:
    #         if sse in self.__targetSE:
    #             continue
    #         gLogger.debug('Remove from SE: {0}'.format(sse))
    #         transBody.append(("RemoveReplica", {"TargetSE": ','.join(sse)}))
    #
    # transBody.append(("ReplicateAndRegister", {"SourceSE": ','.join(
    #     self.__sourceSE), "TargetSE": ','.join(self.__targetSE)}))
    # if self.__flavour == 'Moving':
    #     transBody.append(
    #         ("RemoveReplica", {"TargetSE": ','.join(self.__sourceSE)}))
    t.setBody(transBody)

    ########################################
    # Transformation submission
    ########################################
    res = t.addTransformation()
    if not res['OK']:
        raise Exception('Add transformation error: {0}'.format(res['Message']))
    t.setStatus("Active")
    t.setAgentType("Automatic")
    currtrans = t.getTransformationID()['Value']

    if self.__inputMeta:
        client = TransformationClient()
        res = client.createTransformationInputDataQuery(currtrans, self.__inputMeta)
        if not res['OK']:
            raise Exception('Create transformation query error: {0}'.format(res['Message']))
    return str(currtrans)