    try:
        prodList.append(int(eval(r[0])))
    except (ValueError, NameError) as e:
        gLogger.exception("Bad production list: %s" % args[0], lException=e)

# In case the user asked for specific LFNs
if not status:
    lfnList = dmScript.getOption('LFNs', [])
if not status and not lfnList and not runsList and not fromProd and not force:
    gLogger.fatal("You are about to check descendants for all files in a production")
    gLogger.fatal("If you really want to do so, use --Force")
    DIRAC.exit(0)

from LHCbDIRAC.DataManagementSystem.Client.ConsistencyChecks import ConsistencyChecks
from LHCbDIRAC.BookkeepingSystem.Client.BKQuery import BKQuery
from DIRAC.TransformationSystem.Client.TransformationClient import TransformationClient
tr = TransformationClient()

for prod in prodList:
    startTime = time.time()
    cc = ConsistencyChecks()
    # Setting the prod also sets its type
    try:
        cc.prod = prod
    except RuntimeError as e:
        gLogger.exception(lException=e)
        continue
    if fileType and cc.transType in ('Merge', 'MCMerge'):
        gLogger.notice("It is not allowed to select a file type for a merging transformation", prod)
        continue
    cc.verbose = verbose
    cc.noFC = noFC
    cc.descendantsDepth = depth
def __init__(self):
    self.transClient = TransformationClient()
    self.prodClient = ProductionClient()
def __init__(self, *args, **kwargs):
    AgentModule.__init__(self, *args, **kwargs)
    self.name = 'DataRecoveryAgent'
    self.enabled = False
    self.getJobInfoFromJDLOnly = False
    self.__getCSOptions()
    self.jobStatus = ['Failed', 'Done']  # This needs to be both, otherwise we cannot account for all cases
    self.jobMon = JobMonitoringClient()
    self.fcClient = FileCatalogClient()
    self.tClient = TransformationClient()
    self.reqClient = ReqClient()
    self.diracAPI = Dirac()
    self.inputFilesProcessed = set()
    self.todo = {
        'NoInputFiles': [
            dict(Message="NoInputFiles: OutputExists: Job 'Done'",
                 ShortMessage="NoInputFiles: job 'Done' ",
                 Counter=0,
                 Check=lambda job: job.allFilesExist() and job.status == 'Failed',
                 Actions=lambda job, tInfo: [job.setJobDone(tInfo)],
                 ),
            dict(Message="NoInputFiles: OutputMissing: Job 'Failed'",
                 ShortMessage="NoInputFiles: job 'Failed' ",
                 Counter=0,
                 Check=lambda job: job.allFilesMissing() and job.status == 'Done',
                 Actions=lambda job, tInfo: [job.setJobFailed(tInfo)],
                 ),
        ],
        'InputFiles': [
            # must always be first!
            dict(Message="One of many Successful: clean others",
                 ShortMessage="Other Tasks --> Keep",
                 Counter=0,
                 Check=lambda job: job.allFilesExist() and job.otherTasks and
                 not set(job.inputFiles).issubset(self.inputFilesProcessed),
                 Actions=lambda job, tInfo: [self.inputFilesProcessed.update(job.inputFiles),
                                             job.setJobDone(tInfo),
                                             job.setInputProcessed(tInfo)],
                 ),
            dict(Message="Other Task processed Input, no Output: Fail",
                 ShortMessage="Other Tasks --> Fail",
                 Counter=0,
                 Check=lambda job: set(job.inputFiles).issubset(self.inputFilesProcessed) and
                 job.allFilesMissing() and job.status != 'Failed',
                 Actions=lambda job, tInfo: [job.setJobFailed(tInfo)],
                 ),
            dict(Message="Other Task processed Input: Fail and clean",
                 ShortMessage="Other Tasks --> Cleanup",
                 Counter=0,
                 Check=lambda job: set(job.inputFiles).issubset(self.inputFilesProcessed) and
                 not job.allFilesMissing(),
                 Actions=lambda job, tInfo: [job.setJobFailed(tInfo), job.cleanOutputs(tInfo)],
                 ),
            dict(Message="InputFile(s) missing: mark job 'Failed', mark input 'Deleted', clean",
                 ShortMessage="Input Missing --> Job 'Failed', Input 'Deleted', Cleanup",
                 Counter=0,
                 Check=lambda job: job.inputFiles and job.allInputFilesMissing() and
                 not job.allTransFilesDeleted(),
                 Actions=lambda job, tInfo: [job.cleanOutputs(tInfo),
                                             job.setJobFailed(tInfo),
                                             job.setInputDeleted(tInfo)],
                 ),
            dict(Message="InputFile(s) Deleted, output Exists: mark job 'Failed', clean",
                 ShortMessage="Input Deleted --> Job 'Failed', Cleanup",
                 Counter=0,
                 Check=lambda job: job.inputFiles and job.allInputFilesMissing() and
                 job.allTransFilesDeleted() and not job.allFilesMissing(),
                 Actions=lambda job, tInfo: [job.cleanOutputs(tInfo), job.setJobFailed(tInfo)],
                 ),
            # All Output Exists
            dict(Message="Output Exists, job Failed, input not Processed --> Job Done, Input Processed",
                 ShortMessage="Output Exists --> Job Done, Input Processed",
                 Counter=0,
                 Check=lambda job: job.allFilesExist() and
                 not job.otherTasks and
                 job.status == 'Failed' and
                 not job.allFilesProcessed() and
                 job.allInputFilesExist(),
                 Actions=lambda job, tInfo: [job.setJobDone(tInfo), job.setInputProcessed(tInfo)],
                 ),
            dict(Message="Output Exists, job Failed, input Processed --> Job Done",
                 ShortMessage="Output Exists --> Job Done",
                 Counter=0,
                 Check=lambda job: job.allFilesExist() and
                 not job.otherTasks and
                 job.status == 'Failed' and
                 job.allFilesProcessed() and
                 job.allInputFilesExist(),
                 Actions=lambda job, tInfo: [job.setJobDone(tInfo)],
                 ),
            dict(Message="Output Exists, job Done, input not Processed --> Input Processed",
                 ShortMessage="Output Exists --> Input Processed",
                 Counter=0,
                 Check=lambda job: job.allFilesExist() and
                 not job.otherTasks and
                 job.status == 'Done' and
                 not job.allFilesProcessed() and
                 job.allInputFilesExist(),
                 Actions=lambda job, tInfo: [job.setInputProcessed(tInfo)],
                 ),
            # Output Missing
            dict(Message="Output Missing, job Failed, input Assigned, MaxError --> Input MaxReset",
                 ShortMessage="Max ErrorCount --> Input MaxReset",
                 Counter=0,
                 Check=lambda job: job.allFilesMissing() and
                 not job.otherTasks and
                 job.status == 'Failed' and
                 job.allFilesAssigned() and
                 not set(job.inputFiles).issubset(self.inputFilesProcessed) and
                 job.allInputFilesExist() and
                 job.checkErrorCount(),
                 Actions=lambda job, tInfo: [job.setInputMaxReset(tInfo)],
                 ),
            dict(Message="Output Missing, job Failed, input Assigned --> Input Unused",
                 ShortMessage="Output Missing --> Input Unused",
                 Counter=0,
                 Check=lambda job: job.allFilesMissing() and
                 not job.otherTasks and
                 job.status == 'Failed' and
                 job.allFilesAssigned() and
                 not set(job.inputFiles).issubset(self.inputFilesProcessed) and
                 job.allInputFilesExist(),
                 Actions=lambda job, tInfo: [job.setInputUnused(tInfo)],
                 ),
            dict(Message="Output Missing, job Done, input Assigned --> Job Failed, Input Unused",
                 ShortMessage="Output Missing --> Job Failed, Input Unused",
                 Counter=0,
                 Check=lambda job: job.allFilesMissing() and
                 not job.otherTasks and
                 job.status == 'Done' and
                 job.allFilesAssigned() and
                 not set(job.inputFiles).issubset(self.inputFilesProcessed) and
                 job.allInputFilesExist(),
                 Actions=lambda job, tInfo: [job.setInputUnused(tInfo), job.setJobFailed(tInfo)],
                 ),
            # Some files missing, needing cleanup. Only checking for
            # Assigned, because Processed could mean an earlier job was
            # successful and this one is just the duplicate that needed
            # to be removed! But we check for other tasks earlier, so
            # this should not happen.
            dict(Message="Some missing, job Failed, input Assigned --> cleanup, Input 'Unused'",
                 ShortMessage="Output Missing --> Cleanup, Input Unused",
                 Counter=0,
                 Check=lambda job: job.someFilesMissing() and
                 not job.otherTasks and
                 job.status == 'Failed' and
                 job.allFilesAssigned() and
                 job.allInputFilesExist(),
                 Actions=lambda job, tInfo: [job.cleanOutputs(tInfo), job.setInputUnused(tInfo)],
                 ),
            dict(Message="Some missing, job Done, input Assigned --> cleanup, job Failed, Input 'Unused'",
                 ShortMessage="Output Missing --> Cleanup, Job Failed, Input Unused",
                 Counter=0,
                 Check=lambda job: job.someFilesMissing() and
                 not job.otherTasks and
                 job.status == 'Done' and
                 job.allFilesAssigned() and
                 job.allInputFilesExist(),
                 Actions=lambda job, tInfo: [job.cleanOutputs(tInfo),
                                             job.setInputUnused(tInfo),
                                             job.setJobFailed(tInfo)],
                 ),
            dict(Message="Some missing, job Done --> job Failed",
                 ShortMessage="Output Missing, Done --> Job Failed",
                 Counter=0,
                 Check=lambda job: not job.allFilesExist() and job.status == 'Done',
                 Actions=lambda job, tInfo: [job.setJobFailed(tInfo)],
                 ),
            dict(Message="Something Strange",
                 ShortMessage="Strange",
                 Counter=0,
                 Check=lambda job: job.status not in ("Failed", "Done"),
                 Actions=lambda job, tInfo: [],
                 ),
            # should always be the last one!
            dict(Message="Failed Hard",
                 ShortMessage="Failed Hard",
                 Counter=0,
                 Check=lambda job: False,  # never
                 Actions=lambda job, tInfo: [],
                 ),
        ],
    }
    self.jobCache = defaultdict(lambda: (0, 0))
    # Notification options
    self.notesToSend = ""
    self.subject = "DataRecoveryAgent"
    self.startTime = time.time()
def setUp(self):
    self.transClient = TransformationClient()
def createDataTransformation(flavour, targetSE, sourceSE,
                             metaKey, metaValue,
                             extraData=None, extraname='',
                             groupSize=1,
                             plugin='Broadcast',
                             tGroup=None,
                             tBody=None,
                             enable=False,
                             ):
    """Creates the replication transformation based on the given parameters.

    :param str flavour: flavour of replication to create: Replication or Moving
    :param targetSE: destination for files
    :type targetSE: python:list or str
    :param str sourceSE: origin of files
    :param str metaKey: meta key to identify input files
    :param str metaValue: meta value to identify input files
    :param dict extraData: additional meta data to use to identify input files
    :param str extraname: addition to the transformation name, only needed if the same
        transformation was already created
    :param int groupSize: number of files per transformation task
    :param str plugin: plugin to use
    :param str tGroup: transformation group to set
    :param tBody: transformation body to set
    :param bool enable: if True submit the transformation, otherwise dry run
    :returns: S_OK with the transformation object if successfully added, S_ERROR otherwise
    """
    metadata = {metaKey: metaValue}
    if isinstance(extraData, dict):
        metadata.update(extraData)
    gLogger.debug("Using %r for metadata search" % metadata)

    if isinstance(targetSE, basestring):
        targetSE = [targetSE]

    gLogger.debug('Using plugin: %r' % plugin)

    if flavour not in ('Replication', 'Moving'):
        return S_ERROR('Unsupported flavour %s' % flavour)

    transVerb = {'Replication': 'Replicate', 'Moving': 'Move'}[flavour]
    transGroup = {'Replication': 'Replication', 'Moving': 'Moving'}[flavour] if not tGroup else tGroup

    trans = Transformation()
    transName = '%s_%s_%s' % (transVerb, str(metaValue), ",".join(targetSE))
    if extraname:
        transName += "_%s" % extraname

    trans.setTransformationName(transName)
    description = '%s files for %s %s to %s' % (transVerb, metaKey, str(metaValue), ",".join(targetSE))
    trans.setDescription(description)
    trans.setLongDescription(description)
    trans.setType('Replication')
    trans.setTransformationGroup(transGroup)
    trans.setGroupSize(groupSize)
    trans.setPlugin(plugin)

    transBody = {'Moving': [("ReplicateAndRegister", {"SourceSE": sourceSE, "TargetSE": targetSE}),
                            ("RemoveReplica", {"TargetSE": sourceSE})],
                 'Replication': '',  # empty body
                 }[flavour] if tBody is None else tBody

    trans.setBody(transBody)

    if sourceSE:
        res = trans.setSourceSE(sourceSE)
        if not res['OK']:
            return S_ERROR("SourceSE not valid: %s" % res['Message'])

    res = trans.setTargetSE(targetSE)
    if not res['OK']:
        return S_ERROR("TargetSE not valid: %s" % res['Message'])

    if not enable:
        gLogger.always("Dry run, not creating transformation")
        return S_OK()

    res = trans.addTransformation()
    if not res['OK']:
        return res
    gLogger.verbose(res)
    trans.setStatus('Active')
    trans.setAgentType('Automatic')
    currtrans = trans.getTransformationID()['Value']
    client = TransformationClient()
    res = client.createTransformationInputDataQuery(currtrans, metadata)
    if not res['OK']:
        return res
    gLogger.always("Successfully created replication transformation")
    return S_OK(trans)
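A minimal usage sketch for the function above, assuming a valid DIRAC proxy and the module's imports; the SE names and metadata key/value are placeholders, not taken from a real setup:

# Hypothetical call: move files tagged Datatype=GEN from CERN-SRM to DESY-SRM
res = createDataTransformation(flavour='Moving',
                               targetSE='DESY-SRM',
                               sourceSE='CERN-SRM',
                               metaKey='Datatype',
                               metaValue='GEN',
                               enable=False,  # dry run; set True to actually submit
                               )
if not res['OK']:
    gLogger.error("Failed to create transformation:", res['Message'])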
def __init__(self):
    self.transClient = TransformationClient()
    self.indentSpace = 4
    CLI.__init__(self)
    API.__init__(self)
def main():
    # Registering arguments will automatically add their description to the help menu
    Script.registerArgument("prodID: Production ID")
    _, args = Script.parseCommandLine()

    from DIRAC.ProductionSystem.Client.ProductionClient import ProductionClient
    from DIRAC.TransformationSystem.Client.TransformationClient import TransformationClient

    # get arguments
    prodID = args[0]

    prodClient = ProductionClient()
    transClient = TransformationClient()

    res = prodClient.getProductionTransformations(prodID)
    transIDs = []

    if res["OK"]:
        transList = res["Value"]
        if not transList:
            DIRAC.gLogger.notice("No transformation associated with production %s" % prodID)
            DIRAC.exit(-1)
        for trans in transList:
            transIDs.append(trans["TransformationID"])
    else:
        DIRAC.gLogger.error(res["Message"])
        DIRAC.exit(-1)

    fields = [
        "TransformationName",
        "Status",
        "F_Proc.",
        "F_Proc.(%)",
        "TransformationID",
        "ProductionID",
        "Prod_LastUpdate",
        "Prod_InsertedTime",
    ]

    records = []

    paramShowNames = [
        "TransformationID",
        "TransformationName",
        "Type",
        "Status",
        "Files_Total",
        "Files_PercentProcessed",
        "Files_Processed",
        "Files_Unused",
        "Jobs_TotalCreated",
        "Jobs_Waiting",
        "Jobs_Running",
        "Jobs_Done",
        "Jobs_Failed",
        "Jobs_Stalled",
    ]
    resList = []

    res = transClient.getTransformationSummaryWeb({"TransformationID": transIDs}, [], 0, len(transIDs))
    if not res["OK"]:
        DIRAC.gLogger.error(res["Message"])
        DIRAC.exit(-1)

    if res["Value"]["TotalRecords"] > 0:
        paramNames = res["Value"]["ParameterNames"]
        for paramValues in res["Value"]["Records"]:
            paramShowValues = map(lambda pname: paramValues[paramNames.index(pname)], paramShowNames)
            showDict = dict(zip(paramShowNames, paramShowValues))
            resList.append(showDict)

    for res in resList:
        files_Processed = res["Files_Processed"]
        files_PercentProcessed = res["Files_PercentProcessed"]
        status = res["Status"]
        type = res["Type"]
        transName = res["TransformationName"]
        transID = res["TransformationID"]
        records.append([
            transName,
            status,
            str(files_Processed),
            str(files_PercentProcessed),
            str(transID),
            str(prodID),
            str(trans["LastUpdate"]),    # NB: 'trans' is still the last element of transList from the loop above
            str(trans["InsertedTime"]),
        ])

    printTable(fields, records)

    DIRAC.exit(0)
def web_getSelectionData( self ):
    sData = self.getSessionData()
    callback = {}
    group = sData["user"]["group"]
    user = sData["user"]["username"]
    if user == "Anonymous":
        callback["prod"] = [["Insufficient rights"]]
    else:
        callback = {}
        ####
        tsClient = TransformationClient()
        result = yield self.threadTask( tsClient.getDistinctAttributeValues, "Plugin", {} )
        if result["OK"]:
            plugin = []
            if len( result["Value"] ) > 0:
                for i in result["Value"]:
                    plugin.append( [str( i )] )
            else:
                plugin.append( "Nothing to display" )
        else:
            plugin = "Error during RPC call"
        callback["plugin"] = plugin
        ####
        result = yield self.threadTask( tsClient.getDistinctAttributeValues, "Status", {} )
        if result["OK"]:
            status = []
            if len( result["Value"] ) > 0:
                for i in result["Value"]:
                    status.append( [str( i )] )
            else:
                status = "Nothing to display"
        else:
            status = "Error during RPC call"
        callback["prodStatus"] = status
        ####
        result = yield self.threadTask( tsClient.getDistinctAttributeValues, "TransformationGroup", {} )
        if result["OK"]:
            group = []
            if len( result["Value"] ) > 0:
                for i in result["Value"]:
                    group.append( [str( i )] )
            else:
                group = "Nothing to display"
        else:
            group = "Error during RPC call"
        callback["transformationGroup"] = group
        ####
        result = yield self.threadTask( tsClient.getDistinctAttributeValues, "AgentType", {} )
        if result["OK"]:
            atype = []
            if len( result["Value"] ) > 0:
                for i in result["Value"]:
                    atype.append( [str( i )] )
            else:
                atype = "Nothing to display"
        else:
            atype = "Error during RPC call"
        callback["agentType"] = atype
        ####
        result = yield self.threadTask( tsClient.getDistinctAttributeValues, "Type", {} )
        if result["OK"]:
            type = []
            if len( result["Value"] ) > 0:
                for i in result["Value"]:
                    type.append( [str( i )] )
            else:
                type = "Nothing to display"
        else:
            type = "Error during RPC call"
        callback["productionType"] = type
    self.finish( callback )
def web_getTransformationData( self ):
    pagestart = Time.time()
    callback = None
    sData = self.getSessionData()
    callback = {}
    user = sData["user"]["username"]
    tsClient = TransformationClient()
    if user == "Anonymous":
        callback = {"success":"false", "error":"You are not authorised"}
    else:
        result = self.__request()
        result = yield self.threadTask( tsClient.getTransformationSummaryWeb, result, self.globalSort,
                                        self.pageNumber, self.numberOfJobs )
        if not result["OK"]:
            self.finish( json.dumps( {"success":"false", "error":result["Message"]} ) )
            return
        result = result["Value"]
        if not result.has_key( "TotalRecords" ):
            self.finish( json.dumps( {"success":"false", "result":"", "error":"Data structure is corrupted"} ) )
            return
        if not ( result["TotalRecords"] > 0 ):
            self.finish( json.dumps( {"success":"false", "result":"", "error":"There were no data matching your selection"} ) )
            return
        if not ( result.has_key( "ParameterNames" ) and result.has_key( "Records" ) ):
            self.finish( json.dumps( {"success":"false", "result":"", "error":"Data structure is corrupted"} ) )
            return
        if not ( len( result["ParameterNames"] ) > 0 ):
            self.finish( json.dumps( {"success":"false", "result":"", "error":"ParameterNames field is missing"} ) )
            return
        if not ( len( result["Records"] ) > 0 ):
            self.finish( json.dumps( {"success":"false", "Message":"There are no data to display"} ) )
            return
        callback = []
        jobs = result["Records"]
        head = result["ParameterNames"]
        headLength = len( head )
        for i in jobs:
            tmp = {}
            for j in range( 0, headLength ):
                tmp[head[j]] = i[j]
            callback.append( tmp )
        total = result["TotalRecords"]
        if "Extras" in result:
            gLogger.info( result["Extras"] )
            extra = result["Extras"]
            timestamp = Time.dateTime().strftime( "%Y-%m-%d %H:%M [UTC]" )
            callback = {"success":"true", "result":callback, "total":total, "extra":extra, "date":timestamp}
        else:
            callback = {"success":"true", "result":callback, "total":total, "date":None}
    gLogger.info( "\033[0;31m PRODUCTION SUBMIT REQUEST: \033[0m %s" % ( Time.time() - pagestart ) )
    self.finish( json.dumps( callback ) )
def initialize(self): """ agent initialisation reading and setting confing opts :param self: self reference """ # # shifter proxy # See cleanCatalogContents method: this proxy will be used ALSO when the file catalog used # is the DIRAC File Catalog (DFC). # This is possible because of unset of the "UseServerCertificate" option self.shifterProxy = self.am_getOption('shifterProxy', None) # # transformations types self.dataProcTTypes = Operations().getValue( 'Transformations/DataProcessing', self.dataProcTTypes) self.dataManipTTypes = Operations().getValue( 'Transformations/DataManipulation', self.dataManipTTypes) agentTSTypes = self.am_getOption('TransformationTypes', []) if agentTSTypes: self.transformationTypes = sorted(agentTSTypes) else: self.transformationTypes = sorted(self.dataProcTTypes + self.dataManipTTypes) self.log.info("Will consider the following transformation types: %s" % str(self.transformationTypes)) # # directory locations self.directoryLocations = sorted( self.am_getOption('DirectoryLocations', self.directoryLocations)) self.log.info( "Will search for directories in the following locations: %s" % str(self.directoryLocations)) # # transformation metadata self.transfidmeta = self.am_getOption('TransfIDMeta', self.transfidmeta) self.log.info("Will use %s as metadata tag name for TransformationID" % self.transfidmeta) # # archive periof in days self.archiveAfter = self.am_getOption('ArchiveAfter', self.archiveAfter) # days self.log.info("Will archive Completed transformations after %d days" % self.archiveAfter) # # active SEs self.activeStorages = sorted( self.am_getOption('ActiveSEs', self.activeStorages)) if self.activeStorages: self.log.info("Will check the following storage elements: %s" % str(self.activeStorages)) # # transformation log SEs self.logSE = Operations().getValue('/LogStorage/LogSE', self.logSE) self.log.info("Will remove logs found on storage element: %s" % self.logSE) # # transformation client self.transClient = TransformationClient() # # wms client self.wmsClient = WMSClient() # # request client self.reqClient = ReqClient() # # file catalog client self.metadataClient = FileCatalogClient() return S_OK()
def initialize(self): """ agent initialisation reading and setting confing opts :param self: self reference """ # # shifter proxy self.am_setOption('shifterProxy', 'DataManager') # # transformations types self.dataProcTTypes = Operations().getValue( 'Transformations/DataProcessing', ['MCSimulation', 'Merge']) self.dataManipTTypes = Operations().getValue( 'Transformations/DataManipulation', ['Replication', 'Removal']) agentTSTypes = self.am_getOption('TransformationTypes', []) if agentTSTypes: self.transformationTypes = sorted(agentTSTypes) else: self.transformationTypes = sorted(self.dataProcTTypes + self.dataManipTTypes) self.log.info("Will consider the following transformation types: %s" % str(self.transformationTypes)) # # directory locations self.directoryLocations = sorted( self.am_getOption('DirectoryLocations', ['TransformationDB', 'MetadataCatalog'])) self.log.info( "Will search for directories in the following locations: %s" % str(self.directoryLocations)) # # transformation metadata self.transfidmeta = self.am_getOption('TransfIDMeta', "TransformationID") self.log.info("Will use %s as metadata tag name for TransformationID" % self.transfidmeta) # # archive periof in days self.archiveAfter = self.am_getOption('ArchiveAfter', 7) # days self.log.info("Will archive Completed transformations after %d days" % self.archiveAfter) # # active SEs self.activeStorages = sorted(self.am_getOption('ActiveSEs', [])) self.log.info("Will check the following storage elements: %s" % str(self.activeStorages)) # # transformation log SEs self.logSE = self.am_getOption('TransformationLogSE', 'LogSE') self.log.info("Will remove logs found on storage element: %s" % self.logSE) # # enable/disable execution, should be using CS option Status?? with default value as 'Active'?? self.enableFlag = self.am_getOption('EnableFlag', 'True') # # data manager # self.dm = DataManager() # # transformation client self.transClient = TransformationClient() # # wms client self.wmsClient = WMSClient() # # request client self.reqClient = ReqClient() # # file catalog client self.metadataClient = FileCatalogClient() return S_OK()
def __init__(self):
    self.server = TransformationClient()
    self.indentSpace = 4
    cmd.Cmd.__init__(self)
    API.__init__(self)
def setUp(self):
    self.transClient = TransformationClient()
    self.fc = FileCatalog()
    self.dm = DataManager()
    self.metaCatalog = 'DIRACFileCatalog'
    gLogger.setLevel('DEBUG')
def web_showFileStatus(self):
    callback = {}
    start = int(self.request.arguments["start"][-1])
    limit = int(self.request.arguments["limit"][-1])
    try:
        id = self.request.arguments['transformationId'][-1]
        status = self.request.arguments['status'][-1]
    except KeyError as excp:
        raise WErr(400, "Missing %s" % excp)

    tsClient = TransformationClient()

    result = yield self.threadTask(tsClient.getTransformationFilesSummaryWeb,
                                   {'TransformationID': id, 'Status': status},
                                   [["FileID", "ASC"]], start, limit)
    if not result['OK']:
        callback = {"success": "false", "error": result["Message"]}
    else:
        result = result["Value"]
        if result.has_key("TotalRecords") and result["TotalRecords"] > 0:
            if result.has_key("ParameterNames") and result.has_key("Records"):
                if len(result["ParameterNames"]) > 0:
                    if len(result["Records"]) > 0:
                        callback = []
                        jobs = result["Records"]
                        head = result["ParameterNames"]
                        headLength = len(head)
                        for i in jobs:
                            tmp = {}
                            for j in range(0, headLength):
                                tmp[head[j]] = i[j]
                            callback.append(tmp)
                        total = result["TotalRecords"]
                        timestamp = Time.dateTime().strftime("%Y-%m-%d %H:%M [UTC]")
                        if result.has_key("Extras"):
                            extra = result["Extras"]
                            callback = {"success": "true", "result": callback, "total": total,
                                        "extra": extra, "date": timestamp}
                        else:
                            callback = {"success": "true", "result": callback, "total": total,
                                        "date": timestamp}
                    else:
                        callback = {"success": "false", "result": "",
                                    "error": "There are no data to display"}
                else:
                    callback = {"success": "false", "result": "",
                                "error": "ParameterNames field is undefined"}
            else:
                callback = {"success": "false", "result": "",
                            "error": "Data structure is corrupted"}
        else:
            callback = {"success": "false", "result": "",
                        "error": "There were no data matching your selection"}
    self.finish(callback)
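For reference, the same query can be issued outside the web layer; a minimal sketch against the client call used above (the transformation ID, status, and paging values are placeholders):

from DIRAC.TransformationSystem.Client.TransformationClient import TransformationClient

tsClient = TransformationClient()
# First 25 'Unused' files of a hypothetical transformation 1234, sorted by FileID
res = tsClient.getTransformationFilesSummaryWeb({'TransformationID': 1234, 'Status': 'Unused'},
                                                [["FileID", "ASC"]], 0, 25)
if res['OK']:
    for record in res['Value']['Records']:
        print(record)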
def createTransformation(self):
    ########################################
    # Transformation definition
    ########################################
    t = Transformation()

    t.setTransformationName(self.__transName)
    t.setType(self.__transType)
    t.setDescription(self.__description)
    t.setLongDescription(self.__description)
    t.setGroupSize(self.__groupSize)
    if self.__transGroup:
        t.setTransformationGroup(self.__transGroup)
    t.setPlugin(self.__plugin)

    # t.setSourceSE(self.__sourceSE)
    t.setTargetSE(self.__targetSE)

    transBody = []

    # transBody.append(
    #     ("ReplicateAndRegister", {"TargetSE": ','.join(self.__targetSE)}))
    # for tse in self.__targetSE:
    #     sse = list(set(self.__sourceSE) - set([tse]))
    #     transBody.append(("ReplicateAndRegister", {"SourceSE": ','.join(sse), "TargetSE": ','.join(tse)}))
    #
    # if self.__flavour == 'Moving':
    #     for sse in self.__sourceSE:
    #         if sse in self.__targetSE:
    #             continue
    #         gLogger.debug('Remove from SE: {0}'.format(sse))
    #         transBody.append(("RemoveReplica", {"TargetSE": ','.join(sse)}))
    #
    # transBody.append(("ReplicateAndRegister", {"SourceSE": ','.join(self.__sourceSE),
    #                                            "TargetSE": ','.join(self.__targetSE)}))
    # if self.__flavour == 'Moving':
    #     transBody.append(
    #         ("RemoveReplica", {"TargetSE": ','.join(self.__sourceSE)}))

    t.setBody(transBody)

    ########################################
    # Transformation submission
    ########################################
    res = t.addTransformation()
    if not res['OK']:
        raise Exception('Add transformation error: {0}'.format(res['Message']))

    t.setStatus("Active")
    t.setAgentType("Automatic")

    currtrans = t.getTransformationID()['Value']

    if self.__inputMeta:
        client = TransformationClient()
        res = client.createTransformationInputDataQuery(currtrans, self.__inputMeta)
        if not res['OK']:
            raise Exception('Create transformation query error: {0}'.format(res['Message']))

    return str(currtrans)
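All body entries above are commented out, so this transformation is created with an empty body. For comparison, a Moving-style body in the spirit of the commented alternatives (and of createDataTransformation earlier) might look like the sketch below; the self.__sourceSE / self.__targetSE lists are assumed to hold SE name strings:

# Hypothetical Moving body: replicate to the targets, then drop the source replicas
transBody = [
    ("ReplicateAndRegister", {"SourceSE": ','.join(self.__sourceSE),
                              "TargetSE": ','.join(self.__targetSE)}),
    ("RemoveReplica", {"TargetSE": ','.join(self.__sourceSE)}),
]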
def main():
    # Registering arguments will automatically add their description to the help menu
    Script.registerArgument("prodID: Production ID")
    Script.registerArgument("transID: Transformation ID")
    Script.registerArgument("parentTransID: Parent Transformation ID", default="", mandatory=False)
    _, args = Script.parseCommandLine()

    from DIRAC.ProductionSystem.Client.ProductionClient import ProductionClient
    from DIRAC.TransformationSystem.Client.TransformationClient import TransformationClient

    prodClient = ProductionClient()
    transClient = TransformationClient()

    # get arguments
    prodID, transID, parentTransID = Script.getPositionalArgs(group=True)
    if len(args) > 3:
        Script.showHelp(exitCode=1)

    res = transClient.getTransformation(transID)
    if not res["OK"]:
        DIRAC.gLogger.error("Failed to get transformation %s: %s" % (transID, res["Message"]))
        DIRAC.exit(-1)

    transID = res["Value"]["TransformationID"]

    if parentTransID:
        res = transClient.getTransformation(parentTransID)
        if not res["OK"]:
            DIRAC.gLogger.error("Failed to get transformation %s: %s" % (parentTransID, res["Message"]))
            DIRAC.exit(-1)
        parentTransID = res["Value"]["TransformationID"]

    res = prodClient.getProduction(prodID)
    if not res["OK"]:
        DIRAC.gLogger.error("Failed to get production %s: %s" % (prodID, res["Message"]))
        DIRAC.exit(-1)

    prodID = res["Value"]["ProductionID"]
    res = prodClient.addTransformationsToProduction(prodID, transID, parentTransID)
    if not res["OK"]:
        DIRAC.gLogger.error(res["Message"])
        DIRAC.exit(-1)

    if parentTransID:
        msg = "Transformation %s successfully added to production %s with parent transformation %s" % (
            transID, prodID, parentTransID)
    else:
        msg = "Transformation %s successfully added to production %s with no parent transformation" % (
            transID, prodID)

    DIRAC.gLogger.notice(msg)

    DIRAC.exit(0)