def __init__(self, operation=None, csPath=None):
    """Constructor.

    :param self: self reference
    :param Operation operation: Operation instance
    :param str csPath: CS path for this handler
    """
    # initialise the base operation handler first
    super(PutAndRegister, self).__init__(operation, csPath)
    # register the gMonitor activities used for put/registration accounting
    for activity, description in (("PutAtt", "File put attempts"),
                                  ("PutFail", "Failed file puts"),
                                  ("PutOK", "Successful file puts"),
                                  ("RegisterOK", "Successful file registrations"),
                                  ("RegisterFail", "Failed file registrations")):
        gMonitor.registerActivity(activity, description, "RequestExecutingAgent",
                                  "Files/min", gMonitor.OP_SUM)
    # data-management helper
    self.rm = ReplicaManager()
def __init__( self, *args, **kwargs ):
    """Constructor: build the clients the agent relies on and declare
    placeholders for every CS-driven option (resolved later, in initialize()).
    """
    AgentModule.__init__( self, *args, **kwargs )
    # clients
    self.replicaManager = ReplicaManager()
    self.transClient = TransformationClient()
    self.wmsClient = WMSClient()
    self.requestClient = RequestClient()
    self.metadataClient = FileCatalogClient()
    # placeholders for CS options
    self.transformationTypes = None   # transformation types to treat
    self.directoryLocations = None    # where to search for directories
    self.transfidmeta = None          # metadata key holding the transformation ID
    self.archiveAfter = None          # archive period in days
    self.activeStorages = None        # SEs to check
    self.logSE = None                 # SE holding transformation logs
    self.enableFlag = None            # enable/disable execution switch
def replicaManager(cls):
    """ReplicaManager getter.

    :param cls: class reference
    """
    # return the cached instance when it already exists
    if cls.__replicaManager:
        return cls.__replicaManager
    # otherwise build it lazily on first access
    cls.__replicaManager = ReplicaManager()
    return cls.__replicaManager
def __init__(self, loggerIn=None):
    """Initialization of the module base.

    :param loggerIn: optional logger object passed in so that logging
                     is clearer; a 'ModuleBase' sub-logger is used otherwise
    """
    self.log = loggerIn if loggerIn else gLogger.getSubLogger('ModuleBase')
    # helpers used all over the place, kept handy here
    self.opsH = Operations()
    self.rm = ReplicaManager()
    # job bookkeeping parameters
    self.production_id = 0
    self.prod_job_id = 0
    self.jobID = 0
    self.step_number = 0
    self.step_id = 0
    self.jobType = ''
    self.executable = ''
    self.command = None
    self.workflowStatus = None
    self.stepStatus = None
    self.workflow_commons = None
    self.step_commons = None
    # reporting objects, created on demand (see getFileReporter(),
    # getJobReporter() and getRequestContainer())
    self.fileReport = None
    self.jobReport = None
    self.request = None
def __copyToExternalSE(self, localFilePath, sbPath):
    """Copy an uploaded sandbox file to the external SE.

    :param localFilePath: path of the file on local disk
    :param sbPath: sandbox path used as the destination LFN
    :return: S_OK((externalSEName, pfn)) on success, S_ERROR otherwise
    """
    try:
        rm = ReplicaManager()
        result = rm.put(sbPath, localFilePath, self.__externalSEName)
        if not result['OK']:
            return result
        if 'Successful' not in result['Value']:
            gLogger.verbose("Oops, no successful transfers there", str(result))
            return S_ERROR(
                "RM returned OK to the action but no successful transfers were there")
        okTrans = result['Value']['Successful']
        # the transfer must be reported as successful for our exact path
        if sbPath not in okTrans:
            gLogger.verbose(
                "Ooops, SB transfer wasn't in the successful ones", str(result))
            return S_ERROR(
                "RM returned OK to the action but SB transfer wasn't in the successful ones")
        return S_OK((self.__externalSEName, okTrans[sbPath]))
    except Exception as e:  # fix: 'except X, e' is Python-2-only syntax
        return S_ERROR("Error while moving sandbox to SE: %s" % str(e))
def replicaManager( cls ):
    """ReplicaManager getter.

    :param cls: class reference
    """
    # fast path: instance already built
    if cls.__replicaManager:
        return cls.__replicaManager
    # deferred import keeps module load cheap; create the shared instance
    from DIRAC.DataManagementSystem.Client.ReplicaManager import ReplicaManager
    cls.__replicaManager = ReplicaManager()
    return cls.__replicaManager
def _getClients( self ):
    """Return the fresh client instances used inside the worker threads."""
    # each thread gets its own TransformationClient and ReplicaManager
    return {'TransformationClient': TransformationClient(),
            'ReplicaManager': ReplicaManager()}
def registerInputData(filepath, size, prefix='/cepc/lustre-ro'): infoDict = {} infoDict['PFN'] = '' infoDict['Size'] = size infoDict['SE'] = 'IHEP-STORM' infoDict['GUID'] = commands.getoutput('uuidgen') infoDict['Checksum'] = '' fileDict = {} lfn = prefix + filepath fileDict[lfn] = infoDict fcc = FileCatalogClient('DataManagement/FileCatalog') rm = ReplicaManager() result = {} result['lfn'] = lfn result['is_registered'] = False #查询 for repeatTimes in range(10): is_registered = fcc.isFile(lfn) if (is_registered['OK'] and is_registered['Value']['Successful'].has_key(lfn)): break # else: # continue if not is_registered['OK']: #查询失败 result['is_registered'] = 'querry error. unkown' print 'Failed to query %s in DFC. Error message is %s' % ( lfn, is_registered['Message']) if is_registered['Value']['Successful'][lfn]: #已注册 result['is_registered'] = True for repeatTimes in range(10): is_removed = rm.removeCatalogFile(lfn) #删除 if (is_removed['OK'] and is_removed['Value']['Successful'][lfn]['FileCatalog']): result['is_removed'] = True break # else: # continue if not is_removed['OK']: #删除失败 result['is_removed'] = 'remove error' print 'Failed to remove %s from DFC.' % lfn #add for repeatTimes in range(10): is_added = fcc.addFile(fileDict) #add/register if (is_added['OK'] and is_added['Value']['Successful'][lfn]): result['OK'] = True return result # else: # continue if not is_added['OK']: #add unsuccessfully result['OK'] = False result['Message'] = is_added['Message'] elif is_added['Value']['Failed']: result['OK'] = False result['Message'] = 'Failed to add file' + lfn return result
def setUp( self ): super( IntegrationTest, self ).setUp() rm = ReplicaManager() res = rm.removeFile( ['/lhcb/testCfg/testVer/LOG/00012345/0006/00012345_00067890.tar', '/lhcb/testCfg/testVer/SIM/00012345/0006/00012345_00067890_1.sim'], force = True ) if not res['OK']: print "Could not remove files", res['Message'] exit( 1 )
def initialize(self):
    """Instantiate the request and replica clients and set the shifter proxy."""
    self.RequestDBClient = RequestClient()
    self.ReplicaManager = ReplicaManager()
    # Run under the proxy defined at /Operations/Shifter/DataManager;
    # the shifterProxy CS option can override this default.
    self.am_setOption('shifterProxy', 'DataManager')
    return S_OK()
def __init__(self, argumentsDict):
    """Standard constructor.

    :param argumentsDict: dict carrying 'InputData', 'Configuration'
                          and 'FileCatalog' entries
    """
    self.name = COMPONENT_NAME
    self.log = gLogger.getSubLogger(self.name)
    # unpack the arguments dictionary
    self.inputData = argumentsDict['InputData']
    self.configuration = argumentsDict['Configuration']
    self.fileCatalogResult = argumentsDict['FileCatalog']
    self.replicaManager = ReplicaManager()
    # filled in later by the caller
    self.jobID = None
def initialize( self ):
    """Build the clients/DB handles and force the DataManager shifter proxy."""
    self.replicaManager = ReplicaManager()
    self.dataIntegrityClient = DataIntegrityClient()
    self.storageDB = StorageManagementDB()
    # Run under the proxy defined at /Operations/Shifter/DataManager;
    # the shifterProxy CS option can override this default.
    self.am_setOption( 'shifterProxy', 'DataManager' )
    return S_OK()
def initialize( self ):
    """Create the clients, set the pin lifetime and force the shifter proxy."""
    self.replicaManager = ReplicaManager()
    self.stagerClient = StorageManagerClient()
    self.pinLifeTime = 60 * 60 * 24 * 7  # one week, in seconds
    # Run under the proxy defined at /Operations/Shifter/DataManager;
    # the shifterProxy CS option can override this default.
    self.am_setOption( 'shifterProxy', 'DataManager' )
    return S_OK()
def __init__( self, requestObject = None ):
    """Constructor: operate on the supplied request object, or build a
    fresh default Request when none is given.

    :param requestObject: optional Request instance
    """
    self.log = gLogger.getSubLogger( "FailoverTransfer" )
    self.replicaMgr = ReplicaManager()
    self.request = requestObject
    if not self.request:
        # no request supplied: start from a default one
        self.request = Request()
        self.request.RequestName = 'default_request.xml'
        self.request.SourceComponent = 'FailoverTransfer'
def initializeOptimizer(cls):
    """Initialize specific parameters.

    :param cls: class reference
    :return: S_ERROR when the ReplicaManager cannot be built; None otherwise
    """
    cls.ex_setProperty('shifterProxy', 'DataManager')
    # cache for SE status lookups
    cls.__SEStatus = DictCache.DictCache()
    try:
        cls.__replicaMan = ReplicaManager()
    except Exception as e:  # fix: 'except X, e' is Python-2-only syntax
        msg = 'Failed to create ReplicaManager'
        cls.log.exception(msg)
        return S_ERROR(msg + str(e))
def __init__(self, requestObject=False):
    """Constructor: wrap an existing request container, or create a
    default one when nothing is supplied.

    :param requestObject: optional RequestContainer to operate on
    """
    self.log = gLogger.getSubLogger("FailoverTransfer")
    self.rm = ReplicaManager()
    self.request = requestObject
    if not self.request:
        # no request supplied: start from a fresh default container
        self.request = RequestContainer()
        self.request.setRequestName('default_request.xml')
        self.request.setSourceComponent('FailoverTransfer')
def __init__(self, argumentsDict):
    """Standard constructor.

    :param argumentsDict: dict carrying 'InputData', 'Configuration',
                          'FileCatalog' and optionally 'InputDataDirectory'
    """
    self.name = COMPONENT_NAME
    self.log = gLogger.getSubLogger(self.name)
    # unpack the arguments dictionary
    self.inputData = argumentsDict['InputData']
    self.configuration = argumentsDict['Configuration']
    self.fileCatalogResult = argumentsDict['FileCatalog']
    # 'PerFile' (the default) puts each input data file into its own directory
    self.inputDataDirectory = argumentsDict.get('InputDataDirectory', 'PerFile')
    self.jobID = None
    self.replicaManager = ReplicaManager()
    self.counter = 1
def initialize(self):
    """Read the plugin location and catalog-check flag, set the shifter
    proxy and build the clients."""
    self.pluginLocation = self.am_getOption(
        'PluginLocation', 'DIRAC.TransformationSystem.Agent.TransformationPlugin')
    self.checkCatalog = self.am_getOption('CheckCatalog', 'yes')
    # Run under the proxy defined at /Operations/Shifter/ProductionManager;
    # the shifterProxy CS option can override this default.
    self.am_setOption('shifterProxy', 'ProductionManager')
    self.transDB = TransformationClient('TransformationDB')
    self.rm = ReplicaManager()
    return S_OK()
def __init__( self, plugin, transClient = None, replicaManager = None ):
    """Constructor.

    :param plugin: plugin name to execute
    :param transClient: optional TransformationClient (created when omitted)
    :param replicaManager: optional ReplicaManager (created when omitted)
    """
    self.params = False
    self.data = False
    self.plugin = plugin
    self.files = False
    # fix: compare to None with 'is', not '=='
    if transClient is None:
        from DIRAC.TransformationSystem.Client.TransformationClient import TransformationClient
        self.transClient = TransformationClient()
    else:
        self.transClient = transClient
    if replicaManager is None:
        from DIRAC.DataManagementSystem.Client.ReplicaManager import ReplicaManager
        self.rm = ReplicaManager()
    else:
        self.rm = replicaManager
def initialize(self):
    """Create the DB handles, clients and factories; set the shifter proxy."""
    self.section = PathFinder.getAgentSection(AGENT_NAME)
    self.RequestDB = RequestDBMySQL()
    self.TransferDB = TransferDB()
    self.DataLog = DataLoggingClient()
    self.factory = StorageFactory()
    self.rm = ReplicaManager()
    # Run under the proxy defined at /Operations/Shifter/DataManager;
    # the shifterProxy CS option can override this default.
    self.am_setOption('shifterProxy', 'DataManager')
    return S_OK()
def initialize(self):
    """Build the clients/DB handles, read the pin lifetime and set the proxy."""
    self.replicaManager = ReplicaManager()
    self.dataIntegrityClient = DataIntegrityClient()
    self.storageDB = StorageManagementDB()
    # pin lifetime, defaulting to THROTTLING_TIME (labelled "1 day" upstream
    # -- confirm the constant's value if that matters)
    self.pinLifetime = self.am_getOption('PinLifetime', THROTTLING_TIME)
    # helper for resource (SE) lookups
    self.resources = Resources()
    # Run under the proxy defined at /Operations/Shifter/DataManager;
    # the shifterProxy CS option can override this default.
    self.am_setOption('shifterProxy', 'DataManager')
    return S_OK()
def initialize(self):
    """Set up the clients, the thread pool and the DataManager shifter proxy."""
    self.RequestDBClient = RequestClient()
    self.ReplicaManager = ReplicaManager()
    self.DataLog = DataLoggingClient()
    # thread-pool sizing, taken from the CS
    self.maxNumberOfThreads = self.am_getOption('NumberOfThreads', 1)
    self.threadPoolDepth = self.am_getOption('ThreadPoolDepth', 1)
    self.threadPool = ThreadPool(1, self.maxNumberOfThreads)
    # Run under the proxy defined at /Operations/Shifter/DataManager;
    # the shifterProxy CS option can override this default.
    self.am_setOption('shifterProxy', 'DataManager')
    return S_OK()
def finish(self):
    """Upload every staged file to a Storage Element.

    Iterates over self.listFileStaged (item[0] = destination LFN,
    item[1] = local path) and calls rm.putAndRegister() against self.SE,
    retrying once after 5 s on the same SE, then falling back to other SEs
    (picked via self.__pickRandomSE() from self.listSEs) until one succeeds
    or no SE remains.

    :return: number of files that could NOT be stored (0 means full success)
    """
    rc = 0  # failure counter
    rm = ReplicaManager()
    for item in self.listFileStaged:
        if not self.SE:
            # no SE available at all: count this file as failed
            self.log.info("No SE available for '" + item[0] + "'")
            rc += 1
            continue
        else:
            self.log.info("Trying to store '" + item[0] + "' in SE : '" + self.SE + "' ...")
            result = rm.putAndRegister(item[1], item[0], self.SE)
            if not result['OK']:
                # first failure: wait 5 s and retry once on the same SE
                self.log.info('ERROR %s' % (result['Message']))
                self.log.info("Wait 5sec before trying again...")
                time.sleep(5)
                result = rm.putAndRegister(item[1], item[0], self.SE)
                if not result['OK']:
                    self.log.info('ERROR %s' % (result['Message']))
                    # still failing: keep switching to a new random SE until
                    # one works or the SE list is exhausted
                    while not result['OK']:
                        self.listSEs.remove( self.SE )  # make sure not to pick the same SE again.
                        self.__pickRandomSE()
                        if not self.SE:
                            break
                        self.log.info("Trying with another SE : '" + self.SE + "' . In 5sec...")
                        time.sleep(5)
                        result = rm.putAndRegister(item[1], item[0], self.SE)
            # final verdict for this file
            if result['OK']:
                self.log.info("file stored : '" + item[1] + "' in '" + self.SE + "'")
            else:
                self.log.error( "ERROR : failed to store the file '" + item[1] + "' ...")
                rc += 1
    return rc
def __init__(self, plugin, transClient=None, replicaManager=None):
    """Constructor.

    The plugin name is executed later as one of the underscore methods,
    e.g. plugin = 'BySize' runs TransformationPlugin('BySize')._BySize().

    :param plugin: plugin name
    :param transClient: optional TransformationClient (created when omitted)
    :param replicaManager: optional ReplicaManager (created when omitted)
    """
    self.params = {}
    self.data = {}
    self.plugin = plugin
    self.files = False
    # use the supplied clients, or build defaults
    self.transClient = TransformationClient() if transClient is None else transClient
    self.rm = ReplicaManager() if replicaManager is None else replicaManager
def initialize(self):
    """Standard initialization: read the CS options and build the clients."""
    self.pluginLocation = self.am_getOption(
        'PluginLocation', 'DIRAC.TransformationSystem.Agent.TransformationPlugin')
    self.checkCatalog = self.am_getOption('CheckCatalog', 'yes')
    self.transformationStatus = self.am_getOption(
        'transformationStatus', ['Active', 'Completing', 'Flush'])
    self.maxFiles = self.am_getOption('MaxFiles', 5000)
    # work under the ProductionManager shifter proxy
    self.am_setOption('shifterProxy', 'ProductionManager')
    self.transDB = TransformationClient('TransformationDB')
    self.rm = ReplicaManager()
    # per-transformation cache of unused-file counts
    self.unusedFiles = {}
    return S_OK()
def __prepareFileForHTTP(self, lfn, key):
    """Stage an LFN into the local HTTP cache directory for download.

    :param lfn: logical file name to fetch
    :param key: cache key used as the sub-directory name under httpPath
    :return: ReplicaManager.getFile() result dict with 'CachePath' added
    """
    global httpPath
    res = self.__prepareSecurityDetails()
    if not res['OK']:
        return res
    # per-key cache directory under the global HTTP path
    getFileDir = "%s/%s" % (httpPath, key)
    # fix: don't crash with OSError when the cache directory already exists
    if not os.path.isdir(getFileDir):
        os.makedirs(getFileDir)
    # fetch the file into the cache
    from DIRAC.DataManagementSystem.Client.ReplicaManager import ReplicaManager
    rm = ReplicaManager()
    result = rm.getFile(lfn, destinationDir=getFileDir)
    result['CachePath'] = getFileDir
    return result
def initializeOptimizer( self ):
    """Initialize specific parameters for JobSanityAgent.

    :return: S_ERROR when the ReplicaManager cannot be built; None otherwise
    """
    self.failedMinorStatus = self.am_getOption( '/FailedJobStatus', 'Input Data Not Available' )
    # this will ignore failover SE files
    self.checkFileMetadata = self.am_getOption( 'CheckFileMetadata', True )
    # Define the shifter proxy needed:
    # this sets the default proxy to that defined under
    # /Operations/Shifter/ProductionManager; the shifterProxy CS option
    # can be used to change this default.
    self.am_setOption( 'shifterProxy', 'ProductionManager' )
    try:
        self.replicaManager = ReplicaManager()
    except Exception as e:  # fix: 'except X, e' is Python-2-only syntax
        msg = 'Failed to create ReplicaManager'
        self.log.exception( msg )
        return S_ERROR( msg + str( e ) )
def removeOutputData(baseDir): res = getProxyInfo(False, False) if not res['OK']: gLogger.error("Failed to get client proxy information.", res['Message']) return S_ERROR("Failed to get client proxy information.", res['Message']) # ######################################################################################################## # rm = ReplicaManager() result = rm.cleanLogicalDirectory(baseDir) print "Ignore the message about the file '" + baseDir + "dirac_directory'" if not result['OK']: print 'ERROR: %s' % (result['Message']) return S_ERROR("Failed to Suppress the directory : '" + baseDir + "'") return S_OK(baseDir + " has been supressed")
def initialize(self):
    """Set agent defaults: clients, shifter proxy and CS-driven options."""
    # clients
    self.replicaManager = ReplicaManager()
    self.transClient = TransformationClient()
    self.wmsClient = WMSClient()
    self.requestClient = RequestClient()
    self.metadataClient = FileCatalogClient()
    self.storageUsageClient = StorageUsageClient()
    # Run under the proxy defined at /Operations/Shifter/DataManager;
    # the shifterProxy CS option can override this default.
    self.am_setOption('shifterProxy', 'DataManager')
    # CS options, each logged as it is resolved
    self.transformationTypes = sortList(
        self.am_getOption('TransformationTypes', [
            'MCSimulation', 'DataReconstruction', 'DataStripping',
            'MCStripping', 'Merge', 'Replication'
        ]))
    gLogger.info("Will consider the following transformation types: %s" %
                 str(self.transformationTypes))
    self.directoryLocations = sortList(
        self.am_getOption('DirectoryLocations',
                          ['TransformationDB', 'StorageUsage', 'MetadataCatalog']))
    gLogger.info("Will search for directories in the following locations: %s" %
                 str(self.directoryLocations))
    self.transfidmeta = self.am_getOption('TransfIDMeta', "TransformationID")
    gLogger.info("Will use %s as metadata tag name for TransformationID" %
                 self.transfidmeta)
    self.archiveAfter = self.am_getOption('ArchiveAfter', 7)  # days
    gLogger.info("Will archive Completed transformations after %d days" %
                 self.archiveAfter)
    self.activeStorages = sortList(self.am_getOption('ActiveSEs', []))
    gLogger.info("Will check the following storage elements: %s" %
                 str(self.activeStorages))
    self.logSE = self.am_getOption('TransformationLogSE', 'LogSE')
    gLogger.info("Will remove logs found on storage element: %s" % self.logSE)
    return S_OK()
def __init__(self, useCertificates=False):
    """Constructor.

    :param self: self reference
    :param bool useCertificates: flag to enable/disable certificates
    """
    Client.__init__(self)
    self.log = gLogger.getSubLogger("DataManagement/FTSClient")
    self.setServer("DataManagement/FTSManager")
    # helper clients
    self.ftsValidator = FTSValidator()
    self.replicaManager = ReplicaManager()
    self.storageFactory = StorageFactory()
    # resolve the FTSManager service URL; a missing CS entry is fatal
    url = PathFinder.getServiceURL("DataManagement/FTSManager")
    if not url:
        raise RuntimeError(
            "CS option DataManagement/FTSManager URL is not set!")
    self.ftsManager = RPCClient(url)