def __generateStorageObject(self, storageName, pluginName, parameters, hideExceptions=False): """ Generate a Storage Element from parameters collected :param storageName: is the storage section to check in the CS :param pluginName: name of the plugin used. Example: GFAL2_XROOT, GFAL2_SRM2... :param parameters: dictionary of protocol details. """ storageType = pluginName if self.proxy: storageType = 'Proxy' objectLoader = ObjectLoader() result = objectLoader.loadObject('Resources.Storage.%sStorage' % storageType, storageType + 'Storage', hideExceptions=hideExceptions) if not result['OK']: gLogger.error('Failed to load storage object: %s' % result['Message']) return result storageClass = result['Value'] try: storage = storageClass(storageName, parameters) except Exception as x: errStr = "StorageFactory._generateStorageObject: Failed to instantiate %s: %s" % (storageName, x) gLogger.exception(errStr) return S_ERROR(errStr) return S_OK(storage)
def initializeHandler(cls, svcInfoDict): """WMS AdministratorService initialization""" try: result = ObjectLoader().loadObject( "WorkloadManagementSystem.DB.JobDB", "JobDB") if not result["OK"]: return result cls.jobDB = result["Value"](parentLogger=cls.log) except RuntimeError as excp: return S_ERROR(f"Can't connect to DB: {excp!r}") cls.elasticJobParametersDB = None useESForJobParametersFlag = Operations().getValue( "/Services/JobMonitoring/useESForJobParametersFlag", False) if useESForJobParametersFlag: try: result = ObjectLoader().loadObject( "WorkloadManagementSystem.DB.ElasticJobParametersDB", "ElasticJobParametersDB") if not result["OK"]: return result cls.elasticJobParametersDB = result["Value"]() except RuntimeError as excp: return S_ERROR(f"Can't connect to DB: {excp!r}") cls.pilotManager = PilotManagerClient() return S_OK()
def initializeHandler(cls, serviceInfoDict): """Initialization of DB objects""" try: result = ObjectLoader().loadObject( "WorkloadManagementSystem.DB.PilotAgentsDB", "PilotAgentsDB") if not result["OK"]: return result cls.pilotAgentsDB = result["Value"](parentLogger=cls.log) except RuntimeError as excp: return S_ERROR("Can't connect to DB: %s" % excp) cls.pilotsLoggingDB = None enablePilotsLogging = Operations().getValue( "/Services/JobMonitoring/usePilotsLoggingFlag", False) if enablePilotsLogging: try: result = ObjectLoader().loadObject( "WorkloadManagementSystem.DB.PilotsLoggingDB", "PilotsLoggingDB") if not result["OK"]: return result cls.pilotsLoggingDB = result["Value"](parentLogger=cls.log) except RuntimeError as excp: return S_ERROR("Can't connect to DB: %s" % excp) return S_OK()
def initialize(self): """Standard initialize method""" res = TaskManagerAgentBase.initialize(self) if not res["OK"]: return res objLoader = ObjectLoader() _class = objLoader.loadObject( "TransformationSystem.Client.RequestTasks", "RequestTasks") if not _class["OK"]: raise Exception(_class["Message"]) self.requestTasksCls = _class["Value"] # clients self.taskManager = self.requestTasksCls(transClient=self.transClient) agentTSTypes = self.am_getOption("TransType", []) if agentTSTypes: self.transType = agentTSTypes else: self.transType = Operations().getValue( "Transformations/DataManipulation", ["Replication", "Removal"]) return S_OK()
def initializeHandler(cls, svcInfoDict): """ Determines the switching of ElasticSearch and MySQL backends """ try: result = ObjectLoader().loadObject( "WorkloadManagementSystem.DB.JobDB", "JobDB") if not result["OK"]: return result cls.jobDB = result["Value"]() result = ObjectLoader().loadObject( "WorkloadManagementSystem.DB.JobLoggingDB", "JobLoggingDB") if not result["OK"]: return result cls.jobLoggingDB = result["Value"]() except RuntimeError as excp: return S_ERROR("Can't connect to DB: %s" % excp) cls.elasticJobParametersDB = None useESForJobParametersFlag = Operations().getValue( "/Services/JobMonitoring/useESForJobParametersFlag", False) if useESForJobParametersFlag: try: result = ObjectLoader().loadObject( "WorkloadManagementSystem.DB.ElasticJobParametersDB", "ElasticJobParametersDB") if not result["OK"]: return result cls.elasticJobParametersDB = result["Value"]() except RuntimeError as excp: return S_ERROR("Can't connect to DB: %s" % excp) return S_OK()
def initialize(self): """Sets default parameters and creates CE instance""" super(PushJobAgent, self).initialize() result = self._initializeComputingElement("Pool") if not result["OK"]: return result # on-the fly imports ol = ObjectLoader() res = ol.loadModule("ConfigurationSystem.Client.Helpers.Resources") if not res["OK"]: sys.exit(res["Message"]) self.resourcesModule = res["Value"] self.opsHelper = Operations() # Disable Watchdog: we don't need it as pre/post processing occurs locally setup = gConfig.getValue("/DIRAC/Setup", "") if not setup: return S_ERROR("Cannot get the DIRAC Setup value") wms_instance = getSystemInstance("WorkloadManagement") if not wms_instance: return S_ERROR("Cannot get the WorkloadManagement system instance") section = "/Systems/WorkloadManagement/%s/JobWrapper" % wms_instance self._updateConfiguration("CheckWallClockFlag", 0, path=section) self._updateConfiguration("CheckDiskSpaceFlag", 0, path=section) self._updateConfiguration("CheckLoadAvgFlag", 0, path=section) self._updateConfiguration("CheckCPUConsumedFlag", 0, path=section) self._updateConfiguration("CheckCPULimitFlag", 0, path=section) self._updateConfiguration("CheckMemoryLimitFlag", 0, path=section) self._updateConfiguration("CheckTimeLeftFlag", 0, path=section) return S_OK()
def initializeHandler(cls, serviceInfoDict): try: result = ObjectLoader().loadObject("WorkloadManagementSystem.DB.JobDB", "JobDB") if not result["OK"]: return result cls.jobDB = result["Value"]() result = ObjectLoader().loadObject("WorkloadManagementSystem.DB.JobLoggingDB", "JobLoggingDB") if not result["OK"]: return result cls.jobLoggingDB = result["Value"]() result = ObjectLoader().loadObject("WorkloadManagementSystem.DB.TaskQueueDB", "TaskQueueDB") if not result["OK"]: return result cls.taskQueueDB = result["Value"]() result = ObjectLoader().loadObject("WorkloadManagementSystem.DB.PilotAgentsDB", "PilotAgentsDB") if not result["OK"]: return result cls.pilotAgentsDB = result["Value"]() except RuntimeError as excp: return S_ERROR("Can't connect to DB: %s" % excp) cls.limiter = Limiter(jobDB=cls.jobDB) gMonitor.registerActivity("matchTime", "Job matching time", "Matching", "secs", gMonitor.OP_MEAN, 300) gMonitor.registerActivity("matchesDone", "Job Match Request", "Matching", "matches", gMonitor.OP_RATE, 300) gMonitor.registerActivity("matchesOK", "Matched jobs", "Matching", "matches", gMonitor.OP_RATE, 300) gMonitor.registerActivity("numTQs", "Number of Task Queues", "Matching", "tqsk queues", gMonitor.OP_MEAN, 300) return S_OK()
def initialize(self): """Standard initialize.""" res = ObjectLoader().loadObject( "DIRAC.ResourceStatusSystem.Client.ResourceManagementClient") if not res["OK"]: self.log.error( "Failed to load ResourceManagementClient class: %s" % res["Message"]) return res rmClass = res["Value"] res = ObjectLoader().loadObject( "DIRAC.ResourceStatusSystem.Client.ResourceStatusClient") if not res["OK"]: self.log.error("Failed to load ResourceStatusClient class: %s" % res["Message"]) return res rsClass = res["Value"] self.rsClient = rsClass() self.clients["ResourceStatusClient"] = rsClass() self.clients["ResourceManagementClient"] = rmClass() maxNumberOfThreads = self.am_getOption("maxNumberOfThreads", 15) self.log.info("Multithreaded with %d threads" % maxNumberOfThreads) self.threadPoolExecutor = concurrent.futures.ThreadPoolExecutor( max_workers=maxNumberOfThreads) return S_OK()
def initialize(self): """ Standard initialize. """ maxNumberOfThreads = self.am_getOption('maxNumberOfThreads', self.__maxNumberOfThreads) self.threadPool = ThreadPool(maxNumberOfThreads, maxNumberOfThreads) res = ObjectLoader().loadObject( 'DIRAC.ResourceStatusSystem.Client.SiteStatus', 'SiteStatus') if not res['OK']: self.log.error('Failed to load SiteStatus class: %s' % res['Message']) return res siteStatusClass = res['Value'] res = ObjectLoader().loadObject( 'DIRAC.ResourceStatusSystem.Client.ResourceManagementClient', 'ResourceManagementClient') if not res['OK']: self.log.error( 'Failed to load ResourceManagementClient class: %s' % res['Message']) return res rmClass = res['Value'] self.siteClient = siteStatusClass() self.clients['SiteStatus'] = siteStatusClass() self.clients['ResourceManagementClient'] = rmClass() return S_OK()
def registerBackend(self, desiredBackend, backendOptions=None): """ Attach a backend to the Logging object. Convert backend name to backend class name to a Backend object and add it to the Logging object :params desiredBackend: a name attaching to a backend type. list of the possible values: ['stdout', 'stderr', 'file', 'server'] :params backendOptions: dictionary of different backend options. example: FileName='/tmp/log.txt' """ # import ObjectLoader here to avoid a dependancy loop from DIRAC.Core.Utilities.ObjectLoader import ObjectLoader objLoader = ObjectLoader() # Remove white space and capitalize the first letter desiredBackend = desiredBackend.strip() desiredBackend = desiredBackend[0].upper() + desiredBackend[1:] # lock to avoid problem in ObjectLoader which is a singleton not # thread-safe self._lockObjectLoader.acquire() try: # load the Backend class _class = objLoader.loadObject('Resources.LogBackends.%sBackend' % desiredBackend) finally: self._lockObjectLoader.release() if _class['OK']: # add the backend instance to the Logging self._addBackend(_class['Value'](), backendOptions) self._generateBackendFormat() else: self._generateBackendFormat() self.warn("%s is not a valid backend name." % desiredBackend)
def __generateStorageObject(self, storageName, pluginName, parameters, hideExceptions=False): storageType = pluginName if self.proxy: storageType = 'Proxy' objectLoader = ObjectLoader() result = objectLoader.loadObject('Resources.Storage.%sStorage' % storageType, storageType + 'Storage', hideExceptions=hideExceptions) if not result['OK']: gLogger.error('Failed to load storage object: %s' % result['Message']) return result storageClass = result['Value'] try: storage = storageClass(storageName, parameters) except Exception, x: errStr = "StorageFactory._generateStorageObject: Failed to instantiate %s: %s" % ( storageName, x) gLogger.exception(errStr) return S_ERROR(errStr)
def initializeHandler(cls, serviceInfoDict): try: result = ObjectLoader().loadObject( "WorkloadManagementSystem.DB.JobDB", "JobDB") if not result["OK"]: return result cls.jobDB = result["Value"](parentLogger=cls.log) result = ObjectLoader().loadObject( "WorkloadManagementSystem.DB.JobLoggingDB", "JobLoggingDB") if not result["OK"]: return result cls.jobLoggingDB = result["Value"](parentLogger=cls.log) result = ObjectLoader().loadObject( "WorkloadManagementSystem.DB.TaskQueueDB", "TaskQueueDB") if not result["OK"]: return result cls.taskQueueDB = result["Value"](parentLogger=cls.log) result = ObjectLoader().loadObject( "WorkloadManagementSystem.DB.PilotAgentsDB", "PilotAgentsDB") if not result["OK"]: return result cls.pilotAgentsDB = result["Value"](parentLogger=cls.log) except RuntimeError as excp: return S_ERROR("Can't connect to DB: %s" % excp) cls.limiter = Limiter(jobDB=cls.jobDB) return S_OK()
def registerBackend(self, desiredBackend, backendOptions=None): """ Attach a backend to the Logging object. Convert backend name to backend class name to a Backend object and add it to the Logging object :params desiredBackend: a name attaching to a backend type. list of the possible values: ['stdout', 'stderr', 'file', 'server'] :params backendOptions: dictionary of different backend options. example: FileName='/tmp/log.txt' """ # import ObjectLoader here to avoid a dependancy loop from DIRAC.Core.Utilities.ObjectLoader import ObjectLoader objLoader = ObjectLoader() # Remove white space and capitalize the first letter desiredBackend = desiredBackend.strip() desiredBackend = desiredBackend[0].upper() + desiredBackend[1:] # lock to avoid problem in ObjectLoader which is a singleton not thread-safe self._lockObjectLoader.acquire() try: # load the Backend class _class = objLoader.loadObject( 'DIRAC.Resources.LogBackends.%sBackend' % desiredBackend) finally: self._lockObjectLoader.release() if _class['OK']: # add the backend instance to the Logging self._addBackend(_class['Value'](), backendOptions) self._generateBackendFormat() else: self._generateBackendFormat() self.warn("%s is not a valid backend name." % desiredBackend)
def initialize(self): """ Define the commands to be executed, and instantiate the clients that will be used. """ res = ObjectLoader().loadObject('DIRAC.ResourceStatusSystem.Client.ResourceStatusClient', 'ResourceStatusClient') if not res['OK']: self.log.error('Failed to load ResourceStatusClient class: %s' % res['Message']) return res rsClass = res['Value'] res = ObjectLoader().loadObject('DIRAC.ResourceStatusSystem.Client.ResourceManagementClient', 'ResourceManagementClient') if not res['OK']: self.log.error('Failed to load ResourceManagementClient class: %s' % res['Message']) return res rmClass = res['Value'] self.commands['Downtime'] = [{'Downtime': {}}] self.commands['GOCDBSync'] = [{'GOCDBSync': {}}] self.commands['FreeDiskSpace'] = [{'FreeDiskSpace': {}}] # PilotsCommand # self.commands[ 'Pilots' ] = [ # { 'PilotsWMS' : { 'element' : 'Site', 'siteName' : None } }, # { 'PilotsWMS' : { 'element' : 'Resource', 'siteName' : None } } # ] # FIXME: do not forget about hourly vs Always ...etc # AccountingCacheCommand # self.commands[ 'AccountingCache' ] = [ # {'SuccessfullJobsBySiteSplitted' :{'hours' :24, 'plotType' :'Job' }}, # {'FailedJobsBySiteSplitted' :{'hours' :24, 'plotType' :'Job' }}, # {'SuccessfullPilotsBySiteSplitted' :{'hours' :24, 'plotType' :'Pilot' }}, # {'FailedPilotsBySiteSplitted' :{'hours' :24, 'plotType' :'Pilot' }}, # {'SuccessfullPilotsByCESplitted' :{'hours' :24, 'plotType' :'Pilot' }}, # {'FailedPilotsByCESplitted' :{'hours' :24, 'plotType' :'Pilot' }}, # {'RunningJobsBySiteSplitted' :{'hours' :24, 'plotType' :'Job' }}, # # {'RunningJobsBySiteSplitted' :{'hours' :168, 'plotType' :'Job' }}, # # {'RunningJobsBySiteSplitted' :{'hours' :720, 'plotType' :'Job' }}, # # {'RunningJobsBySiteSplitted' :{'hours' :8760, 'plotType' :'Job' }}, # ] # VOBOXAvailability # self.commands[ 'VOBOXAvailability' ] = [ # { 'VOBOXAvailability' : {} } # # Reuse clients for the commands self.clients['GOCDBClient'] = GOCDBClient() self.clients['ReportsClient'] = ReportsClient() self.clients['ResourceStatusClient'] = rsClass() self.clients['ResourceManagementClient'] = rmClass() self.clients['WMSAdministrator'] = WMSAdministratorClient() self.clients['Pilots'] = PilotManagerClient() self.cCaller = CommandCaller return S_OK()
def __init__(self, clients=dict()): """ Constructor examples: >>> pep = PEP() >>> pep1 = PEP( { 'ResourceStatusClient' : ResourceStatusClient() } ) >>> pep2 = PEP( { 'ResourceStatusClient' : ResourceStatusClient(), 'ClientY' : None } ) :Parameters: **clients** - [ None, `dict` ] dictionary with clients to be used in the commands issued by the policies. If not defined, the commands will import them. It is a measure to avoid opening the same connection every time a policy is evaluated. """ self.clients = dict(clients) # Creating the client in the PEP is a convenience for the PDP, that uses internally the RSS clients res = ObjectLoader().loadObject( 'DIRAC.ResourceStatusSystem.Client.ResourceStatusClient', 'ResourceStatusClient') if not res['OK']: self.log.error('Failed to load ResourceStatusClient class: %s' % res['Message']) raise ImportError(res['Message']) rsClass = res['Value'] res = ObjectLoader().loadObject( 'DIRAC.ResourceStatusSystem.Client.ResourceManagementClient', 'ResourceManagementClient') if not res['OK']: self.log.error( 'Failed to load ResourceManagementClient class: %s' % res['Message']) raise ImportError(res['Message']) rmClass = res['Value'] res = ObjectLoader().loadObject( 'DIRAC.ResourceStatusSystem.Client.SiteStatus', 'SiteStatus') if not res['OK']: self.log.error('Failed to load SiteStatus class: %s' % res['Message']) raise ImportError(res['Message']) ssClass = res['Value'] if 'ResourceStatusClient' not in clients: self.clients['ResourceStatusClient'] = rsClass() if 'ResourceManagementClient' not in clients: self.clients['ResourceManagementClient'] = rmClass() if 'SiteStatus' not in clients: self.clients['SiteStatus'] = ssClass() # Pass to the PDP the clients that are going to be used on the Commands self.pdp = PDP(self.clients) self.log = gLogger
def __calculateRoutes( self ): """ Load all handlers and generate the routes """ ol = ObjectLoader( [ 'WebAppDIRAC' ] ) origin = "WebApp.handler" result = ol.getObjects( origin, parentClass = WebHandler, recurse = True ) if not result[ 'OK' ]: return result self.__handlers = result[ 'Value' ] staticPaths = self.getPaths( "static" ) self.log.verbose( "Static paths found:\n - %s" % "\n - ".join( staticPaths ) ) self.__routes = [] for pattern in ( ( r"/static/(.*)", r"/(favicon\.ico)", r"/(robots\.txt)" ) ): if self.__baseURL: pattern = "/%s%s" % ( self.__baseURL, pattern ) self.__routes.append( ( pattern, StaticHandler, dict( pathList = staticPaths ) ) ) for hn in self.__handlers: self.log.info( "Found handler %s" % hn ) handler = self.__handlers[ hn ] #CHeck it has AUTH_PROPS if type( handler.AUTH_PROPS ) == None: return S_ERROR( "Handler %s does not have AUTH_PROPS defined. Fix it!" % hn ) #Get the root for the handler if handler.LOCATION: handlerRoute = handler.LOCATION.strip( "/") else: handlerRoute = hn[ len( origin ): ].replace( ".", "/" ).replace( "Handler", "" ) #Add the setup group RE before baseRoute = self.__setupGroupRE #IF theres a base url like /DIRAC add it if self.__baseURL: baseRoute = "/%s%s" % ( self.__baseURL, baseRoute ) #Set properly the LOCATION after calculating where it is with helpers to add group and setup later handler.LOCATION = handlerRoute handler.PATH_RE = re.compile( "%s(%s/.*)" % ( baseRoute, handlerRoute ) ) handler.URLSCHEMA = "/%s%%(setup)s%%(group)s%%(location)s/%%(action)s" % ( self.__baseURL ) #Look for methods that are exported for mName, mObj in inspect.getmembers( handler ): if inspect.ismethod( mObj ) and mName.find( "web_" ) == 0: if mName == "web_index": #Index methods have the bare url self.log.verbose( " - Route %s -> %s.web_index" % ( handlerRoute, hn ) ) route = "%s(%s/)" % ( baseRoute, handlerRoute ) self.__routes.append( ( route, handler ) ) self.__routes.append( ( route.rstrip( "/" ), CoreHandler, dict( action = 'addSlash' ) ) ) else: #Normal methods get the method appeded without web_ self.log.verbose( " - Route %s/%s -> %s.%s" % ( handlerRoute, mName[4:], hn, mName ) ) route = "%s(%s/%s)" % ( baseRoute, handlerRoute, mName[4:] ) self.__routes.append( ( route, handler ) ) self.log.debug( " * %s" % route ) #Send to root self.__routes.append( ( "%s(/?)" % self.__setupGroupRE, CoreHandler, dict( action = "sendToRoot" ) ) ) if self.__baseURL: self.__routes.append( ( "/%s%s()" % ( self.__baseURL, self.__setupGroupRE ), CoreHandler, dict( action = "sendToRoot" ) ) ) return S_OK()
def createCatalog(self, catalogName, useProxy=False, vo=None, catalogConfig={}): """ Create a file catalog object from its name and CS description """ if useProxy: catalog = FileCatalogProxyClient(catalogName) return S_OK(catalog) # get the CS description first catConfig = catalogConfig if not catConfig: if not vo: result = getVOfromProxyGroup() if not result['OK']: return result vo = result['Value'] reHelper = Resources(vo=vo) result = reHelper.getCatalogOptionsDict(catalogName) if not result['OK']: return result catConfig = result['Value'] catalogType = catConfig.get('CatalogType', catalogName) catalogURL = catConfig.get('CatalogURL', '') self.log.verbose('Creating %s client' % catalogName) objectLoader = ObjectLoader() result = objectLoader.loadObject( 'Resources.Catalog.%sClient' % catalogType, catalogType + 'Client') if not result['OK']: gLogger.error('Failed to load catalog object: %s' % result['Message']) return result catalogClass = result['Value'] try: if catalogType in ['LcgFileCatalogCombined', 'LcgFileCatalog']: # The LFC special case infoSys = catConfig.get('LcgGfalInfosys', '') host = catConfig.get('MasterHost', '') catalog = catalogClass(infoSys, host) else: if catalogURL: catalog = catalogClass(url=catalogURL) else: catalog = catalogClass() self.log.debug('Loaded module %sClient' % catalogType) return S_OK(catalog) except Exception, x: errStr = "Failed to instantiate %s()" % (catalogType) gLogger.exception(errStr, lException=x) return S_ERROR(errStr)
def loadBatchSystem( self ): """ Instantiate object representing the backend batch system """ self.batchSystem = self.ceParameters['BatchSystem'] objectLoader = ObjectLoader() result = objectLoader.loadObject( 'Resources.Computing.BatchSystems.%s' % self.batchSystem, self.batchSystem ) if not result['OK']: gLogger.error( 'Failed to load batch object: %s' % result['Message'] ) return result batchClass = result['Value'] self.batchModuleFile = result['ModuleFile'] self.batch = batchClass() self.log.info( "Batch system class from module: ", self.batchModuleFile )
def createCatalog( self, catalogName, useProxy = False, vo = None, catalogConfig = {} ): """ Create a file catalog object from its name and CS description """ if useProxy: catalog = FileCatalogProxyClient( catalogName ) return S_OK( catalog ) # get the CS description first catConfig = catalogConfig if not catConfig: if not vo: result = getVOfromProxyGroup() if not result['OK']: return result vo = result['Value'] reHelper = Resources( vo = vo ) result = reHelper.getCatalogOptionsDict( catalogName ) if not result['OK']: return result catConfig = result['Value'] catalogType = catConfig.get('CatalogType',catalogName) catalogURL = catConfig.get('CatalogURL','') self.log.verbose( 'Creating %s client' % catalogName ) objectLoader = ObjectLoader() result = objectLoader.loadObject( 'Resources.Catalog.%sClient' % catalogType, catalogType+'Client' ) if not result['OK']: gLogger.error( 'Failed to load catalog object: %s' % result['Message'] ) return result catalogClass = result['Value'] try: if catalogType in ['LcgFileCatalogCombined','LcgFileCatalog']: # The LFC special case infoSys = catConfig.get('LcgGfalInfosys','') host = catConfig.get('MasterHost','') catalog = catalogClass( infoSys, host ) else: if catalogURL: catalog = catalogClass( url = catalogURL ) else: catalog = catalogClass() self.log.debug('Loaded module %sClient' % catalogType ) return S_OK( catalog ) except Exception, x: errStr = "Failed to instantiate %s()" % ( catalogType ) gLogger.exception( errStr, lException = x ) return S_ERROR( errStr )
def __loadLogClass(self, modulePath): """Load class thread-safe.""" # import ObjectLoader here to avoid a dependancy loop from DIRAC.Core.Utilities.ObjectLoader import ObjectLoader objLoader = ObjectLoader() # lock to avoid problem in ObjectLoader which is a singleton not # thread-safe self._lockObjectLoader.acquire() try: # load the Backend class return objLoader.loadObject(modulePath) finally: self._lockObjectLoader.release() return S_ERROR()
def initializeHandler(cls, serviceInfoDict): """Initialization of DB objects and OptimizationMind""" try: result = ObjectLoader().loadObject( "WorkloadManagementSystem.DB.JobDB", "JobDB") if not result["OK"]: return result cls.jobDB = result["Value"](parentLogger=cls.log) result = ObjectLoader().loadObject( "WorkloadManagementSystem.DB.JobLoggingDB", "JobLoggingDB") if not result["OK"]: return result cls.jobLoggingDB = result["Value"](parentLogger=cls.log) result = ObjectLoader().loadObject( "WorkloadManagementSystem.DB.TaskQueueDB", "TaskQueueDB") if not result["OK"]: return result cls.taskQueueDB = result["Value"](parentLogger=cls.log) result = ObjectLoader().loadObject( "WorkloadManagementSystem.DB.PilotAgentsDB", "PilotAgentsDB") if not result["OK"]: return result cls.pilotAgentsDB = result["Value"](parentLogger=cls.log) except RuntimeError as excp: return S_ERROR("Can't connect to DB: %s" % excp) cls.pilotsLoggingDB = None enablePilotsLogging = Operations().getValue( "/Services/JobMonitoring/usePilotsLoggingFlag", False) if enablePilotsLogging: try: result = ObjectLoader().loadObject( "WorkloadManagementSystem.DB.PilotsLoggingDB", "PilotsLoggingDB") if not result["OK"]: return result cls.pilotsLoggingDB = result["Value"](parentLogger=cls.log) except RuntimeError as excp: return S_ERROR("Can't connect to DB: %s" % excp) cls.msgClient = MessageClient("WorkloadManagement/OptimizationMind") result = cls.msgClient.connect(JobManager=True) if not result["OK"]: cls.log.warn("Cannot connect to OptimizationMind!", result["Message"]) return S_OK()
def initializeOptimizer( cls ): objLoader = ObjectLoader() result = objLoader.getObjects( "WorkloadManagementSystem.Splitters", reFilter = ".*Splitter", parentClass = BaseSplitter ) if not result[ 'OK' ]: return result data = result[ 'Value' ] cls.__splitters = {} for k in data: spClass = data[k] spName = k.split(".")[-1][:-8] cls.__splitters[ spName ] = spClass cls.log.notice( "Found %s splitter" % spName ) cls.ex_setOption( "FailedStatus", "Invalid split" ) return S_OK()
class ObjectLoaderMainSuccessScenario(unittest.TestCase): def setUp(self): self.ol = ObjectLoader() def __check(self, result): if not result['OK']: self.fail(result['Message']) return result['Value'] def test_load(self): self.__check(self.ol.loadObject("Core.Utilities.List", 'fromChar')) self.__check(self.ol.loadObject("Core.Utilities.ObjectLoader", "ObjectLoader")) dataFilter = self.__check(self.ol.getObjects("WorkloadManagementSystem.Service", ".*Handler")) dataClass = self.__check(self.ol.getObjects("WorkloadManagementSystem.Service", parentClass=RequestHandler)) self.assertEqual(sorted(dataFilter), sorted(dataClass))
def initialize(self, jobDB=None, logDB=None): """Initialization of the Optimizer Agent.""" self.jobDB = JobDB() if jobDB is None else jobDB if not self.jobDB.isValid(): dExit(1) useESForJobParametersFlag = Operations().getValue( "/Services/JobMonitoring/useESForJobParametersFlag", False) if useESForJobParametersFlag: try: result = ObjectLoader().loadObject( "WorkloadManagementSystem.DB.ElasticJobParametersDB", "ElasticJobParametersDB") if not result["OK"]: return result self.elasticJobParametersDB = result["Value"]() except RuntimeError as excp: return S_ERROR("Can't connect to DB: %s" % excp) self.logDB = JobLoggingDB() if logDB is None else logDB optimizerName = self.am_getModuleParam("agentName") if optimizerName.endswith("Agent"): optimizerName = optimizerName[:-len("Agent")] self.am_setModuleParam("optimizerName", optimizerName) self.startingMinorStatus = self.am_getModuleParam("optimizerName") self.failedStatus = self.am_getOption("FailedJobStatus", JobStatus.FAILED) self.am_setOption("PollingTime", 30) return self.initializeOptimizer()
def initializeOptimizer(cls): objLoader = ObjectLoader() result = objLoader.getObjects("WorkloadManagementSystem.Splitters", reFilter=".*Splitter", parentClass=BaseSplitter) if not result['OK']: return result data = result['Value'] cls.__splitters = {} for k in data: spClass = data[k] spName = k.split(".")[-1][:-8] cls.__splitters[spName] = spClass cls.log.notice("Found %s splitter" % spName) cls.ex_setOption("FailedStatus", "Invalid split") return S_OK()
def initializeHandler(cls, serviceInfo): try: result = ObjectLoader().loadObject( "MonitoringSystem.DB.MonitoringDB", "MonitoringDB") if not result["OK"]: return result cls.__db = result["Value"]() except RuntimeError as excp: return S_ERROR("Can't connect to DB: %s" % excp) reportSection = serviceInfo["serviceSectionPath"] dataPath = gConfig.getValue("%s/DataLocation" % reportSection, "data/monitoringPlots") gLogger.info("Data will be written into %s" % dataPath) mkDir(dataPath) try: testFile = "%s/moni.plot.test" % dataPath with open(testFile, "w") as _: os.unlink(testFile) except IOError as err: gLogger.fatal("Can't write to %s" % dataPath, err) return S_ERROR("Data location is not writable: %s" % repr(err)) gDataCache.setGraphsLocation(dataPath) return S_OK()
def initializeHandler(cls, serviceInfoDict): useMyProxy = cls.srv_getCSOption("UseMyProxy", False) try: result = ObjectLoader().loadObject('FrameworkSystem.DB.ProxyDB', 'ProxyDB') if not result['OK']: gLogger.error('Failed to load ProxyDB class: %s' % result['Message']) return result dbClass = result['Value'] cls.__proxyDB = dbClass(useMyProxy=useMyProxy) except RuntimeError as excp: return S_ERROR("Can't connect to ProxyDB: %s" % excp) gThreadScheduler.addPeriodicTask(900, cls.__proxyDB.purgeExpiredTokens, elapsedTime=900) gThreadScheduler.addPeriodicTask(900, cls.__proxyDB.purgeExpiredRequests, elapsedTime=900) gThreadScheduler.addPeriodicTask(21600, cls.__proxyDB.purgeLogs) gThreadScheduler.addPeriodicTask(3600, cls.__proxyDB.purgeExpiredProxies) gLogger.info("MyProxy: %s\n MyProxy Server: %s" % (useMyProxy, cls.__proxyDB.getMyProxyServer())) return S_OK()
def __getCatalogClass(self, catalogType): result = ObjectLoader().loadObject("Resources.Catalog.%sClient" % catalogType) if not result["OK"]: gLogger.error("Failed to load catalog object", "%s" % result["Message"]) return result
def initializeHandler(cls, serviceInfoDict): """Initialization of Pilots Logging service""" cls.consumersSet = set() try: result = ObjectLoader().loadObject( "WorkloadManagementSystem.DB.PilotsLoggingDB", "PilotsLoggingDB") if not result["OK"]: return result cls.pilotsLoggingDB = result["Value"](parentLogger=cls.log) except RuntimeError as excp: return S_ERROR("Can't connect to DB: %s" % excp) queue = cls.srv_getCSOption("PilotsLoggingQueue") # This is pretty awful hack. Somehow, for uknown reason, I cannot access CS with srv_getCSOption. # The only way is using full CS path, so I'm using it as a backup solution. if not queue: queue = gConfig.getValue(serviceInfoDict["serviceSectionPath"] + "/PilotsLoggingQueue") result = createConsumer(queue, callback=cls.consumingCallback) if result["OK"]: cls.consumersSet.add(result["Value"]) else: return result return S_OK()
class ObjectLoaderMainSuccessScenario( unittest.TestCase ): def setUp( self ): self.ol = ObjectLoader() def __check( self, result ): if not result[ 'OK' ]: self.fail( result[ 'Message' ] ) return result[ 'Value' ] def test_load( self ): self.__check( self.ol.loadObject( "Core.Utilities.List", 'fromChar' ) ) self.__check( self.ol.loadObject( "Core.Utilities.ObjectLoader", "ObjectLoader" ) ) dataFilter = self.__check( self.ol.getObjects( "WorkloadManagementSystem.Service", ".*Handler" ) ) dataClass = self.__check( self.ol.getObjects( "WorkloadManagementSystem.Service", parentClass = RequestHandler ) ) self.assertEqual( sorted( dataFilter.keys() ), sorted( dataClass.keys() ) )
def __init__( self, vo, autoModifyUsers=True, autoAddUsers=True, autoDeleteUsers=False, autoLiftSuspendedStatus=False, syncPluginName=None, ): """VOMS2CSSynchronizer class constructor :param str vo: VO to be synced :param boolean autoModifyUsers: flag to automatically modify user data in CS :param autoAddUsers: flag to automatically add new users to CS :param autoDeleteUsers: flag to automatically delete users from CS if no more in VOMS :param autoLiftSuspendedStatus: flag to automatically remove Suspended status in CS :param syncPluginName: name of the plugin to validate or extend users' info :return: None """ self.log = gLogger.getSubLogger(self.__class__.__name__) self.csapi = CSAPI() self.vo = vo self.vomsVOName = getVOOption(vo, "VOMSName", "") if not self.vomsVOName: raise Exception("VOMS name not defined for VO %s" % vo) self.adminMsgs = {"Errors": [], "Info": []} self.vomsUserDict = {} self.autoModifyUsers = autoModifyUsers self.autoAddUsers = autoAddUsers self.autoDeleteUsers = autoDeleteUsers self.autoLiftSuspendedStatus = autoLiftSuspendedStatus self.voChanged = False self.syncPlugin = None if syncPluginName: objLoader = ObjectLoader() _class = objLoader.loadObject( "ConfigurationSystem.Client.SyncPlugins.%sSyncPlugin" % syncPluginName, "%sSyncPlugin" % syncPluginName ) if not _class["OK"]: raise Exception(_class["Message"]) self.syncPlugin = _class["Value"]()
def loadBatchSystem(self, batchSystemName): """Instantiate object representing the backend batch system :param str batchSystemName: name of the batch system """ if batchSystemName is None: batchSystemName = self.ceParameters["BatchSystem"] objectLoader = ObjectLoader() result = objectLoader.loadObject("Resources.Computing.BatchSystems.%s" % batchSystemName, batchSystemName) if not result["OK"]: self.log.error("Failed to load batch object: %s" % result["Message"]) return result batchClass = result["Value"] batchModuleFile = result["ModuleFile"] self.batchSystem = batchClass() self.log.info("Batch system class from module: ", batchModuleFile) return S_OK()
def loadResourceStatusComponent(moduleName, className): """ Create an object of a given database component. :param moduleName: module name to be loaded :param className: class name :return: object instance wrapped in a standard Dirac return object. """ objectLoader = ObjectLoader() componentModule = 'ResourceStatusSystem.DB.%s' % (moduleName, ) result = objectLoader.loadObject(componentModule, className) if not result['OK']: gLogger.error('Failed to load RSS component', '%s: %s' % (moduleName, result['Message'])) return result componentClass = result['Value'] component = componentClass() return S_OK(component)
def enableCS(self): """ Force the connection the Configuration Server (And incidentaly reinitialize the ObjectLoader and logger) """ res = gRefresher.enable() # This is quite ugly but necessary for the logging # We force the reinitialization of the ObjectLoader # so that it also takes into account the extensions # (since the first time it is loaded by the logger BEFORE the full CS init) # And then we regenerate all the backend if res['OK']: from DIRAC.Core.Utilities.ObjectLoader import ObjectLoader objLoader = ObjectLoader() objLoader.reloadRootModules() self.__initLogger(self.componentName, self.loggingSection, forceInit=True) return res
def setConfig(self, databaseConfig): self.directories = {} # In memory storage of the various parameters self.users = {} self.uids = {} self.groups = {} self.gids = {} self.seNames = {} self.seids = {} # Obtain some general configuration of the database self.uniqueGUID = databaseConfig['UniqueGUID'] self.globalReadAccess = databaseConfig['GlobalReadAccess'] self.lfnPfnConvention = databaseConfig['LFNPFNConvention'] if self.lfnPfnConvention == "None": self.lfnPfnConvention = False self.resolvePfn = databaseConfig['ResolvePFN'] self.umask = databaseConfig['DefaultUmask'] self.validFileStatus = databaseConfig['ValidFileStatus'] self.validReplicaStatus = databaseConfig['ValidReplicaStatus'] self.visibleFileStatus = databaseConfig['VisibleFileStatus'] self.visibleReplicaStatus = databaseConfig['VisibleReplicaStatus'] # Obtain the plugins to be used for DB interaction self.objectLoader = ObjectLoader() # Load the configured components for compAttribute, componentType in [("ugManager", "UserGroupManager"), ("seManager", "SEManager"), ("securityManager", "SecurityManager"), ("dtree", "DirectoryManager"), ("fileManager", "FileManager"), ("datasetManager", "DatasetManager"), ("dmeta", "DirectoryMetadata"), ("fmeta", "FileMetadata")]: result = self.__loadCatalogComponent(componentType, databaseConfig[componentType]) if not result['OK']: return result self.__setattr__(compAttribute, result['Value']) return S_OK()
def __generateStorageObject( self, storageName, pluginName, parameters ): storageType = pluginName if self.proxy: storageType = 'Proxy' objectLoader = ObjectLoader() result = objectLoader.loadObject( 'Resources.Storage.%sStorage' % storageType, storageType + 'Storage' ) if not result['OK']: gLogger.error( 'Failed to load storage object: %s' % result['Message'] ) return result storageClass = result['Value'] try: storage = storageClass( storageName, parameters ) except Exception, x: errStr = "StorageFactory._generateStorageObject: Failed to instantiate %s: %s" % ( storageName, x ) gLogger.exception( errStr ) return S_ERROR( errStr )
def __loadCatalogComponent(self, componentType, componentName): """Create an object of a given catalog component""" componentModule = "DataManagementSystem.DB.FileCatalogComponents.%s.%s" % (componentType, componentName) result = ObjectLoader().loadObject(componentModule) if not result["OK"]: gLogger.error("Failed to load catalog component", "%s: %s" % (componentName, result["Message"])) return result componentClass = result["Value"] component = componentClass(self) return S_OK(component)
def __init__( self, tokens ): """ :param tokens: [ pluginName, =, conditions ] the pluginName is automatically prepended with 'Plugin' """ self.pluginName = "%sPlugin" % tokens[0].strip( ' ' ) self.conditions = tokens[2].strip( ' ' ) # Load the plugin, and give it the condition objLoader = ObjectLoader() _class = objLoader.loadObject( 'Resources.Catalog.ConditionPlugins.%s' % self.pluginName, self.pluginName ) if not _class['OK']: raise Exception( _class['Message'] ) self._pluginInst = _class['Value']( self.conditions )
def bootstrap( self ): gLogger.always( "\n === Bootstrapping REST Server === \n" ) ol = ObjectLoader( [ 'DIRAC', 'RESTDIRAC' ] ) result = ol.getObjects( "RESTSystem.API", parentClass = RESTHandler, recurse = True ) if not result[ 'OK' ]: return result self.__handlers = result[ 'Value' ] if not self.__handlers: return S_ERROR( "No handlers found" ) self.__routes = [ ( self.__handlers[ k ].getRoute(), self.__handlers[k] ) for k in self.__handlers if self.__handlers[ k ].getRoute() ] gLogger.info( "Routes found:" ) for t in sorted( self.__routes ): gLogger.info( " - %s : %s" % ( t[0], t[1].__name__ ) ) balancer = RESTConf.balancer() kw = dict( debug = RESTConf.debug(), log_function = self._logRequest ) if balancer and RESTConf.numProcesses not in ( 0, 1 ): process.fork_processes( RESTConf.numProcesses(), max_restarts = 0 ) kw[ 'debug' ] = False if kw[ 'debug' ]: gLogger.always( "Starting in debug mode" ) self.__app = web.Application( self.__routes, **kw ) port = RESTConf.port() if balancer: gLogger.notice( "Configuring REST HTTP service for balancer %s on port %s" % ( balancer, port ) ) self.__sslops = False else: gLogger.notice( "Configuring REST HTTPS service on port %s" % port ) self.__sslops = dict( certfile = RESTConf.cert(), keyfile = RESTConf.key(), cert_reqs = ssl.CERT_OPTIONAL, ca_certs = RESTConf.generateCAFile() ) self.__httpSrv = httpserver.HTTPServer( self.__app, ssl_options = self.__sslops ) self.__httpSrv.listen( port ) return S_OK()
class FileCatalogDB(DB): __tables = {} __tables["FC_Statuses"] = { "Fields": { "StatusID": "INT AUTO_INCREMENT", "Status": "VARCHAR(32)" }, "UniqueIndexes": { "Status": ["Status"] }, "PrimaryKey":"StatusID" } def __init__( self, databaseLocation='DataManagement/FileCatalogDB', maxQueueSize=10 ): """ Standard Constructor """ # The database location can be specified in System/Database form or in just the Database name # in the DataManagement system db = databaseLocation if db.find('/') == -1: db = 'DataManagement/' + db DB.__init__(self,'FileCatalogDB',db,maxQueueSize) result = self._createTables( self.__tables ) if not result['OK']: gLogger.error( "Failed to create tables", str( self.__tables.keys() ) ) elif result['Value']: gLogger.info( "Tables created: %s" % ','.join( result['Value'] ) ) self.ugManager = None self.seManager = None self.securityManager = None self.dtree = None self.fileManager = None self.dmeta = None self.fmeta = None self.statusDict = {} def setConfig(self,databaseConfig): self.directories = {} # In memory storage of the various parameters self.users = {} self.uids = {} self.groups = {} self.gids = {} self.seNames = {} self.seids = {} self.seDefinitions = {} # Obtain some general configuration of the database self.uniqueGUID = databaseConfig['UniqueGUID'] self.globalReadAccess = databaseConfig['GlobalReadAccess'] self.lfnPfnConvention = databaseConfig['LFNPFNConvention'] if self.lfnPfnConvention == "None": self.lfnPfnConvention = False self.resolvePfn = databaseConfig['ResolvePFN'] self.umask = databaseConfig['DefaultUmask'] self.validFileStatus = databaseConfig['ValidFileStatus'] self.validReplicaStatus = databaseConfig['ValidReplicaStatus'] self.visibleFileStatus = databaseConfig['VisibleFileStatus'] self.visibleReplicaStatus = databaseConfig['VisibleReplicaStatus'] # Obtain the plugins to be used for DB interaction self. 
objectLoader = ObjectLoader() result = self.__loadCatalogComponent( databaseConfig['UserGroupManager'] ) if not result['OK']: return result self.ugManager = result['Value'] result = self.__loadCatalogComponent( databaseConfig['SEManager'] ) if not result['OK']: return result self.seManager = result['Value'] result = self.__loadCatalogComponent( databaseConfig['SecurityManager'] ) if not result['OK']: return result self.securityManager = result['Value'] result = self.__loadCatalogComponent( databaseConfig['DirectoryManager'] ) if not result['OK']: return result self.dtree = result['Value'] result = self.__loadCatalogComponent( databaseConfig['FileManager'] ) if not result['OK']: return result self.fileManager = result['Value'] result = self.__loadCatalogComponent( databaseConfig['DatasetManager'] ) if not result['OK']: return result self.datasetManager = result['Value'] result = self.__loadCatalogComponent( databaseConfig['DirectoryMetadata'] ) if not result['OK']: return result self.dmeta = result['Value'] result = self.__loadCatalogComponent( databaseConfig['FileMetadata'] ) if not result['OK']: return result self.fmeta = result['Value'] return S_OK() def __loadCatalogComponent( self, componentName ): """ Create an object of a given catalog component """ moduleName = componentName # some modules contain several implementation classes for m in ['SEManager','UserAndGroupManager','SecurityManager']: if m in componentName: moduleName = m componentPath = 'DataManagementSystem.DB.FileCatalogComponents' result = self.objectLoader.loadObject( '%s.%s' % ( componentPath, moduleName ), componentName ) if not result['OK']: gLogger.error( 'Failed to load catalog component', result['Message'] ) return result componentClass = result['Value'] component = componentClass( self ) return S_OK( component ) def setUmask(self,umask): self.umask = umask ######################################################################## # # General purpose utility methods def getStatusInt( self, status, connection = False ): """ Get integer ID of the given status string """ connection = self._getConnection( connection ) req = "SELECT StatusID FROM FC_Statuses WHERE Status = '%s';" % status res = self.db._query( req, connection ) if not res['OK']: return res if res['Value']: return S_OK( res['Value'][0][0] ) req = "INSERT INTO FC_Statuses (Status) VALUES ('%s');" % status res = self.db._update( req, connection ) if not res['OK']: return res return S_OK( res['lastRowId'] ) def getIntStatus(self,statusID,connection=False): """ Get status string for a given integer status ID """ if statusID in self.statusDict: return S_OK(self.statusDict[statusID]) connection = self._getConnection(connection) req = "SELECT StatusID,Status FROM FC_Statuses" res = self.db._query(req,connection) if not res['OK']: return res if res['Value']: for row in res['Value']: self.statusDict[int(row[0])] = row[1] if statusID in self.statusDict: return S_OK(self.statusDict[statusID]) return S_OK('Unknown') ######################################################################## # # SE based write methods # def addSE(self,seName,credDict): res = self._checkAdminPermission(credDict) if not res['OK']: return res if not res['Value']: return S_ERROR("Permission denied") return self.seManager.addSE(seName) def deleteSE(self,seName,credDict): res = self._checkAdminPermission(credDict) if not res['OK']: return res if not res['Value']: return S_ERROR("Permission denied") return self.seManager.deleteSE(seName) 
######################################################################## # # User/groups based write methods # def addUser(self,userName,credDict): res = self._checkAdminPermission(credDict) if not res['OK']: return res if not res['Value']: return S_ERROR("Permission denied") return self.ugManager.addUser(userName) def deleteUser(self,userName,credDict): res = self._checkAdminPermission(credDict) if not res['OK']: return res if not res['Value']: return S_ERROR("Permission denied") return self.ugManager.deleteUser(userName) def addGroup(self,groupName,credDict): res = self._checkAdminPermission(credDict) if not res['OK']: return res if not res['Value']: return S_ERROR("Permission denied") return self.ugManager.addGroup(groupName) def deleteGroup(self,groupName,credDict): res = self._checkAdminPermission(credDict) if not res['OK']: return res if not res['Value']: return S_ERROR("Permission denied") return self.ugManager.deleteGroup(groupName) ######################################################################## # # User/groups based read methods # def getUsers(self,credDict): res = self._checkAdminPermission(credDict) if not res['OK']: return res if not res['Value']: return S_ERROR("Permission denied") return self.ugManager.getUsers() def getGroups(self,credDict): res = self._checkAdminPermission(credDict) if not res['OK']: return res if not res['Value']: return S_ERROR("Permission denied") return self.ugManager.getGroups() ######################################################################## # # Path based read methods # def exists(self, lfns, credDict): res = self._checkPathPermissions('Read', lfns, credDict) if not res['OK']: return res failed = res['Value']['Failed'] res = self.fileManager.exists(res['Value']['Successful']) if not res['OK']: return res failed.update(res['Value']['Failed']) successful = res['Value']['Successful'] notExist = [] for lfn in res['Value']['Successful'].keys(): if not successful[lfn]: notExist.append(lfn) successful.pop(lfn) if notExist: res = self.dtree.exists(notExist) if not res['OK']: return res failed.update(res['Value']['Failed']) successful.update(res['Value']['Successful']) return S_OK( {'Successful':successful,'Failed':failed} ) def getPathPermissions(self, lfns, credDict): """ Get permissions for the given user/group to manipulate the given lfns """ res = checkArgumentFormat(lfns) if not res['OK']: return res lfns = res['Value'] return self.securityManager.getPathPermissions( lfns.keys(), credDict ) ######################################################################## # # Path based read methods # def changePathOwner(self, lfns, credDict, recursive=False): """ Change the owner of the given list of paths """ res = self._checkPathPermissions('Write', lfns, credDict) if not res['OK']: return res failed = res['Value']['Failed'] res = self.fileManager.changePathOwner(res['Value']['Successful'],credDict, recursive) if not res['OK']: return res failed.update(res['Value']['Failed']) successful = res['Value']['Successful'] return S_OK({'Successful':successful,'Failed':failed}) def changePathGroup(self, lfns, credDict, recursive=False): """ Change the group of the given list of paths """ res = self._checkPathPermissions('Write', lfns, credDict) if not res['OK']: return res failed = res['Value']['Failed'] res = self.fileManager.changePathGroup(res['Value']['Successful'],credDict, recursive) if not res['OK']: return res failed.update(res['Value']['Failed']) successful = res['Value']['Successful'] return S_OK({'Successful':successful,'Failed':failed}) def 
changePathMode(self, lfns, credDict, recursive=False): """ Change the mode of the given list of paths """ res = self._checkPathPermissions('Write', lfns, credDict) if not res['OK']: return res failed = res['Value']['Failed'] res = self.fileManager.changePathMode(res['Value']['Successful'],credDict, recursive) if not res['OK']: return res failed.update(res['Value']['Failed']) successful = res['Value']['Successful'] return S_OK({'Successful':successful,'Failed':failed}) ######################################################################## # # File based write methods # def addFile(self, lfns, credDict): res = self._checkPathPermissions('Write', lfns, credDict) if not res['OK']: return res failed = res['Value']['Failed'] res = self.fileManager.addFile(res['Value']['Successful'],credDict) if not res['OK']: return res failed.update(res['Value']['Failed']) successful = res['Value']['Successful'] return S_OK( {'Successful':successful,'Failed':failed} ) def setFileStatus(self, lfns, credDict): res = self._checkPathPermissions('Write', lfns, credDict) if not res['OK']: return res failed = res['Value']['Failed'] res = self.fileManager.setFileStatus( res['Value']['Successful'], credDict ) if not res['OK']: return res failed.update(res['Value']['Failed']) successful = res['Value']['Successful'] return S_OK( {'Successful':successful,'Failed':failed} ) def removeFile(self, lfns, credDict): res = self._checkPathPermissions('Write', lfns, credDict) if not res['OK']: return res failed = res['Value']['Failed'] res = self.fileManager.removeFile(res['Value']['Successful']) if not res['OK']: return res failed.update(res['Value']['Failed']) successful = res['Value']['Successful'] return S_OK( {'Successful':successful,'Failed':failed} ) def addReplica(self, lfns, credDict): res = self._checkPathPermissions('Write', lfns, credDict) if not res['OK']: return res failed = res['Value']['Failed'] res = self.fileManager.addReplica(res['Value']['Successful']) if not res['OK']: return res failed.update(res['Value']['Failed']) successful = res['Value']['Successful'] return S_OK( {'Successful':successful,'Failed':failed} ) def removeReplica(self, lfns, credDict): res = self._checkPathPermissions('Write', lfns, credDict) if not res['OK']: return res failed = res['Value']['Failed'] res = self.fileManager.removeReplica(res['Value']['Successful']) if not res['OK']: return res failed.update(res['Value']['Failed']) successful = res['Value']['Successful'] return S_OK( {'Successful':successful,'Failed':failed} ) def setReplicaStatus(self, lfns, credDict): res = self._checkPathPermissions('Write', lfns, credDict) if not res['OK']: return res failed = res['Value']['Failed'] res = self.fileManager.setReplicaStatus(res['Value']['Successful']) if not res['OK']: return res failed.update(res['Value']['Failed']) successful = res['Value']['Successful'] return S_OK( {'Successful':successful,'Failed':failed} ) def setReplicaHost(self, lfns, credDict): res = self._checkPathPermissions('Write', lfns, credDict) if not res['OK']: return res failed = res['Value']['Failed'] res = self.fileManager.setReplicaHost(res['Value']['Successful']) if not res['OK']: return res failed.update(res['Value']['Failed']) successful = res['Value']['Successful'] return S_OK( {'Successful':successful,'Failed':failed} ) def setFileOwner(self,lfns,credDict): res = self._checkPathPermissions('Write', lfns, credDict) if not res['OK']: return res failed = res['Value']['Failed'] res = self.fileManager.setFileOwner(res['Value']['Successful']) if not res['OK']: return res 
failed.update(res['Value']['Failed']) successful = res['Value']['Successful'] return S_OK( {'Successful':successful,'Failed':failed} ) def setFileGroup(self,lfns,credDict): res = self._checkPathPermissions('Write', lfns, credDict) if not res['OK']: return res failed = res['Value']['Failed'] res = self.fileManager.setFileGroup(res['Value']['Successful']) if not res['OK']: return res failed.update(res['Value']['Failed']) successful = res['Value']['Successful'] return S_OK( {'Successful':successful,'Failed':failed} ) def setFileMode(self,lfns,credDict): res = self._checkPathPermissions('Write', lfns, credDict) if not res['OK']: return res failed = res['Value']['Failed'] res = self.fileManager.setFileMode(res['Value']['Successful']) if not res['OK']: return res failed.update(res['Value']['Failed']) successful = res['Value']['Successful'] return S_OK( {'Successful':successful,'Failed':failed} ) def addFileAncestors(self,lfns,credDict): """ Add ancestor information for the given LFNs """ res = self._checkPathPermissions('Write', lfns, credDict) if not res['OK']: return res failed = res['Value']['Failed'] res = self.fileManager.addFileAncestors(res['Value']['Successful']) if not res['OK']: return res failed.update(res['Value']['Failed']) successful = res['Value']['Successful'] return S_OK( {'Successful':successful,'Failed':failed} ) ######################################################################## # # File based read methods # def isFile(self, lfns, credDict): res = self._checkPathPermissions('Read', lfns, credDict) if not res['OK']: return res failed = res['Value']['Failed'] res = self.fileManager.isFile(res['Value']['Successful']) if not res['OK']: return res failed.update(res['Value']['Failed']) successful = res['Value']['Successful'] return S_OK( {'Successful':successful,'Failed':failed} ) def getFileSize(self, lfns, credDict): res = self._checkPathPermissions('Read', lfns, credDict) if not res['OK']: return res failed = res['Value']['Failed'] res = self.fileManager.getFileSize(res['Value']['Successful']) if not res['OK']: return res failed.update(res['Value']['Failed']) successful = res['Value']['Successful'] return S_OK( {'Successful':successful,'Failed':failed} ) def getFileMetadata(self, lfns, credDict): res = self._checkPathPermissions('Read', lfns, credDict) if not res['OK']: return res failed = res['Value']['Failed'] res = self.fileManager.getFileMetadata(res['Value']['Successful']) if not res['OK']: return res failed.update(res['Value']['Failed']) successful = res['Value']['Successful'] return S_OK( {'Successful':successful,'Failed':failed} ) def getReplicas(self, lfns, allStatus, credDict): res = self._checkPathPermissions('Read', lfns, credDict) if not res['OK']: return res failed = res['Value']['Failed'] res = self.fileManager.getReplicas(res['Value']['Successful'],allStatus=allStatus) if not res['OK']: return res failed.update(res['Value']['Failed']) successful = res['Value']['Successful'] return S_OK( {'Successful':successful, 'Failed':failed, 'SEPrefixes': res['Value'].get( 'SEPrefixes', {} ) } ) def getReplicaStatus(self, lfns, credDict): res = self._checkPathPermissions('Read', lfns, credDict) if not res['OK']: return res failed = res['Value']['Failed'] res = self.fileManager.getReplicaStatus(res['Value']['Successful']) if not res['OK']: return res failed.update(res['Value']['Failed']) successful = res['Value']['Successful'] return S_OK( {'Successful':successful,'Failed':failed} ) def getFileAncestors(self, lfns, depths, credDict): res = 
self._checkPathPermissions('Read', lfns, credDict) if not res['OK']: return res failed = res['Value']['Failed'] res = self.fileManager.getFileAncestors(res['Value']['Successful'],depths) if not res['OK']: return res failed.update(res['Value']['Failed']) successful = res['Value']['Successful'] return S_OK( {'Successful':successful,'Failed':failed} ) def getFileDescendents(self, lfns, depths, credDict): res = self._checkPathPermissions('Read', lfns, credDict) if not res['OK']: return res failed = res['Value']['Failed'] res = self.fileManager.getFileDescendents(res['Value']['Successful'],depths) if not res['OK']: return res failed.update(res['Value']['Failed']) successful = res['Value']['Successful'] return S_OK( {'Successful':successful,'Failed':failed} ) def getFileDetails( self, lfnList, credDict ): """ Get all the metadata for the given files """ connection = False result = self.fileManager._findFiles( lfnList, connection=connection ) if not result['OK']: return result resultDict = {} fileIDDict = {} lfnDict = result['Value']['Successful'] for lfn in lfnDict: fileIDDict[lfnDict[lfn]['FileID']] = lfn result = self.fileManager._getFileMetadataByID( fileIDDict.keys(), connection=connection ) if not result['OK']: return result for fileID in result['Value']: resultDict[ fileIDDict[fileID] ] = result['Value'][fileID] result = self.fmeta._getFileUserMetadataByID( fileIDDict.keys(), credDict, connection=connection ) if not result['OK']: return result for fileID in fileIDDict: resultDict[ fileIDDict[fileID] ].setdefault( 'Metadata', {} ) if fileID in result['Value']: resultDict[ fileIDDict[fileID] ]['Metadata'] = result['Value'][fileID] return S_OK(resultDict) ######################################################################## # # Directory based Write methods # def createDirectory(self,lfns,credDict): res = self._checkPathPermissions('Write', lfns, credDict) if not res['OK']: return res failed = res['Value']['Failed'] res = self.dtree.createDirectory(res['Value']['Successful'],credDict) if not res['OK']: return res failed.update(res['Value']['Failed']) successful = res['Value']['Successful'] return S_OK( {'Successful':successful,'Failed':failed} ) def removeDirectory(self,lfns,credDict): res = self._checkPathPermissions('Write', lfns, credDict) if not res['OK']: return res failed = res['Value']['Failed'] successful = res['Value']['Successful'] if successful: res = self.dtree.removeDirectory(res['Value']['Successful'],credDict) if not res['OK']: return res failed.update(res['Value']['Failed']) successful = res['Value']['Successful'] if not successful: return S_OK( {'Successful':successful,'Failed':failed} ) else: return S_OK( {'Successful':successful,'Failed':failed} ) # Remove the directory metadata now dirIdList = [ successful[p]['DirID'] for p in successful ] result = self.dmeta.removeMetadataForDirectory( dirIdList,credDict ) if not result['OK']: return result failed.update(result['Value']['Failed']) successful = result['Value']['Successful'] return S_OK( {'Successful':successful,'Failed':failed} ) ######################################################################## # # Directory based read methods # def listDirectory(self,lfns,credDict,verbose=False): res = self._checkPathPermissions('Read', lfns, credDict) if not res['OK']: return res failed = res['Value']['Failed'] res = self.dtree.listDirectory(res['Value']['Successful'],verbose=verbose) if not res['OK']: return res failed.update(res['Value']['Failed']) successful = res['Value']['Successful'] return S_OK( 

    def isDirectory(self, lfns, credDict):
        res = self._checkPathPermissions('Read', lfns, credDict)
        if not res['OK']:
            return res
        failed = res['Value']['Failed']
        res = self.dtree.isDirectory(res['Value']['Successful'])
        if not res['OK']:
            return res
        failed.update(res['Value']['Failed'])
        successful = res['Value']['Successful']
        return S_OK({'Successful': successful, 'Failed': failed})

    def getDirectoryReplicas(self, lfns, allStatus, credDict):
        res = self._checkPathPermissions('Read', lfns, credDict)
        if not res['OK']:
            return res
        failed = res['Value']['Failed']
        res = self.dtree.getDirectoryReplicas(res['Value']['Successful'], allStatus)
        if not res['OK']:
            return res
        failed.update(res['Value']['Failed'])
        successful = res['Value']['Successful']
        return S_OK({'Successful': successful,
                     'Failed': failed,
                     'SEPrefixes': res['Value'].get('SEPrefixes', {})})

    def getDirectorySize(self, lfns, longOutput, fromFiles, credDict):
        res = self._checkPathPermissions('Read', lfns, credDict)
        if not res['OK']:
            return res
        failed = res['Value']['Failed']
        res = self.dtree.getDirectorySize(res['Value']['Successful'], longOutput, fromFiles)
        if not res['OK']:
            return res
        failed.update(res['Value']['Failed'])
        successful = res['Value']['Successful']
        queryTime = res['Value'].get('QueryTime', -1.)
        return S_OK({'Successful': successful, 'Failed': failed, 'QueryTime': queryTime})

    def rebuildDirectoryUsage(self):
        """Rebuild the DirectoryUsage table from scratch"""
        result = self.dtree._rebuildDirectoryUsage()
        return result

    def repairCatalog(self, directoryFlag=True, credDict={}):
        """Repair catalog inconsistencies"""
        result = S_OK()
        if directoryFlag:
            result = self.dtree.recoverOrphanDirectories(credDict)
        return result

    ########################################################################
    #
    #  Catalog metadata methods
    #

    def setMetadata(self, path, metadataDict, credDict):
        """Add metadata to the given path"""
        res = self._checkPathPermissions('Write', path, credDict)
        if not res['OK']:
            return res
        if not res['Value']['Successful']:
            return S_ERROR('Permission denied')
        if not res['Value']['Successful'][path]:
            return S_ERROR('Permission denied')
        result = self.dtree.isDirectory({path: True})
        if not result['OK']:
            return result
        if not result['Value']['Successful']:
            return S_ERROR('Failed to determine the path type')
        if result['Value']['Successful'][path]:
            # This is a directory
            return self.dmeta.setMetadata(path, metadataDict, credDict)
        else:
            # This is a file
            return self.fmeta.setMetadata(path, metadataDict, credDict)

    def setMetadataBulk(self, pathMetadataDict, credDict):
        """Add metadata for the given paths"""
        successful = {}
        failed = {}
        for path, metadataDict in pathMetadataDict.items():
            result = self.setMetadata(path, metadataDict, credDict)
            if result['OK']:
                successful[path] = True
            else:
                failed[path] = result['Message']
        return S_OK({'Successful': successful, 'Failed': failed})

    def removeMetadata(self, path, metadata, credDict):
        """Remove the specified metadata from the given path"""
        res = self._checkPathPermissions('Write', path, credDict)
        if not res['OK']:
            return res
        if not res['Value']['Successful']:
            return S_ERROR('Permission denied')
        if not res['Value']['Successful'][path]:
            return S_ERROR('Permission denied')
        result = self.dtree.isDirectory({path: True})
        if not result['OK']:
            return result
        if not result['Value']['Successful']:
            return S_ERROR('Failed to determine the path type')
        if result['Value']['Successful'][path]:
            # This is a directory
            return self.dmeta.removeMetadata(path, metadata, credDict)
        else:
            # This is a file
            return self.fmeta.removeMetadata(path, metadata, credDict)

    ########################################################################
    #
    #  Catalog admin methods
    #

    def getCatalogCounters(self, credDict):
        counterDict = {}
        res = self._checkAdminPermission(credDict)
        if not res['OK']:
            return res
        if not res['Value']:
            return S_ERROR("Permission denied")
        # res = self.dtree.getDirectoryCounters()
        # if not res['OK']:
        #     return res
        # counterDict.update(res['Value'])
        res = self.fileManager.getFileCounters()
        if not res['OK']:
            return res
        counterDict.update(res['Value'])
        res = self.fileManager.getReplicaCounters()
        if not res['OK']:
            return res
        counterDict.update(res['Value'])
        res = self.dtree.getDirectoryCounters()
        if not res['OK']:
            return res
        counterDict.update(res['Value'])
        return S_OK(counterDict)

    ########################################################################
    #
    #  Security based methods
    #

    def _checkAdminPermission(self, credDict):
        return self.securityManager.hasAdminAccess(credDict)

    def _checkPathPermissions(self, operation, lfns, credDict):
        res = checkArgumentFormat(lfns)
        if not res['OK']:
            return res
        lfns = res['Value']
        res = self.securityManager.hasAccess(operation, lfns.keys(), credDict)
        if not res['OK']:
            return res
        # Do not consider those paths for which we failed to determine access
        failed = res['Value']['Failed']
        for lfn in failed.keys():
            lfns.pop(lfn)
        # Do not consider those paths for which access is denied
        successful = {}
        for lfn, access in res['Value']['Successful'].items():
            if not access:
                failed[lfn] = 'Permission denied'
            else:
                successful[lfn] = lfns[lfn]
        return S_OK({'Successful': successful, 'Failed': failed})
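
# ----------------------------------------------------------------------
# Illustrative sketch, not part of the catalog code above: every bulk method
# in this interface returns S_OK({'Successful': ..., 'Failed': ...}) after
# merging the permission-check failures with the manager-level failures.
# A caller would typically unpack that envelope as shown here; the 'catalog',
# 'lfns' and 'credDict' arguments are hypothetical stand-ins.
def reportBulkResult(catalog, lfns, credDict):
    result = catalog.isFile(lfns, credDict)
    if not result['OK']:
        # The call failed as a whole, e.g. a DB error
        print("isFile failed: %s" % result['Message'])
        return
    for lfn, isFile in result['Value']['Successful'].items():
        print("%s -> isFile=%s" % (lfn, isFile))
    for lfn, reason in result['Value']['Failed'].items():
        print("%s -> failed: %s" % (lfn, reason))
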

    def setConfig(self, databaseConfig):

        self.directories = {}
        # In memory storage of the various parameters
        self.users = {}
        self.uids = {}
        self.groups = {}
        self.gids = {}
        self.seNames = {}
        self.seids = {}
        self.seDefinitions = {}

        # Obtain some general configuration of the database
        self.uniqueGUID = databaseConfig['UniqueGUID']
        self.globalReadAccess = databaseConfig['GlobalReadAccess']
        self.lfnPfnConvention = databaseConfig['LFNPFNConvention']
        if self.lfnPfnConvention == "None":
            self.lfnPfnConvention = False
        self.resolvePfn = databaseConfig['ResolvePFN']
        self.umask = databaseConfig['DefaultUmask']
        self.validFileStatus = databaseConfig['ValidFileStatus']
        self.validReplicaStatus = databaseConfig['ValidReplicaStatus']
        self.visibleFileStatus = databaseConfig['VisibleFileStatus']
        self.visibleReplicaStatus = databaseConfig['VisibleReplicaStatus']

        # Obtain the plugins to be used for DB interaction
        self.objectLoader = ObjectLoader()

        result = self.__loadCatalogComponent(databaseConfig['UserGroupManager'])
        if not result['OK']:
            return result
        self.ugManager = result['Value']

        result = self.__loadCatalogComponent(databaseConfig['SEManager'])
        if not result['OK']:
            return result
        self.seManager = result['Value']

        result = self.__loadCatalogComponent(databaseConfig['SecurityManager'])
        if not result['OK']:
            return result
        self.securityManager = result['Value']

        result = self.__loadCatalogComponent(databaseConfig['DirectoryManager'])
        if not result['OK']:
            return result
        self.dtree = result['Value']

        result = self.__loadCatalogComponent(databaseConfig['FileManager'])
        if not result['OK']:
            return result
        self.fileManager = result['Value']

        result = self.__loadCatalogComponent(databaseConfig['DatasetManager'])
        if not result['OK']:
            return result
        self.datasetManager = result['Value']

        result = self.__loadCatalogComponent(databaseConfig['DirectoryMetadata'])
        if not result['OK']:
            return result
        self.dmeta = result['Value']

        result = self.__loadCatalogComponent(databaseConfig['FileMetadata'])
        if not result['OK']:
            return result
        self.fmeta = result['Value']

        return S_OK()
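
# ----------------------------------------------------------------------
# A minimal sketch of how the private __loadCatalogComponent helper used above
# could be built on ObjectLoader, in the same spirit as the other loaders in
# this document.  It is written here as a free function for illustration; the
# module path 'DataManagementSystem.DB.FileCatalogComponents' and the
# constructor signature are assumptions, not the original implementation.
from DIRAC import S_OK, S_ERROR, gLogger
from DIRAC.Core.Utilities.ObjectLoader import ObjectLoader


def loadCatalogComponent(componentName, database):
    objectLoader = ObjectLoader()
    result = objectLoader.loadObject(
        "DataManagementSystem.DB.FileCatalogComponents.%s" % componentName,
        componentName)
    if not result['OK']:
        gLogger.error("Failed to load catalog component", result['Message'])
        return result
    componentClass = result['Value']
    try:
        # Hypothetical constructor: the real manager classes may take
        # different arguments than just the parent database object
        component = componentClass(database)
    except Exception as e:
        return S_ERROR("Can't instantiate %s: %r" % (componentName, e))
    return S_OK(component)
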

    def setUp(self):
        self.ol = ObjectLoader()
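
# ----------------------------------------------------------------------
# For context, a minimal self-contained test that this kind of fixture could
# support; the suite name and the loaded target ("Core.Utilities.List",
# "fromChar") are illustrative assumptions, not the original test body.
import unittest
from DIRAC.Core.Utilities.ObjectLoader import ObjectLoader


class ObjectLoaderSuccessScenario(unittest.TestCase):

    def setUp(self):
        self.ol = ObjectLoader()

    def test_loadObject(self):
        # Loading an object that ships with DIRAC should return S_OK
        result = self.ol.loadObject("Core.Utilities.List", "fromChar")
        self.assertTrue(result['OK'], result.get('Message', ''))


if __name__ == "__main__":
    unittest.main()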