def initializeFileCatalogHandler(serviceInfo):
    """Handler initialisation.

    Creates the module-global FileCatalogDB at the configured location, then
    collects the manager-plugin choices and the general catalog options from
    the service configuration and applies them via setConfig.

    :param serviceInfo: service info dictionary
    :return: S_OK/S_ERROR as returned by FileCatalogDB.setConfig
    """
    global gFileCatalogDB
    dbLocation = getServiceOption(serviceInfo, 'Database', 'DataManagement/FileCatalogDB')
    gFileCatalogDB = FileCatalogDB(dbLocation)

    databaseConfig = {}
    # Obtain the plugins to be used for DB interaction
    gLogger.info("Initializing with FileCatalog with following managers:")
    defaultManagers = {'UserGroupManager': 'UserAndGroupManagerDB',
                       'SEManager': 'SEManagerDB',
                       'SecurityManager': 'NoSecurityManager',
                       'DirectoryManager': 'DirectoryLevelTree',
                       'FileManager': 'FileManager',
                       'DirectoryMetadata': 'DirectoryMetadata',
                       'FileMetadata': 'FileMetadata',
                       'DatasetManager': 'DatasetManager'}
    # builtin sorted() replaces the project sortList helper (same ordering)
    for configKey in sorted(defaultManagers):
        defaultValue = defaultManagers[configKey]
        configValue = getServiceOption(serviceInfo, configKey, defaultValue)
        gLogger.info("%-20s : %-20s" % (str(configKey), str(configValue)))
        databaseConfig[configKey] = configValue

    # Obtain some general configuration of the database
    gLogger.info("Initializing the FileCatalog with the following configuration:")
    # 0o775 is the octal literal form valid on both Python 2.6+ and Python 3
    # (the original 0775 spelling is Python-2-only)
    defaultConfig = {'UniqueGUID': False,
                     'GlobalReadAccess': True,
                     'LFNPFNConvention': 'Strong',
                     'ResolvePFN': True,
                     'DefaultUmask': 0o775,
                     'ValidFileStatus': ['AprioriGood', 'Trash', 'Removing', 'Probing'],
                     'ValidReplicaStatus': ['AprioriGood', 'Trash', 'Removing', 'Probing'],
                     'VisibleFileStatus': ['AprioriGood'],
                     'VisibleReplicaStatus': ['AprioriGood']}
    for configKey in sorted(defaultConfig):
        defaultValue = defaultConfig[configKey]
        configValue = getServiceOption(serviceInfo, configKey, defaultValue)
        gLogger.info("%-20s : %-20s" % (str(configKey), str(configValue)))
        databaseConfig[configKey] = configValue

    res = gFileCatalogDB.setConfig(databaseConfig)
    return res
class ReqManagerHandler(RequestHandler):
    """
    .. class:: ReqManagerHandler

    RequestDB interface in the DISET framework.
    """

    # request validator
    __validator = None
    # request DB instance
    __requestDB = None

    @classmethod
    def initializeHandler(cls, serviceInfoDict):
        """Initialize handler: connect to the RequestDB and create its tables.

        :param serviceInfoDict: service info dictionary
        :return: S_OK/S_ERROR
        """
        try:
            cls.__requestDB = RequestDB()
        # 'except X as e' works on Python 2.6+ and 3 (the original
        # 'except RuntimeError, error' is Python-2-only syntax)
        except RuntimeError as error:
            gLogger.exception(error)
            return S_ERROR(error)
        # If there is a constant delay to be applied to each request
        cls.constantRequestDelay = getServiceOption(serviceInfoDict, 'ConstantRequestDelay', 0)
        # create tables for empty db
        return cls.__requestDB.createTables()
def initializeResourceManagementHandler(serviceInfo):
    """
    Handler initialization, where we:
      dynamically load ResourceManagement database plugin module, as advised by the config,
      (assumes that the module name and a class name are the same)
      set the ResourceManagementDB as global db.

    :param serviceInfo: service info dictionary
    :return: standard Dirac return object
    """
    gLogger.debug("ServiceInfo", serviceInfo)
    gLogger.debug("Initializing ResourceManagement Service with the following DB component:")
    # option name and default class name are deliberately identical
    defaultOption, defaultClass = 'ResourceManagementDB', 'ResourceManagementDB'
    configValue = getServiceOption(serviceInfo, defaultOption, defaultClass)
    gLogger.debug("Option:%-20s Class:%-20s" % (str(defaultOption), str(configValue)))
    result = loadResourceStatusComponent(configValue, configValue)
    if not result['OK']:
        return result
    # expose the loaded DB instance at module scope for the handler methods
    global db
    db = result['Value']
    # re-synchronize the in-memory state whenever a new CS version is published
    syncObject = Synchronizer.Synchronizer()
    gConfig.addListenerToNewVersionEvent(syncObject.sync)
    return S_OK()
def initializeHandler(cls, serviceInfo):
    """Handler initialization.

    Creates the FileCatalogDB at the configured location, then pushes the
    manager-plugin selection and the general catalog options (both read
    from the service configuration) into it via setConfig.
    """
    dbLocation = getServiceOption(serviceInfo, "Database", "DataManagement/FileCatalogDB")
    cls.fileCatalogDB = FileCatalogDB(dbLocation, parentLogger=cls.log)

    databaseConfig = {}

    # Plugin managers used for DB interaction
    cls.log.info("Initializing with FileCatalog with following managers:")
    managerDefaults = {
        "UserGroupManager": "UserAndGroupManagerDB",
        "SEManager": "SEManagerDB",
        "SecurityManager": "NoSecurityManager",
        "DirectoryManager": "DirectoryLevelTree",
        "FileManager": "FileManager",
        "DirectoryMetadata": "DirectoryMetadata",
        "FileMetadata": "FileMetadata",
        "DatasetManager": "DatasetManager",
    }
    for option, fallback in sorted(managerDefaults.items()):
        value = getServiceOption(serviceInfo, option, fallback)
        cls.log.info("%-20s : %-20s" % (str(option), str(value)))
        databaseConfig[option] = value

    # General configuration of the database
    cls.log.info("Initializing the FileCatalog with the following configuration:")
    generalDefaults = {
        "UniqueGUID": False,
        "GlobalReadAccess": True,
        "LFNPFNConvention": "Strong",
        "ResolvePFN": True,
        "DefaultUmask": 0o775,
        "ValidFileStatus": ["AprioriGood", "Trash", "Removing", "Probing"],
        "ValidReplicaStatus": ["AprioriGood", "Trash", "Removing", "Probing"],
        "VisibleFileStatus": ["AprioriGood"],
        "VisibleReplicaStatus": ["AprioriGood"],
    }
    for option, fallback in sorted(generalDefaults.items()):
        value = getServiceOption(serviceInfo, option, fallback)
        cls.log.info("%-20s : %-20s" % (str(option), str(value)))
        databaseConfig[option] = value

    return cls.fileCatalogDB.setConfig(databaseConfig)
def initializeFileCatalogHandler(serviceInfo):
    """Handler initialisation.

    Creates the module-global FileCatalogDB and configures it from the
    service options (manager plugins plus general catalog settings).

    :param serviceInfo: service info dictionary
    :return: S_OK/S_ERROR as returned by FileCatalogDB.setConfig
    """
    global gFileCatalogDB
    dbLocation = getServiceOption(serviceInfo, 'Database', 'DataManagement/FileCatalogDB')
    gFileCatalogDB = FileCatalogDB(dbLocation)

    databaseConfig = {}
    # Obtain the plugins to be used for DB interaction
    gLogger.info("Initializing with FileCatalog with following managers:")
    defaultManagers = {'UserGroupManager': 'UserAndGroupManagerDB',
                       'SEManager': 'SEManagerDB',
                       'SecurityManager': 'NoSecurityManager',
                       'DirectoryManager': 'DirectoryLevelTree',
                       'FileManager': 'FileManager',
                       'DirectoryMetadata': 'DirectoryMetadata',
                       'FileMetadata': 'FileMetadata',
                       'DatasetManager': 'DatasetManager'}
    # builtin sorted() replaces the project sortList helper (same ordering)
    for configKey in sorted(defaultManagers):
        defaultValue = defaultManagers[configKey]
        configValue = getServiceOption(serviceInfo, configKey, defaultValue)
        gLogger.info("%-20s : %-20s" % (str(configKey), str(configValue)))
        databaseConfig[configKey] = configValue

    # Obtain some general configuration of the database
    gLogger.info("Initializing the FileCatalog with the following configuration:")
    # 0o775 is the octal literal form valid on Python 2.6+ and Python 3.
    # 'Trash' added to ValidReplicaStatus for consistency with the other
    # copies of this initializer (file and replica status sets match there).
    defaultConfig = {'UniqueGUID': False,
                     'GlobalReadAccess': True,
                     'LFNPFNConvention': 'Strong',
                     'ResolvePFN': True,
                     'DefaultUmask': 0o775,
                     'ValidFileStatus': ['AprioriGood', 'Trash', 'Removing', 'Probing'],
                     'ValidReplicaStatus': ['AprioriGood', 'Trash', 'Removing', 'Probing'],
                     'VisibleFileStatus': ['AprioriGood'],
                     'VisibleReplicaStatus': ['AprioriGood']}
    for configKey in sorted(defaultConfig):
        defaultValue = defaultConfig[configKey]
        configValue = getServiceOption(serviceInfo, configKey, defaultValue)
        gLogger.info("%-20s : %-20s" % (str(configKey), str(configValue)))
        databaseConfig[configKey] = configValue

    res = gFileCatalogDB.setConfig(databaseConfig)
    return res
def initializeHandler(cls, serviceInfoDict):
    """Initialize the handler: create the FTS3 database connection.

    The connection pool size is taken from the 'MaxThreads' service option.
    """
    try:
        poolSize = getServiceOption(serviceInfoDict, 'MaxThreads', 15)
        cls.fts3db = FTS3DB(pool_size=poolSize)
    except RuntimeError as error:
        gLogger.exception(error)
        return S_ERROR(error)
    # make sure the schema exists when starting from an empty database
    return cls.fts3db.createTables()
def initializeHandler(cls, serviceInfoDict):
    """Initialize handler: connect to the FTS3 DB and ensure its tables exist."""
    try:
        # pool size is configurable through the 'MaxThreads' service option
        threadCount = getServiceOption(serviceInfoDict, 'MaxThreads', 15)
        cls.fts3db = FTS3DB(pool_size=threadCount)
    except RuntimeError as exc:
        gLogger.exception(exc)
        return S_ERROR(exc)
    # create tables for an empty db
    return cls.fts3db.createTables()
def initializeHandler(cls, serviceInfoDict):
    """Initialize handler: register monitoring counters and read the sweep size."""
    gLogger.notice("CacheDirectory: %s" % cls.cacheDir())
    # per-minute summed counters published to the framework monitoring
    for activity, description in (("reqSwept", "Request successfully swept"),
                                  ("reqFailed", "Request forward failed"),
                                  ("reqReceived", "Request received")):
        gMonitor.registerActivity(activity, description, "ReqProxy", "Requests/min", gMonitor.OP_SUM)
    cls.sweepSize = getServiceOption(serviceInfoDict, "SweepSize", 10)
    gLogger.notice("SweepSize: %s" % cls.sweepSize)
    return S_OK()
def initializeHandler(cls, svcInfoDict):
    """Initialize the accounting handler.

    Connects the multi-accounting DB; unless running read-only
    (RunBucketing=False), enables auto-compaction and re-queues pending
    records, then schedules periodic loading of pending records.
    """
    dbSection = PathFinder.getDatabaseSection("Accounting/MultiDB")
    cls.__acDB = MultiAccountingDB(dbSection)
    # several services may run in read-only mode; those do not bucket
    cls.runBucketing = getServiceOption(svcInfoDict, "RunBucketing", True)
    if cls.runBucketing:
        cls.__acDB.autoCompactDB()  # pylint: disable=no-member
        result = cls.__acDB.markAllPendingRecordsAsNotTaken()  # pylint: disable=no-member
        if not result["OK"]:
            return result
    gThreadScheduler.addPeriodicTask(60, cls.__acDB.loadPendingRecords)  # pylint: disable=no-member
    return S_OK()
def initializeHandler(cls, svcInfoDict):
    """Initialize the accounting handler: connect the multi-DB backend,
    optionally enable bucketing, and schedule pending-record loading."""
    cls.__acDB = MultiAccountingDB(PathFinder.getDatabaseSection("Accounting/MultiDB"))
    # we can run multiple services in read-only mode; in that case we do not bucket
    cls.runBucketing = getServiceOption(svcInfoDict, 'RunBucketing', True)
    if cls.runBucketing:
        cls.__acDB.autoCompactDB()  # pylint: disable=no-member
        markResult = cls.__acDB.markAllPendingRecordsAsNotTaken()  # pylint: disable=no-member
        if not markResult['OK']:
            return markResult
    gThreadScheduler.addPeriodicTask(60, cls.__acDB.loadPendingRecords)  # pylint: disable=no-member
    return S_OK()
def initializeFileCatalogHandler(serviceInfo):
    """Handler initialisation: create the global FileCatalogDB and collect its configuration."""
    global gFileCatalogDB
    dbLocation = getServiceOption(serviceInfo, "Database", "DataManagement/FileCatalogDB")
    gFileCatalogDB = FileCatalogDB(dbLocation)

    databaseConfig = {}
    # Obtain the plugins to be used for DB interaction
    gLogger.info("Initializing with FileCatalog with following managers:")
    defaultManagers = {
        "UserGroupManager": "UserAndGroupManagerDB",
        "SEManager": "SEManagerDB",
        "SecurityManager": "NoSecurityManager",
        "DirectoryManager": "DirectoryLevelTree",
        "FileManager": "FileManager",
        "DirectoryMetadata": "DirectoryMetadata",
        "FileMetadata": "FileMetadata",
        "DatasetManager": "DatasetManager",
    }
    # NOTE(review): sortList is a project helper, presumably equivalent to sorted() — confirm
    for configKey in sortList(defaultManagers.keys()):
        defaultValue = defaultManagers[configKey]
        configValue = getServiceOption(serviceInfo, configKey, defaultValue)
        gLogger.info("%-20s : %-20s" % (str(configKey), str(configValue)))
        databaseConfig[configKey] = configValue
    # Obtain some general configuration of the database
    gLogger.info("Initializing the FileCatalog with the following configuration:")
    defaultConfig = {
        "UniqueGUID": False,
        "GlobalReadAccess": True,
        "LFNPFNConvention": "Strong",
        "ResolvePFN": True,
        "DefaultUmask": 0775,  # NOTE(review): Python-2-only octal literal (0o775 on py3)
        "ValidFileStatus": ["AprioriGood", "Trash", "Removing", "Probing"],
        "ValidReplicaStatus": ["AprioriGood", "Trash", "Removing", "Probing"],
        "VisibleFileStatus": ["AprioriGood"],
        "VisibleReplicaStatus": ["AprioriGood"],
    }
    # NOTE(review): in this visible span defaultConfig is built but never applied — there is no
    # second option loop, no setConfig call and no return; looks truncated. Confirm against the
    # full file before relying on this function.
def initializeStorageElementHandler(serviceInfo):
    """Initialize Storage Element global settings.

    Reads BasePath (mandatory), UseTokens and MaxStorageSize from the
    service options and stores them in module-level globals.

    :param serviceInfo: service info dictionary
    :return: S_OK, or S_ERROR when no base path is configured
    """
    global BASE_PATH
    global USE_TOKENS
    global MAX_STORAGE_SIZE

    BASE_PATH = getServiceOption(serviceInfo, "BasePath", BASE_PATH)
    if not BASE_PATH:
        gLogger.error('Failed to get the base path')
        return S_ERROR('Failed to get the base path')
    mkDir(BASE_PATH)
    # option name fixed: the original "%UseTokens" could never match a CS option
    USE_TOKENS = getServiceOption(serviceInfo, "UseTokens", USE_TOKENS)
    # MaxStorageSize is configured in MB, kept internally in bytes
    MAX_STORAGE_SIZE = convertSizeUnits(getServiceOption(serviceInfo, "MaxStorageSize", MAX_STORAGE_SIZE), 'MB', 'B')

    gLogger.info('Starting DIRAC Storage Element')
    gLogger.info('Base Path: %s' % BASE_PATH)
    gLogger.info('Max size: %d Bytes' % MAX_STORAGE_SIZE)
    gLogger.info('Use access control tokens: ' + str(USE_TOKENS))
    return S_OK()
def initializeStorageElementHandler(serviceInfo):
    """Initialize Storage Element global settings from the service options."""
    global BASE_PATH
    global USE_TOKENS
    global MAX_STORAGE_SIZE

    # the base path is mandatory: fail early if it is not configured
    BASE_PATH = getServiceOption(serviceInfo, "BasePath", "")
    if not BASE_PATH:
        gLogger.error("Failed to get the base path")
        return S_ERROR("Failed to get the base path")
    mkDir(BASE_PATH)

    USE_TOKENS = getServiceOption(serviceInfo, "UseTokens", USE_TOKENS)
    # configured in MB, stored internally in bytes
    configuredSize = getServiceOption(serviceInfo, "MaxStorageSize", MAX_STORAGE_SIZE)
    MAX_STORAGE_SIZE = convertSizeUnits(configuredSize, "MB", "B")

    gLogger.info("Starting DIRAC Storage Element")
    gLogger.info("Base Path: %s" % BASE_PATH)
    gLogger.info("Max size: %d Bytes" % MAX_STORAGE_SIZE)
    gLogger.info("Use access control tokens: " + str(USE_TOKENS))
    return S_OK()
def initializeStorageElementHandler(serviceInfo):
    """Initialize Storage Element global settings.

    Reads BasePath (mandatory, created on disk if absent), UseTokens and
    MaxStorageSize (in MB) from the service options into module globals.

    :param serviceInfo: service info dictionary
    :return: S_OK, or S_ERROR when no base path is configured
    """
    global BASE_PATH
    global USE_TOKENS
    global MAX_STORAGE_SIZE

    BASE_PATH = getServiceOption(serviceInfo, "BasePath", BASE_PATH)
    if not BASE_PATH:
        gLogger.error("Failed to get the base path")
        return S_ERROR("Failed to get the base path")
    if not os.path.exists(BASE_PATH):
        os.makedirs(BASE_PATH)
    # option name fixed: the original "%UseTokens" could never match a CS option
    USE_TOKENS = getServiceOption(serviceInfo, "UseTokens", USE_TOKENS)
    MAX_STORAGE_SIZE = getServiceOption(serviceInfo, "MaxStorageSize", MAX_STORAGE_SIZE)

    gLogger.info("Starting DIRAC Storage Element")
    gLogger.info("Base Path: %s" % BASE_PATH)
    gLogger.info("Max size: %d MB" % MAX_STORAGE_SIZE)
    gLogger.info("Use access control tokens: " + str(USE_TOKENS))
    return S_OK()
def initializeHandler(cls, serviceInfoDict):
    """Initialize handler: connect to the RequestDB and ensure its tables exist."""
    try:
        cls.__requestDB = RequestDB()
    except RuntimeError as exc:
        gLogger.exception(exc)
        return S_ERROR(exc)
    # optional fixed delay applied to each request
    cls.constantRequestDelay = getServiceOption(serviceInfoDict, 'ConstantRequestDelay', 0)
    # create tables for an empty db
    return cls.__requestDB.createTables()
def initializeHandler(cls, serviceInfoDict):
    """Dynamically load the ResourceManagement DB plugin named in the config.

    The module name and the class name are assumed to be identical; the
    loaded instance is stored on the handler class.

    :param serviceInfoDict: service info dictionary
    :return: standard Dirac return object
    """
    pluginName = getServiceOption(serviceInfoDict, 'ResourceManagementDB', 'ResourceManagementDB')
    loaded = loadResourceStatusComponent(pluginName, pluginName)
    if not loaded['OK']:
        return loaded
    cls.db = loaded['Value']
    # re-synchronize whenever a new CS version is published
    syncObject = Synchronizer.Synchronizer()
    gConfig.addListenerToNewVersionEvent(syncObject.sync)
    return S_OK()
def initializeHandler(cls, serviceInfoDict):
    """Set up the ProxyDB backend and schedule its housekeeping tasks."""
    useMyProxy = cls.srv_getCSOption("UseMyProxy", False)
    mailFrom = getServiceOption(serviceInfoDict, "MailFrom", DEFAULT_MAIL_FROM)
    try:
        result = ObjectLoader().loadObject("FrameworkSystem.DB.ProxyDB")
        if not result["OK"]:
            gLogger.error("Failed to load ProxyDB class: %s" % result["Message"])
            return result
        cls.__proxyDB = result["Value"](useMyProxy=useMyProxy, mailFrom=mailFrom)
    except RuntimeError as exc:
        return S_ERROR("Can't connect to ProxyDB: %s" % exc)
    # housekeeping: tokens/requests every 15 min (first run after 15 min),
    # logs every 6 h, expired proxies hourly
    for period, task, elapsed in ((900, cls.__proxyDB.purgeExpiredTokens, 900),
                                  (900, cls.__proxyDB.purgeExpiredRequests, 900),
                                  (21600, cls.__proxyDB.purgeLogs, None),
                                  (3600, cls.__proxyDB.purgeExpiredProxies, None)):
        if elapsed is None:
            gThreadScheduler.addPeriodicTask(period, task)
        else:
            gThreadScheduler.addPeriodicTask(period, task, elapsedTime=elapsed)
    if useMyProxy:
        gLogger.info("MyProxy: %s\n MyProxy Server: %s" % (useMyProxy, cls.__proxyDB.getMyProxyServer()))
    return S_OK()
def initializeHandler(cls, serviceInfoDict):
    """Load the ResourceStatus DB plugin named in the configuration.

    The module name and the class name are assumed to be identical; the
    loaded instance is stored on the handler class.

    :param serviceInfoDict: service info dictionary
    :return: standard Dirac return object
    """
    pluginName = getServiceOption(serviceInfoDict, "ResourceStatusDB", "ResourceStatusDB")
    loaded = loadResourceStatusComponent(pluginName, pluginName, parentLogger=cls.log)
    if not loaded["OK"]:
        return loaded
    cls.db = loaded["Value"]
    return S_OK()
def initializeHandler(cls, serviceInfoDict):
    """Set up the ProxyDB backend for this handler."""
    useMyProxy = cls.srv_getCSOption("UseMyProxy", False)
    mailFrom = getServiceOption(serviceInfoDict, "MailFrom", DEFAULT_MAIL_FROM)
    try:
        loadResult = ObjectLoader().loadObject("FrameworkSystem.DB.ProxyDB")
        if not loadResult["OK"]:
            gLogger.error("Failed to load ProxyDB class: %s" % loadResult["Message"])
            return loadResult
        proxyDBClass = loadResult["Value"]
        cls.__proxyDB = proxyDBClass(useMyProxy=useMyProxy, mailFrom=mailFrom, parentLogger=cls.log)
    except RuntimeError as exc:
        return S_ERROR("Can't connect to ProxyDB", repr(exc))
    if useMyProxy:
        gLogger.info("MyProxy: %s\n MyProxy Server: %s" % (useMyProxy, cls.__proxyDB.getMyProxyServer()))
    return S_OK()
def initializeHandler(cls, serviceInfo):
    """Handler initialization.

    Creates the FileCatalogDB, configures it from the service options
    (manager plugins and general catalog settings), and registers the
    per-operation monitoring counters.
    """
    dbLocation = getServiceOption(serviceInfo, 'Database', 'DataManagement/FileCatalogDB')
    cls.fileCatalogDB = FileCatalogDB(dbLocation)

    databaseConfig = {}

    # Plugin managers used for DB interaction
    cls.log.info("Initializing with FileCatalog with following managers:")
    managerDefaults = {'UserGroupManager': 'UserAndGroupManagerDB',
                       'SEManager': 'SEManagerDB',
                       'SecurityManager': 'NoSecurityManager',
                       'DirectoryManager': 'DirectoryLevelTree',
                       'FileManager': 'FileManager',
                       'DirectoryMetadata': 'DirectoryMetadata',
                       'FileMetadata': 'FileMetadata',
                       'DatasetManager': 'DatasetManager'}
    for option, fallback in sorted(managerDefaults.items()):
        value = getServiceOption(serviceInfo, option, fallback)
        cls.log.info("%-20s : %-20s" % (str(option), str(value)))
        databaseConfig[option] = value

    # General configuration of the database
    cls.log.info("Initializing the FileCatalog with the following configuration:")
    generalDefaults = {'UniqueGUID': False,
                       'GlobalReadAccess': True,
                       'LFNPFNConvention': 'Strong',
                       'ResolvePFN': True,
                       'DefaultUmask': 0o775,
                       'ValidFileStatus': ['AprioriGood', 'Trash', 'Removing', 'Probing'],
                       'ValidReplicaStatus': ['AprioriGood', 'Trash', 'Removing', 'Probing'],
                       'VisibleFileStatus': ['AprioriGood'],
                       'VisibleReplicaStatus': ['AprioriGood']}
    for option, fallback in sorted(generalDefaults.items()):
        value = getServiceOption(serviceInfo, option, fallback)
        cls.log.info("%-20s : %-20s" % (str(option), str(value)))
        databaseConfig[option] = value

    res = cls.fileCatalogDB.setConfig(databaseConfig)

    # Per-operation monitoring counters, all summed per minute
    for name, description, unit in (
            ("AddFile", "Amount of addFile calls", "calls/min"),
            ("AddFileSuccessful", "Files successfully added", "files/min"),
            ("AddFileFailed", "Files failed to add", "files/min"),
            ("RemoveFile", "Amount of removeFile calls", "calls/min"),
            ("RemoveFileSuccessful", "Files successfully removed", "files/min"),
            ("RemoveFileFailed", "Files failed to remove", "files/min"),
            ("AddReplica", "Amount of addReplica calls", "calls/min"),
            ("AddReplicaSuccessful", "Replicas successfully added", "replicas/min"),
            ("AddReplicaFailed", "Replicas failed to add", "replicas/min"),
            ("RemoveReplica", "Amount of removeReplica calls", "calls/min"),
            ("RemoveReplicaSuccessful", "Replicas successfully removed", "replicas/min"),
            ("RemoveReplicaFailed", "Replicas failed to remove", "replicas/min"),
            ("ListDirectory", "Amount of listDirectory calls", "calls/min")):
        gMonitor.registerActivity(name, description, "FileCatalogHandler", unit, gMonitor.OP_SUM)

    return res
def initialize(self):
    """Build the service URL, load the handler, set up threading/messaging and
    monitoring, run the handler's static initialization, then load its actions."""
    # Build the URLs
    self._url = self._cfg.getURL()
    if not self._url:
        return S_ERROR("Could not build service URL for %s" % self._name)
    gLogger.verbose("Service URL is %s" % self._url)
    # Load handler
    result = self._loadHandlerInit()
    if not result['OK']:
        return result
    self._handler = result['Value']
    # Initialize lock manager
    self._lockManager = LockManager(self._cfg.getMaxWaitingPetitions())
    # TODO: remove ThreadPool
    # two execution back-ends coexist behind the useThreadPoolExecutor flag
    if useThreadPoolExecutor:
        self._threadPool = ThreadPoolExecutor(
            max(0, self._cfg.getMaxThreads()))
    else:
        self._threadPool = ThreadPool(max(1, self._cfg.getMinThreads()),
                                      max(0, self._cfg.getMaxThreads()),
                                      self._cfg.getMaxWaitingPetitions())
        self._threadPool.daemonize()
    self._msgBroker = MessageBroker("%sMSB" % self._name, threadPool=self._threadPool)
    # Create static dict
    self._serviceInfoDict = {
        'serviceName': self._name,
        'serviceSectionPath': PathFinder.getServiceSection(self._name),
        'URL': self._cfg.getURL(),
        'messageSender': MessageSender(self._name, self._msgBroker),
        'validNames': self._validNames,
        'csPaths': [
            PathFinder.getServiceSection(svcName) for svcName in self._validNames
        ]
    }
    # Initialize Monitoring
    # This is a flag used to check whether "EnableActivityMonitoring" is enabled or not from the config file.
    self.activityMonitoring = (Operations().getValue(
        "EnableActivityMonitoring", False) or getServiceOption(
        self._serviceInfoDict, "EnableActivityMonitoring", False))
    if self.activityMonitoring:
        # The import needs to be here because of the CS must be initialized before importing
        # this class (see https://github.com/DIRACGrid/DIRAC/issues/4793)
        from DIRAC.MonitoringSystem.Client.MonitoringReporter import MonitoringReporter
        self.activityMonitoringReporter = MonitoringReporter(
            monitoringType="ComponentMonitoring")
        gThreadScheduler.addPeriodicTask(
            100, self.__activityMonitoringReporting)
    elif self._standalone:
        self._monitor = gMonitor
    else:
        # NOTE(review): indentation reconstructed — _initMonitoring is placed inside
        # this else branch (the non-standalone legacy-monitoring path); confirm
        # against the original file's layout.
        self._monitor = MonitoringClient()
        self._initMonitoring()
    # Call static initialization function
    try:
        # the handler class receives either the activity reporter or the legacy monitor
        if self.activityMonitoring:
            self._handler['class']._rh__initializeClass(
                dict(self._serviceInfoDict), self._lockManager,
                self._msgBroker, self.activityMonitoringReporter)
        else:
            self._handler['class']._rh__initializeClass(
                dict(self._serviceInfoDict), self._lockManager,
                self._msgBroker, self._monitor)
        if self._handler['init']:
            for initFunc in self._handler['init']:
                gLogger.verbose("Executing initialization function")
                try:
                    result = initFunc(dict(self._serviceInfoDict))
                except Exception as excp:
                    gLogger.exception(
                        "Exception while calling initialization function",
                        lException=excp)
                    return S_ERROR(
                        "Exception while calling initialization function: %s" % str(excp))
                if not isReturnStructure(result):
                    return S_ERROR(
                        "Service initialization function %s must return S_OK/S_ERROR" % initFunc)
                if not result['OK']:
                    return S_ERROR("Error while initializing %s: %s" %
                                   (self._name, result['Message']))
    except Exception as e:
        errMsg = "Exception while initializing %s" % self._name
        gLogger.exception(e)
        gLogger.exception(errMsg)
        return S_ERROR(errMsg)
    # Load actions after the handler has initialized itself
    result = self._loadActions()
    if not result['OK']:
        return result
    self._actions = result['Value']
    # legacy monitoring reports the thread-pool state periodically
    if not self.activityMonitoring:
        gThreadScheduler.addPeriodicTask(30, self.__reportThreadPoolContents)
    return S_OK()
def initialize(self):
    """Build the service URL, load the handler, set up threading/messaging and
    activity monitoring, run the handler's static initialization, then load actions."""
    # Build the URLs
    self._url = self._cfg.getURL()
    if not self._url:
        return S_ERROR("Could not build service URL for %s" % self._name)
    gLogger.verbose("Service URL is %s" % self._url)
    # Load handler
    result = self._loadHandlerInit()
    if not result["OK"]:
        return result
    self._handler = result["Value"]
    # Initialize lock manager
    self._lockManager = LockManager(self._cfg.getMaxWaitingPetitions())
    self._threadPool = ThreadPoolExecutor(max(0, self._cfg.getMaxThreads()))
    self._msgBroker = MessageBroker("%sMSB" % self._name, threadPool=self._threadPool)
    # Create static dict
    self._serviceInfoDict = {
        "serviceName": self._name,
        "serviceSectionPath": PathFinder.getServiceSection(self._name),
        "URL": self._cfg.getURL(),
        "messageSender": MessageSender(self._name, self._msgBroker),
        "validNames": self._validNames,
        "csPaths": [
            PathFinder.getServiceSection(svcName) for svcName in self._validNames
        ],
    }
    # security logging can be disabled globally (Operations) or per service
    self.securityLogging = Operations().getValue(
        "EnableSecurityLogging", True) and getServiceOption(
        self._serviceInfoDict, "EnableSecurityLogging", True)
    # Initialize Monitoring
    # The import needs to be here because of the CS must be initialized before importing
    # this class (see https://github.com/DIRACGrid/DIRAC/issues/4793)
    from DIRAC.MonitoringSystem.Client.MonitoringReporter import MonitoringReporter
    self.activityMonitoringReporter = MonitoringReporter(
        monitoringType="ServiceMonitoring")
    self._initMonitoring()
    # Call static initialization function
    try:
        self._handler["class"]._rh__initializeClass(
            dict(self._serviceInfoDict), self._lockManager,
            self._msgBroker, self.activityMonitoringReporter)
        if self._handler["init"]:
            for initFunc in self._handler["init"]:
                gLogger.verbose("Executing initialization function")
                try:
                    result = initFunc(dict(self._serviceInfoDict))
                except Exception as excp:
                    gLogger.exception(
                        "Exception while calling initialization function",
                        lException=excp)
                    return S_ERROR(
                        "Exception while calling initialization function: %s" % str(excp))
                if not isReturnStructure(result):
                    return S_ERROR(
                        "Service initialization function %s must return S_OK/S_ERROR" % initFunc)
                if not result["OK"]:
                    return S_ERROR("Error while initializing %s: %s" %
                                   (self._name, result["Message"]))
    except Exception as e:
        errMsg = "Exception while initializing %s" % self._name
        gLogger.exception(e)
        gLogger.exception(errMsg)
        return S_ERROR(errMsg)
    # periodic activity reporting when enabled
    if self.activityMonitoring:
        gThreadScheduler.addPeriodicTask(30, self.__reportActivity)
        gThreadScheduler.addPeriodicTask(
            100, self.__activityMonitoringReporting)
    # Load actions after the handler has initialized itself
    result = self._loadActions()
    if not result["OK"]:
        return result
    self._actions = result["Value"]
    return S_OK()
def initializeFileCatalogHandler(serviceInfo):
    """Handler initialisation.

    Creates the module-global FileCatalogDB, configures it from the service
    options (manager plugins and general catalog settings), and registers
    the per-operation monitoring counters.

    :param serviceInfo: service info dictionary
    :return: S_OK/S_ERROR as returned by FileCatalogDB.setConfig
    """
    global gFileCatalogDB
    dbLocation = getServiceOption(serviceInfo, 'Database', 'DataManagement/FileCatalogDB')
    gFileCatalogDB = FileCatalogDB(dbLocation)

    databaseConfig = {}
    # Obtain the plugins to be used for DB interaction
    gLogger.info("Initializing with FileCatalog with following managers:")
    defaultManagers = {'UserGroupManager': 'UserAndGroupManagerDB',
                       'SEManager': 'SEManagerDB',
                       'SecurityManager': 'NoSecurityManager',
                       'DirectoryManager': 'DirectoryLevelTree',
                       'FileManager': 'FileManager',
                       'DirectoryMetadata': 'DirectoryMetadata',
                       'FileMetadata': 'FileMetadata',
                       'DatasetManager': 'DatasetManager'}
    for configKey in sorted(defaultManagers.keys()):
        defaultValue = defaultManagers[configKey]
        configValue = getServiceOption(serviceInfo, configKey, defaultValue)
        gLogger.info("%-20s : %-20s" % (str(configKey), str(configValue)))
        databaseConfig[configKey] = configValue

    # Obtain some general configuration of the database
    gLogger.info("Initializing the FileCatalog with the following configuration:")
    # 0o775 is the octal literal form valid on Python 2.6+ and Python 3
    # (the original 0775 spelling is Python-2-only)
    defaultConfig = {'UniqueGUID': False,
                     'GlobalReadAccess': True,
                     'LFNPFNConvention': 'Strong',
                     'ResolvePFN': True,
                     'DefaultUmask': 0o775,
                     'ValidFileStatus': ['AprioriGood', 'Trash', 'Removing', 'Probing'],
                     'ValidReplicaStatus': ['AprioriGood', 'Trash', 'Removing', 'Probing'],
                     'VisibleFileStatus': ['AprioriGood'],
                     'VisibleReplicaStatus': ['AprioriGood']}
    for configKey in sorted(defaultConfig.keys()):
        defaultValue = defaultConfig[configKey]
        configValue = getServiceOption(serviceInfo, configKey, defaultValue)
        gLogger.info("%-20s : %-20s" % (str(configKey), str(configValue)))
        databaseConfig[configKey] = configValue

    res = gFileCatalogDB.setConfig(databaseConfig)

    # Per-operation monitoring counters, all summed per minute
    for name, description, unit in (
            ("AddFile", "Amount of addFile calls", "calls/min"),
            ("AddFileSuccessful", "Files successfully added", "files/min"),
            ("AddFileFailed", "Files failed to add", "files/min"),
            ("RemoveFile", "Amount of removeFile calls", "calls/min"),
            ("RemoveFileSuccessful", "Files successfully removed", "files/min"),
            ("RemoveFileFailed", "Files failed to remove", "files/min"),
            ("AddReplica", "Amount of addReplica calls", "calls/min"),
            ("AddReplicaSuccessful", "Replicas successfully added", "replicas/min"),
            ("AddReplicaFailed", "Replicas failed to add", "replicas/min"),
            ("RemoveReplica", "Amount of removeReplica calls", "calls/min"),
            ("RemoveReplicaSuccessful", "Replicas successfully removed", "replicas/min"),
            ("RemoveReplicaFailed", "Replicas failed to remove", "replicas/min"),
            ("ListDirectory", "Amount of listDirectory calls", "calls/min")):
        gMonitor.registerActivity(name, description, "FileCatalogHandler", unit, gMonitor.OP_SUM)

    return res
def initializeHandler(cls, serviceInfoDict):
    """Initialize handler: log the cache directory and read the sweep size."""
    gLogger.notice("CacheDirectory: %s" % cls.cacheDir())
    # number of requests forwarded per sweep, configurable per service
    sweepSize = getServiceOption(serviceInfoDict, "SweepSize", 10)
    cls.sweepSize = sweepSize
    gLogger.notice(f"SweepSize: {cls.sweepSize}")
    return S_OK()
# Obtain some general configuration of the database gLogger.info("Initializing the FileCatalog with the following configuration:") defaultConfig = { "UniqueGUID": False, "GlobalReadAccess": True, "LFNPFNConvention": "Strong", "ResolvePFN": True, "DefaultUmask": 0775, "ValidFileStatus": ["AprioriGood", "Trash", "Removing", "Probing"], "ValidReplicaStatus": ["AprioriGood", "Trash", "Removing", "Probing"], "VisibleFileStatus": ["AprioriGood"], "VisibleReplicaStatus": ["AprioriGood"], } for configKey in sortList(defaultConfig.keys()): defaultValue = defaultConfig[configKey] configValue = getServiceOption(serviceInfo, configKey, defaultValue) gLogger.info("%-20s : %-20s" % (str(configKey), str(configValue))) databaseConfig[configKey] = configValue res = gFileCatalogDB.setConfig(databaseConfig) gMonitor.registerActivity("AddFile", "Amount of addFile calls", "FileCatalogHandler", "calls/min", gMonitor.OP_SUM) gMonitor.registerActivity( "AddFileSuccessful", "Files successfully added", "FileCatalogHandler", "files/min", gMonitor.OP_SUM ) gMonitor.registerActivity( "AddFileFailed", "Files failed to add", "FileCatalogHandler", "files/min", gMonitor.OP_SUM ) gMonitor.registerActivity( "RemoveFile", "Amount of removeFile calls", "FileCatalogHandler", "calls/min", gMonitor.OP_SUM )