def run(self):
    """Background update loop.

    While automatic updates are enabled, sleep for the configured
    propagation interval and then refresh (and publish) the configuration,
    logging an error when every source fails.
    """
    while self.__automaticUpdate:
        time.sleep(gConfigurationData.getPropagationTime())
        # Refresh only when enabled; a False result means no source worked.
        if self.__refreshEnabled and not self.__refreshAndPublish():
            gLogger.error("Can't refresh configuration from any source")
def getSectionTree(self, root='', substr=''):
    """Create a list of all subsections starting from a given root.

    The list can be filtered by setting the `substr` parameter.

    :param root: string - starting point in the CS tree
    :param substr: string - select only results that contain this substring
    :return: S_OK(list of full CS paths) / S_ERROR
    """
    # BUGFIX: an empty substring is contained in every path, so the default
    # substr='' now includes everything (previously `substr and substr in root`
    # made the whole function return an empty list for the default call,
    # contradicting the docstring).
    if substr in root:
        result = [root]
    else:
        result = []

    # get subsections of the root
    sections = self.getSections(root)
    if not sections['OK']:
        gLogger.error('getSectionTree', "getSection() failed with message: %s" % sections['Message'])
        return S_ERROR('Invalid root path provided')

    # recursively go through subsections and get their subsections
    for section in sections['Value']:
        subtree = self.getSectionTree("%s/%s" % (root, section), substr)
        if not subtree['OK']:
            # BUGFIX: previously logged sections['Message'], which does not
            # exist on the successful S_OK result (KeyError at log time);
            # report the message of the failed recursive call instead.
            gLogger.error('getSectionTree', "getSection() failed with message: %s" % subtree['Message'])
            return S_ERROR('CS content was altered during the operation')
        result.extend(subtree['Value'])

    return S_OK(result)
def __checkThreadID(self):
    """
    ..warning:: just guessing....
    This seems to check that we are not creating a client and then using it in a
    multithreaded environment.
    However, it is triggered only if self.__enableThreadCheck is to True, but it is
    hardcoded to False, and does not seem to be modified anywhere in the code.
    """
    # Propagate an initialisation failure instead of doing the check.
    if not self.__initStatus["OK"]:
        return self.__initStatus
    cThID = thread.get_ident()  # Python-2 'thread' module: current thread id
    if not self.__allowedThreadID:
        # First use: pin the client to the current thread.
        self.__allowedThreadID = cThID
    elif cThID != self.__allowedThreadID:
        # Used from a different thread than the pinned one: only log the
        # problem, the call itself is not blocked.
        msgTxt = """
=======DISET client thread safety error========================
Client %s can only run on thread %s
and this is thread %s
===============================================================""" % (
            str(self),
            self.__allowedThreadID,
            cThID,
        )
        gLogger.error("DISET client thread safety error", msgTxt)
def __readConf(self):
    """Read the agent configuration.

    Builds the FTS3 server placement policy and loads all numeric agent
    options onto the instance.

    :return: S_OK() / S_ERROR
    """
    # Getting all the possible servers
    res = getFTS3ServerDict()
    if not res['OK']:
        gLogger.error(res['Message'])
        return res

    self._serverPolicy = FTS3Utilities.FTS3ServerPolicy(
        res['Value'],
        serverPolicy=opHelper().getValue('DataManagement/FTSPlacement/FTS3/ServerPolicy', 'Random'))

    # (attribute, agent option name, default) triplets
    for attrName, optionName, defaultValue in (
            ('maxNumberOfThreads', 'MaxThreads', 10),
            ('operationBulkSize', 'OperationBulkSize', 20),  # Operations per loop
            ('jobBulkSize', 'JobBulkSize', 20),  # Jobs per loop
            ('maxFilesPerJob', 'MaxFilesPerJob', 100),
            ('maxAttemptsPerFile', 'MaxAttemptsPerFile', 256),
            ('kickDelay', 'KickAssignedHours', 1),
            ('maxKick', 'KickLimitPerCycle', 100),
            ('deleteDelay', 'DeleteGraceDays', 180),
            ('maxDelete', 'DeleteLimitPerCycle', 100),
    ):
        setattr(self, attrName, self.am_getOption(optionName, defaultValue))

    return S_OK()
def __backupCurrentConfiguration(self, backupName):
    """Zip the current <name>.cfg into the dated backup directory.

    :param backupName: tag inserted in the backup file name
        (<name>.<backupName>.zip under <backupDir>/<year>/<month>/)
    """
    configurationFilename = "%s.cfg" % self.getName()
    configurationFile = os.path.join(DIRAC.rootPath, "etc", configurationFilename)
    today = Time.date()
    # Backups are grouped by year/month.
    backupPath = os.path.join(self.getBackupDir(), str(today.year), "%02d" % today.month)
    mkDir(backupPath)
    backupFile = os.path.join(
        backupPath,
        configurationFilename.replace(".cfg", ".%s.zip" % backupName))
    if os.path.isfile(configurationFile):
        gLogger.info("Making a backup of configuration in %s" % backupFile)
        try:
            with zipfile.ZipFile(backupFile, "w", zipfile.ZIP_DEFLATED) as zf:
                zf.write(
                    configurationFile,
                    "%s.backup.%s" % (os.path.split(configurationFile)[1], backupName))
        except Exception:
            # Best effort: a failed backup is logged but never fatal.
            gLogger.exception()
            gLogger.error("Cannot backup configuration data file", "file %s" % backupFile)
    else:
        gLogger.warn("CS data file does not exist", configurationFile)
def _refreshAndPublish(self):
    """
    Refresh configuration and publish local updates

    :return: bool - True if the refresh from the master (or slaves) succeeded
    """
    self._lastUpdateTime = time.time()
    gLogger.info("Refreshing from master server")
    sMasterServer = gConfigurationData.getMasterServer()
    if sMasterServer:
        # Local import to avoid a circular dependency at module load time.
        from DIRAC.ConfigurationSystem.Client.ConfigurationClient import ConfigurationClient

        oClient = ConfigurationClient(
            url=sMasterServer,
            timeout=self._timeout,
            useCertificates=gConfigurationData.useServerCertificate(),
            skipCACheck=gConfigurationData.skipCACheck(),
        )
        dRetVal = _updateFromRemoteLocation(oClient)
        if not dRetVal["OK"]:
            gLogger.error("Can't update from master server", dRetVal["Message"])
            return False
        if gConfigurationData.getAutoPublish():
            gLogger.info("Publishing to master server...")
            dRetVal = oClient.publishSlaveServer(self._url)
            if not dRetVal["OK"]:
                # Publication failure is logged but does not fail the refresh.
                gLogger.error("Can't publish to master server", dRetVal["Message"])
        return True
    else:
        gLogger.warn(
            "No master server is specified in the configuration, trying to get data from other slaves"
        )
        # Fall back to refreshing from slave servers; report their success.
        return self._refresh()["OK"]
def __realTrigger( self, eventName, params ):
    """Run all listeners registered for eventName with params.

    Re-entrant triggers of the same event are no-ops (S_OK(0)). Returns the
    first failing listener's S_ERROR, or S_OK(number of listeners) on success.
    """
    # Snapshot the listener list and mark the event "in progress" under the lock.
    gEventSync.lock()
    try:
        if eventName not in self.__events:
            return S_ERROR( "Event %s is not registered" % eventName )
        if eventName in self.__processingEvents:
            # Already being triggered (possibly by one of its own listeners).
            return S_OK( 0 )
        eventFunctors = list( self.__events[ eventName ] )
        self.__processingEvents.add( eventName )
    finally:
        gEventSync.unlock()
    finalResult = S_OK()
    for functor in eventFunctors:
        try:
            result = functor( eventName, params )
        except Exception:
            # A broken listener must not stop the others.
            gLogger.exception( "Listener %s for event %s raised an exception" % ( functor.__name__, eventName ) )
            continue
        # Python-2 dict-type check; malformed results are skipped.
        if type( result ) != types.DictType or 'OK' not in result:
            gLogger.error( "Listener for event did not return a S_OK/S_ERROR structure", "%s %s" % ( functor.__name__, eventName ) )
            continue
        if not result[ 'OK' ]:
            # First explicit failure aborts the remaining listeners.
            finalResult = result
            break
    # Clear the "in progress" marker even if unlocking itself misbehaves.
    gEventSync.lock()
    try:
        self.__processingEvents.discard( eventName )
    finally:
        try:
            gEventSync.unlock()
        except:
            pass
    if not finalResult[ 'OK' ]:
        return finalResult
    return S_OK( len( eventFunctors ) )
def _rh_executeAction(self, proposalTuple):
    """
    Execute an action.

    @type proposalTuple: tuple
    @param proposalTuple: Proposal to execute. Its second element is the
        action tuple: first the type of action ("RPC", "FileTransfer" or
        "Connection"), then the action itself.
    """
    actionTuple = proposalTuple[1]
    gLogger.debug("Executing %s:%s action" % actionTuple)
    startTime = time.time()
    actionType = actionTuple[0]
    # Dispatch on the action type; an unknown type is a programming error.
    if actionType == "RPC":
        retVal = self.__doRPC(actionTuple[1])
    elif actionType == "FileTransfer":
        retVal = self.__doFileTransfer(actionTuple[1])
    elif actionType == "Connection":
        retVal = self.__doConnection(actionTuple[1])
    else:
        raise Exception("Unknown action (%s)" % actionType)
    if not retVal:
        # Handlers must return something; synthesise an error if they don't.
        message = "Method %s for action %s does not have a return value!" % (
            actionTuple[1], actionTuple[0])
        gLogger.error(message)
        retVal = S_ERROR(message)
    self.__logRemoteQueryResponse(retVal, time.time() - startTime)
    # Ship the result back to the client over the transport pool.
    return self.__trPool.send(self.__trid, retVal)
def __refreshAndPublish(self):
    """Refresh from the master CS and optionally publish this slave's URL.

    :return: bool - True on successful refresh
    """
    self.__lastUpdateTime = time.time()
    gLogger.info("Refreshing from master server")
    # Local import to avoid a circular dependency at module load time.
    from DIRAC.Core.DISET.RPCClient import RPCClient
    sMasterServer = gConfigurationData.getMasterServer()
    if sMasterServer:
        oClient = RPCClient(
            sMasterServer,
            timeout=self.__timeout,
            useCertificates=gConfigurationData.useServerCertificate(),
            skipCACheck=gConfigurationData.skipCACheck())
        dRetVal = _updateFromRemoteLocation(oClient)
        if not dRetVal['OK']:
            gLogger.error("Can't update from master server", dRetVal['Message'])
            return False
        if gConfigurationData.getAutoPublish():
            gLogger.info("Publishing to master server...")
            dRetVal = oClient.publishSlaveServer(self.__url)
            if not dRetVal['OK']:
                # Publication failure is logged but does not fail the refresh.
                gLogger.error("Can't publish to master server", dRetVal['Message'])
        return True
    else:
        gLogger.warn(
            "No master server is specified in the configuration, trying to get data from other slaves"
        )
        # Fall back to refreshing from slave servers; report their success.
        return self.__refresh()['OK']
def run(self):
    """Propagation loop: wait the configured interval, then refresh and
    publish the configuration while automatic updates remain enabled."""
    while self.__automaticUpdate:
        waitSeconds = gConfigurationData.getPropagationTime()
        time.sleep(waitSeconds)
        if not self.__refreshEnabled:
            continue
        # A False result means no configuration source could be reached.
        if not self.__refreshAndPublish():
            gLogger.error("Can't refresh configuration from any source")
def _rh_executeMessageCallback( self, msgObj ):
    """Execute the msg_<name> handler for a received message.

    Access is serialised through the lock manager; the handler's result is
    coerced to an S_OK/S_ERROR structure before being returned.
    """
    msgName = msgObj.getName()
    if not self.__msgBroker.getMsgFactory().messageExists( self.__svcName, msgName ):
        return S_ERROR( "Unknown message %s" % msgName )
    methodName = "msg_%s" % msgName
    self.__logRemoteQuery( "Message/%s" % methodName, msgObj.dumpAttrs() )
    startTime = time.time()
    try:
        oMethod = getattr( self, methodName )
    except:
        return S_ERROR( "Handler function for message %s does not exist!" % msgName )
    self.__lockManager.lock( methodName )
    try:
        try:
            uReturnValue = oMethod( msgObj )
        except Exception, v:  # Python-2 except syntax
            gLogger.exception( "Uncaught exception when serving message", methodName )
            return S_ERROR( "Server error while serving %s: %s" % ( msgName, str( v ) ) )
    finally:
        # Always release the per-method lock, even on failure.
        self.__lockManager.unlock( methodName )
    if not isReturnStructure( uReturnValue ):
        gLogger.error( "Message does not return a S_OK/S_ERROR", msgName )
        uReturnValue = S_ERROR( "Message %s does not return a S_OK/S_ERROR" % msgName )
    self.__logRemoteQueryResponse( uReturnValue, time.time() - startTime )
    return uReturnValue
def __readConf(self):
    """Read the agent configuration.

    Builds the FTS3 server placement policy, resolves the third-party
    transfer protocols, and loads the numeric agent options.

    :return: S_OK() / S_ERROR
    """
    # All the possible FTS3 servers
    res = getFTS3ServerDict()
    if not res['OK']:
        gLogger.error(res['Message'])
        return res

    policyName = opHelper().getValue('DataManagement/FTSPlacement/FTS3/ServerPolicy', 'Random')
    self._serverPolicy = FTS3Utilities.FTS3ServerPolicy(res['Value'], serverPolicy=policyName)

    # List of third party protocols for transfers
    self.thirdPartyProtocols = DMSHelpers().getThirdPartyProtocols()

    self.maxNumberOfThreads = self.am_getOption("MaxThreads", 10)
    self.operationBulkSize = self.am_getOption("OperationBulkSize", 20)  # Operations per loop
    self.jobBulkSize = self.am_getOption("JobBulkSize", 20)  # Jobs per loop
    self.maxFilesPerJob = self.am_getOption("MaxFilesPerJob", 100)
    self.maxAttemptsPerFile = self.am_getOption("MaxAttemptsPerFile", 256)
    self.kickDelay = self.am_getOption("KickAssignedHours", 1)
    self.maxKick = self.am_getOption("KickLimitPerCycle", 100)
    self.deleteDelay = self.am_getOption("DeleteGraceDays", 180)
    self.maxDelete = self.am_getOption("DeleteLimitPerCycle", 100)
    return S_OK()
def __cbRecvMsg(self, trid, msgObj):
    """Transport callback for an incoming message.

    Runs the special (generic) callbacks first; if none short-circuits,
    dispatches to the message-specific callback. Always returns an
    S_OK/S_ERROR structure.
    """
    msgName = msgObj.getName()
    msgObj.setMsgClient(self)
    # Generic callbacks get first crack at every message.
    for cb in self.__specialCallbacks['msg']:
        try:
            result = cb(self, msgObj)
            if not isReturnStructure(result):
                gLogger.error("Callback for message does not return S_OK/S_ERROR", msgObj.getName())
                return S_ERROR("No response")
            if not result['OK']:
                return result
            # If no specific callback but a generic one, return the generic one
            if msgName not in self.__callbacks:
                return result
        except BaseException:
            # A broken callback must not break message processing.
            gLogger.exception("Exception while processing callbacks", msgObj.getName())
    if msgName not in self.__callbacks:
        return S_ERROR("Unexpected message")
    try:
        result = self.__callbacks[msgName](msgObj)
        if not isReturnStructure(result):
            gLogger.error("Callback for message does not return S_OK/S_ERROR", msgName)
            return S_ERROR("No response")
        return result
    except BaseException:
        gLogger.exception("Exception while processing callbacks", msgName)
        return S_ERROR("No response")
def sendMessage(self, msgObj):
    """Send msgObj over the established transport, connecting first if no
    transport id exists yet; propagate any connection failure."""
    if not self.__trid:
        connResult = self.connect()
        if not connResult["OK"]:
            gLogger.error("Failed connect for sending", "%r" % msgObj)
            return connResult
    return self.__msgBroker.sendMessage(self.__trid, msgObj)
def _rh_executeAction( self, proposalTuple ):
    """
    Execute an action.

    @type proposalTuple: tuple
    @param proposalTuple: Proposal to execute. Its second element is the
        action tuple: first the type of action ("RPC", "FileTransfer" or
        "Connection"), then the action itself.
    """
    actionTuple = proposalTuple[1]
    gLogger.debug( "Executing %s:%s action" % actionTuple )
    startTime = time.time()
    actionType = actionTuple[0]
    # Dispatch on the action type; an unknown type is a programming error.
    if actionType == "RPC":
        retVal = self.__doRPC( actionTuple[1] )
    elif actionType == "FileTransfer":
        retVal = self.__doFileTransfer( actionTuple[1] )
    elif actionType == "Connection":
        retVal = self.__doConnection( actionTuple[1] )
    else:
        raise Exception( "Unknown action (%s)" % actionType )
    if not retVal:
        # Handlers must return something; synthesise an error if they don't.
        message = "Method %s for action %s does not have a return value!" % ( actionTuple[1], actionTuple[0] )
        gLogger.error( message )
        retVal = S_ERROR( message )
    self.__logRemoteQueryResponse( retVal, time.time() - startTime )
    # Ship the result back to the client over the transport pool.
    return self.__trPool.send( self.__trid, retVal )
def _rh_executeAction( self, proposalTuple ):
    """
    Execute an action.

    :type proposalTuple: tuple
    :param proposalTuple: Type of action to execute. First position of the tuple
                          must be the type of action to execute. The second position is
                          the action itself.
    """
    actionTuple = proposalTuple[1]
    gLogger.debug( "Executing %s:%s action" % actionTuple )
    startTime = time.time()
    actionType = actionTuple[0]
    # Remember the current action for later introspection by the handler.
    self.serviceInfoDict[ 'actionTuple' ] = actionTuple
    try:
        if actionType == "RPC":
            retVal = self.__doRPC( actionTuple[1] )
        elif actionType == "FileTransfer":
            retVal = self.__doFileTransfer( actionTuple[1] )
        elif actionType == "Connection":
            retVal = self.__doConnection( actionTuple[1] )
        else:
            return S_ERROR( "Unknown action %s" % actionType )
    except RequestHandler.ConnectionError, excp:  # Python-2 except syntax
        gLogger.error( "ConnectionError", str( excp ) )
        return S_ERROR( excp )
    # NOTE(review): on success retVal is computed but never used and the
    # function implicitly returns None; fuller variants of this method go on
    # to validate, log and send retVal — this version looks truncated. TODO confirm.
def _rh_executeAction(self, proposalTuple):
    """
    Execute an action.

    @type proposalTuple: tuple
    @param proposalTuple: Type of action to execute. First position of the tuple
        must be the type of action to execute. The second position is the action
        itself.
    """
    actionTuple = proposalTuple[1]
    gLogger.debug("Executing %s:%s action" % actionTuple)
    startTime = time.time()
    actionType = actionTuple[0]
    # Remember the current action for later introspection by the handler.
    self.serviceInfoDict['actionTuple'] = actionTuple
    try:
        if actionType == "RPC":
            retVal = self.__doRPC(actionTuple[1])
        elif actionType == "FileTransfer":
            retVal = self.__doFileTransfer(actionTuple[1])
        elif actionType == "Connection":
            retVal = self.__doConnection(actionTuple[1])
        else:
            return S_ERROR("Unknown action %s" % actionType)
    except RequestHandler.ConnectionError, excp:  # Python-2 except syntax
        gLogger.error("ConnectionError", str(excp))
        return S_ERROR(excp)
    # NOTE(review): on success retVal is computed but never used and the
    # function implicitly returns None; fuller variants of this method go on
    # to validate, log and send retVal — this version looks truncated. TODO confirm.
def _rh_executeMessageCallback(self, msgObj):
    """Execute the msg_<name> handler for a received message.

    Access is serialised through the lock manager; the handler's result is
    coerced to an S_OK/S_ERROR structure before being returned.
    """
    msgName = msgObj.getName()
    if not self.__msgBroker.getMsgFactory().messageExists(
            self.__svcName, msgName):
        return S_ERROR("Unknown message %s" % msgName)
    methodName = "msg_%s" % msgName
    self.__logRemoteQuery("Message/%s" % methodName, msgObj.dumpAttrs())
    startTime = time.time()
    try:
        oMethod = getattr(self, methodName)
    except:
        return S_ERROR("Handler function for message %s does not exist!" % msgName)
    self.__lockManager.lock(methodName)
    try:
        try:
            uReturnValue = oMethod(msgObj)
        except Exception, v:  # Python-2 except syntax
            gLogger.exception("Uncaught exception when serving message", methodName)
            return S_ERROR("Server error while serving %s: %s" % (msgName, str(v)))
    finally:
        # Always release the per-method lock, even on failure.
        self.__lockManager.unlock(methodName)
    if not isReturnStructure(uReturnValue):
        gLogger.error("Message does not return a S_OK/S_ERROR", msgName)
        uReturnValue = S_ERROR(
            "Message %s does not return a S_OK/S_ERROR" % msgName)
    self.__logRemoteQueryResponse(uReturnValue, time.time() - startTime)
    return uReturnValue
def __refreshAndPublish(self):
    """Refresh from the master CS and optionally publish this slave's URL.

    :return: bool - True on successful refresh
    """
    self.__lastUpdateTime = time.time()
    gLogger.info("Refreshing from master server")
    # Local import to avoid a circular dependency at module load time.
    from DIRAC.Core.DISET.RPCClient import RPCClient

    sMasterServer = gConfigurationData.getMasterServer()
    if sMasterServer:
        oClient = RPCClient(
            sMasterServer,
            timeout=self.__timeout,
            useCertificates=gConfigurationData.useServerCertificate(),
            skipCACheck=gConfigurationData.skipCACheck(),
        )
        dRetVal = _updateFromRemoteLocation(oClient)
        if not dRetVal["OK"]:
            gLogger.error("Can't update from master server", dRetVal["Message"])
            return False
        if gConfigurationData.getAutoPublish():
            gLogger.info("Publishing to master server...")
            dRetVal = oClient.publishSlaveServer(self.__url)
            if not dRetVal["OK"]:
                # Publication failure is logged but does not fail the refresh.
                gLogger.error("Can't publish to master server", dRetVal["Message"])
        return True
    else:
        gLogger.warn("No master server is specified in the configuration, trying to get data from other slaves")
        # Fall back to refreshing from slave servers; report their success.
        return self.__refresh()["OK"]
def __readConf(self):
    """Read configurations

    :return: S_OK()/S_ERROR()
    """
    # Getting all the possible servers
    res = getFTS3ServerDict()
    if not res["OK"]:
        gLogger.error(res["Message"])
        return res

    self._serverPolicy = FTS3Utilities.FTS3ServerPolicy(
        res["Value"],
        serverPolicy=opHelper().getValue("DataManagement/FTSPlacement/FTS3/ServerPolicy", "Random"),
    )

    # (attribute, agent option name, default) triplets
    optionTriplets = (
        ("maxNumberOfThreads", "MaxThreads", 10),
        ("operationBulkSize", "OperationBulkSize", 20),  # Operations per loop
        ("jobBulkSize", "JobBulkSize", 20),  # Jobs per loop
        ("maxFilesPerJob", "MaxFilesPerJob", 100),
        ("maxAttemptsPerFile", "MaxAttemptsPerFile", 256),
        ("kickDelay", "KickAssignedHours", 1),
        ("maxKick", "KickLimitPerCycle", 100),
        ("deleteDelay", "DeleteGraceDays", 180),
        ("maxDelete", "DeleteLimitPerCycle", 100),
        # lifetime of the proxy we download to delegate to FTS
        ("proxyLifetime", "ProxyLifetime", PROXY_LIFETIME),
    )
    for attrName, optionName, defaultValue in optionTriplets:
        setattr(self, attrName, self.am_getOption(optionName, defaultValue))

    return S_OK()
def _rh_executeAction(self, proposalTuple):
    """
    Execute an action.

    :type proposalTuple: tuple
    :param proposalTuple: Type of action to execute. First position of the tuple
        must be the type of action to execute. The second position is the action
        itself.
    """
    actionTuple = proposalTuple[1]
    # tuple() guards against actionTuple being a list (%-formatting needs a tuple).
    gLogger.debug("Executing %s:%s action" % tuple(actionTuple))
    startTime = time.time()
    actionType = actionTuple[0]
    # Remember the current action for later introspection by the handler.
    self.serviceInfoDict['actionTuple'] = actionTuple
    try:
        if actionType == "RPC":
            retVal = self.__doRPC(actionTuple[1])
        elif actionType == "FileTransfer":
            retVal = self.__doFileTransfer(actionTuple[1])
        elif actionType == "Connection":
            retVal = self.__doConnection(actionTuple[1])
        else:
            return S_ERROR("Unknown action %s" % actionType)
    except RequestHandler.ConnectionError as excp:
        gLogger.error("ConnectionError", str(excp))
        return S_ERROR(excp)
    if not isReturnStructure(retVal):
        # Handlers must return S_OK/S_ERROR; synthesise an error otherwise.
        message = "Method %s for action %s does not return a S_OK/S_ERROR!" % (actionTuple[1], actionTuple[0])
        gLogger.error(message)
        retVal = S_ERROR(message)
    elapsedTime = time.time() - startTime
    self.__logRemoteQueryResponse(retVal, elapsedTime)
    result = self.__trPool.send(self.__trid, retVal)
    # this will delete the value from the S_OK(value)
    del retVal
    return S_OK([result, elapsedTime])
def _refreshInThread(self):
    """
    Refreshing configuration in the background. By default it uses a thread
    but it can be run also in the IOLoop
    """
    refreshResult = self._refresh()
    if not refreshResult["OK"]:
        gLogger.error("Error while updating the configuration", refreshResult["Message"])
def __doFileTransfer(self, sDirection):
    """
    Execute a file transfer action

    :type sDirection: string
    :param sDirection: Direction of the transfer
    :return: S_OK/S_ERROR
    """
    # First receive the file description from the client.
    retVal = self.__trPool.receive(self.__trid)
    if not retVal["OK"]:
        raise ConnectionError(
            "Error while receiving file description %s %s"
            % (self.srv_getFormattedRemoteCredentials(), retVal["Message"])
        )
    # Reconvert to tuple
    fileInfo = tuple(retVal["Value"])
    # Lower-case the first character: e.g. "FromClient" -> "fromClient".
    sDirection = "%s%s" % (sDirection[0].lower(), sDirection[1:])
    if "transfer_%s" % sDirection not in dir(self):
        self.__trPool.send(self.__trid, S_ERROR("Service can't transfer files %s" % sDirection))
        return
    # Tell the client we are ready before the actual transfer starts.
    retVal = self.__trPool.send(self.__trid, S_OK("Accepted"))
    if not retVal["OK"]:
        return retVal
    self.__logRemoteQuery("FileTransfer/%s" % sDirection, fileInfo)
    self.__lockManager.lock("FileTransfer/%s" % sDirection)
    try:
        try:
            fileHelper = FileHelper(self.__trPool.get(self.__trid))
            # Dispatch to the matching transfer_* handler.
            if sDirection == "fromClient":
                fileHelper.setDirection("fromClient")
                uRetVal = self.transfer_fromClient(fileInfo[0], fileInfo[1], fileInfo[2], fileHelper)
            elif sDirection == "toClient":
                fileHelper.setDirection("toClient")
                uRetVal = self.transfer_toClient(fileInfo[0], fileInfo[1], fileHelper)
            elif sDirection == "bulkFromClient":
                fileHelper.setDirection("fromClient")
                uRetVal = self.transfer_bulkFromClient(fileInfo[0], fileInfo[1], fileInfo[2], fileHelper)
            elif sDirection == "bulkToClient":
                fileHelper.setDirection("toClient")
                uRetVal = self.transfer_bulkToClient(fileInfo[0], fileInfo[1], fileHelper)
            elif sDirection == "listBulk":
                fileHelper.setDirection("toClient")
                uRetVal = self.transfer_listBulk(fileInfo[0], fileInfo[1], fileHelper)
            else:
                return S_ERROR("Direction %s does not exist!!!" % sDirection)
            if uRetVal["OK"] and not fileHelper.finishedTransmission():
                # The handler claimed success but the stream was not drained.
                gLogger.error("You haven't finished receiving/sending the file", str(fileInfo))
                return S_ERROR("Incomplete transfer")
            del fileHelper
            return uRetVal
        finally:
            # Release the transfer lock on every exit path.
            self.__lockManager.unlock("FileTransfer/%s" % sDirection)
    except Exception as e:  # pylint: disable=broad-except
        gLogger.exception("Uncaught exception when serving Transfer", "%s" % sDirection, lException=e)
        return S_ERROR("Server error while serving %s: %s" % (sDirection, repr(e)))
def dumpLocalCFGToFile(self, fileName):
    """Write the local CFG to fileName.

    :param fileName: destination path
    :return: S_OK() on success, S_ERROR on write failure
    """
    try:
        with open(fileName, "w") as cfgFile:
            cfgFile.write(str(self.localCFG))
        gLogger.verbose("Configuration file dumped", "'%s'" % fileName)
    except IOError:
        gLogger.error("Can't dump cfg file", "'%s'" % fileName)
        return S_ERROR("Can't dump cfg file '%s'" % fileName)
    return S_OK()
def __AutoRefresh(self):
    """
    Auto refresh the configuration
    We disable pylint error because this class must be instanciated
    by a mixin to define the methods.
    """
    if not self._refreshEnabled:  # pylint: disable=no-member
        return
    if not self._refreshAndPublish():  # pylint: disable=no-member
        gLogger.error("Can't refresh configuration from any source")
def dumpLocalCFGToFile( self, fileName ):
    """Serialise the local CFG to disk.

    :param fileName: destination path
    :return: S_OK() on success, S_ERROR on write failure
    """
    try:
        with open( fileName, "w" ) as outputFile:
            outputFile.write( str( self.localCFG ) )
        gLogger.verbose( "Configuration file dumped", "'%s'" % fileName )
    except IOError:
        gLogger.error( "Can't dump cfg file", "'%s'" % fileName )
        return S_ERROR( "Can't dump cfg file '%s'" % fileName )
    return S_OK()
def __doFileTransfer(self, sDirection):
    """
    Execute a file transfer action

    @type sDirection: string
    @param sDirection: Direction of the transfer
    @return: S_OK/S_ERROR
    """
    # First receive the file description from the client.
    retVal = self.__trPool.receive(self.__trid)
    if not retVal["OK"]:
        raise RequestHandler.ConnectionError(
            "Error while receiving file description %s %s"
            % (self.srv_getFormattedRemoteCredentials(), retVal["Message"])
        )
    fileInfo = retVal["Value"]
    # Lower-case the first character: e.g. "FromClient" -> "fromClient".
    sDirection = "%s%s" % (sDirection[0].lower(), sDirection[1:])
    if "transfer_%s" % sDirection not in dir(self):
        self.__trPool.send(self.__trid, S_ERROR("Service can't transfer files %s" % sDirection))
        return
    # Tell the client we are ready before the actual transfer starts.
    retVal = self.__trPool.send(self.__trid, S_OK("Accepted"))
    if not retVal["OK"]:
        return retVal
    self.__logRemoteQuery("FileTransfer/%s" % sDirection, fileInfo)
    self.__lockManager.lock(sDirection)
    try:
        try:
            fileHelper = FileHelper(self.__trPool.get(self.__trid))
            # Dispatch to the matching transfer_* handler.
            if sDirection == "fromClient":
                fileHelper.setDirection("fromClient")
                uRetVal = self.transfer_fromClient(fileInfo[0], fileInfo[1], fileInfo[2], fileHelper)
            elif sDirection == "toClient":
                fileHelper.setDirection("toClient")
                uRetVal = self.transfer_toClient(fileInfo[0], fileInfo[1], fileHelper)
            elif sDirection == "bulkFromClient":
                fileHelper.setDirection("fromClient")
                uRetVal = self.transfer_bulkFromClient(fileInfo[0], fileInfo[1], fileInfo[2], fileHelper)
            elif sDirection == "bulkToClient":
                fileHelper.setDirection("toClient")
                uRetVal = self.transfer_bulkToClient(fileInfo[0], fileInfo[1], fileHelper)
            elif sDirection == "listBulk":
                fileHelper.setDirection("toClient")
                uRetVal = self.transfer_listBulk(fileInfo[0], fileInfo[1], fileHelper)
            else:
                return S_ERROR("Direction %s does not exist!!!" % sDirection)
            if uRetVal["OK"] and not fileHelper.finishedTransmission():
                # The handler claimed success but the stream was not drained.
                gLogger.error("You haven't finished receiving/sending the file", str(fileInfo))
                return S_ERROR("Incomplete transfer")
            return uRetVal
        finally:
            # Release the transfer lock on every exit path.
            self.__lockManager.unlock(sDirection)
    except Exception, v:  # Python-2 except syntax
        gLogger.exception("Uncaught exception when serving Transfer", "%s" % sDirection)
        return S_ERROR("Server error while serving %s: %s" % (sDirection, str(v)))
def __doConnection( self, methodName ):
    """
    Connection callbacks: receive the packed client arguments and dispatch
    to the connection-callback machinery.
    """
    received = self.__trPool.receive( self.__trid )
    if not received[ 'OK' ]:
        gLogger.error( "Error receiving arguments", "%s %s" % ( self.srv_getFormattedRemoteCredentials(),
                                                                received[ 'Message' ] ) )
        return S_ERROR( "Error while receiving function arguments: %s" % received[ 'Message' ] )
    return self._rh_executeConnectionCallback( methodName, received[ 'Value' ] )
def __doFileTransfer( self, sDirection ):
    """
    Execute a file transfer action

    @type sDirection: string
    @param sDirection: Direction of the transfer
    @return: S_OK/S_ERROR
    """
    # First receive the file description from the client.
    retVal = self.__trPool.receive( self.__trid )
    if not retVal[ 'OK' ]:
        gLogger.error( "Error while receiving file description", "%s %s" % ( self.srv_getFormattedRemoteCredentials(),
                                                                             retVal[ 'Message' ] ) )
        return S_ERROR( "Error while receiving file description: %s" % retVal[ 'Message' ] )
    fileInfo = retVal[ 'Value' ]
    # Lower-case the first character: e.g. "FromClient" -> "fromClient".
    sDirection = "%s%s" % ( sDirection[0].lower(), sDirection[1:] )
    if "transfer_%s" % sDirection not in dir( self ):
        self.__trPool.send( self.__trid, S_ERROR( "Service can't transfer files %s" % sDirection ) )
        return
    # Tell the client we are ready before the actual transfer starts.
    retVal = self.__trPool.send( self.__trid, S_OK( "Accepted" ) )
    if not retVal[ 'OK' ]:
        return retVal
    self.__logRemoteQuery( "FileTransfer/%s" % sDirection, fileInfo )
    self.__lockManager.lock( sDirection )
    try:
        try:
            fileHelper = FileHelper( self.__trPool.get( self.__trid ) )
            # Dispatch to the matching transfer_* handler.
            if sDirection == "fromClient":
                fileHelper.setDirection( "fromClient" )
                uRetVal = self.transfer_fromClient( fileInfo[0], fileInfo[1], fileInfo[2], fileHelper )
            elif sDirection == "toClient" :
                fileHelper.setDirection( "toClient" )
                uRetVal = self.transfer_toClient( fileInfo[0], fileInfo[1], fileHelper )
            elif sDirection == "bulkFromClient" :
                fileHelper.setDirection( "fromClient" )
                uRetVal = self.transfer_bulkFromClient( fileInfo[0], fileInfo[1], fileInfo[2], fileHelper )
            elif sDirection == "bulkToClient" :
                fileHelper.setDirection( "toClient" )
                uRetVal = self.transfer_bulkToClient( fileInfo[0], fileInfo[1], fileHelper )
            elif sDirection == "listBulk":
                fileHelper.setDirection( "toClient" )
                uRetVal = self.transfer_listBulk( fileInfo[0], fileInfo[1], fileHelper )
            else:
                return S_ERROR( "Direction %s does not exist!!!" % sDirection )
            if uRetVal[ 'OK' ] and not fileHelper.finishedTransmission():
                # The handler claimed success but the stream was not drained.
                gLogger.error( "You haven't finished receiving/sending the file", str( fileInfo ) )
                return S_ERROR( "Incomplete transfer" )
            return uRetVal
        finally:
            # Release the transfer lock on every exit path.
            self.__lockManager.unlock( sDirection )
    except Exception, v:  # Python-2 except syntax
        gLogger.exception( "Uncaught exception when serving Transfer", "%s" % sDirection )
        return S_ERROR( "Server error while serving %s: %s" % ( sDirection, str( v ) ) )
def __cbDisconnect( self, trid ):
    """Transport disconnect callback: run all 'drop' callbacks and clear the
    stored transport id. Ignores the call if no transport is active."""
    if not self.__trid:
        return
    if self.__trid != trid:
        # A disconnect for a transport we do not own: refuse and report.
        gLogger.error( "OOps. trid's don't match. This shouldn't happen! (%s vs %s)" % ( self.__trid, trid ) )
        return S_ERROR( "OOOPS" )
    for cb in self.__specialCallbacks[ 'drop' ]:
        try:
            cb( self )
        except:
            # Callbacks must not prevent the disconnect from completing.
            gLogger.exception( "Exception while processing disconnect callbacks" )
    # Mark the client as disconnected.
    self.__trid = False
def __checkThreadID(self):
    """Pin this client to the first thread that uses it and log an error if it
    is later used from a different thread (the call itself is not blocked)."""
    if not self.__initStatus['OK']:
        return self.__initStatus
    cThID = thread.get_ident()  # Python-2 'thread' module: current thread id
    if not self.__allowedThreadID:
        # First use: pin the client to the current thread.
        self.__allowedThreadID = cThID
    elif cThID != self.__allowedThreadID:
        msgTxt = """
=======DISET client thread safety error========================
Client %s can only run on thread %s
and this is thread %s
===============================================================""" % (str(self), self.__allowedThreadID, cThID)
        gLogger.error("DISET client thread safety error", msgTxt)
def __selectUrl(self, notselect, urls):
    """Pick a URL hosted on a different machine than `notselect`.

    In case multiple services run on the same host, a new url has to be on
    a different host. If no candidate lives on a different host, None is
    returned (the caller keeps the already-selected url).
    """
    for candidate in urls:
        parsed = Network.splitURL(candidate)
        if not parsed['OK']:
            gLogger.error(parsed['Message'])
            continue
        if parsed['Value'][1] != notselect[1]:  # the hosts are different
            return candidate
    return None
def __selectUrl( self, notselect, urls ):
    """In case when multiple services are running in the same host, a new url has to be in a different host
    Note: If we do not have different host we will use the selected url...

    :param notselect: split-URL tuple of the already selected url (index 1 is the host)
    :param urls: candidate url strings
    :return: the first url on a different host, or None
    """
    url = None
    for i in urls:
        retVal = Network.splitURL( i )
        if retVal['OK']:
            if retVal['Value'][1] != notselect[1]:  # the hosts are different
                url = i
                break
            else:
                gLogger.error( retVal['Message'] )
    return url
def __checkExpectedArgumentTypes(self, method, args):
    """
    Check that the arguments received match the ones expected

    :type method: string
    :param method: Method to check against
    :type args: tuple
    :param args: Arguments to check
    :return: S_OK/S_ERROR
    """
    # Expected types are declared on the handler as 'types_<method>'.
    sListName = "types_%s" % method
    try:
        oTypesList = getattr(self, sListName)
    except Exception:
        gLogger.error("There's no types info for method", "export_%s" % method)
        return S_ERROR(
            "Handler error for server %s while processing method %s"
            % (self.serviceInfoDict["serviceName"], method))
    try:
        mismatch = False
        # Extra positional arguments beyond the declared list are not checked.
        for iIndex in range(min(len(oTypesList), len(args))):
            # If None skip the parameter
            if oTypesList[iIndex] is None:
                continue
            # If parameter is a list or a tuple check types inside
            elif isinstance(oTypesList[iIndex], (tuple, list)):
                if not isinstance(args[iIndex], tuple(oTypesList[iIndex])):
                    mismatch = True
            # else check the parameter
            elif not isinstance(args[iIndex], oTypesList[iIndex]):
                mismatch = True
            # Has there been a mismatch?
            if mismatch:
                sError = "Type mismatch in parameter %d (starting with param 0) Received %s, expected %s" % (
                    iIndex,
                    type(args[iIndex]),
                    str(oTypesList[iIndex]),
                )
                return S_ERROR(sError)
        if len(args) < len(oTypesList):
            return S_ERROR("Function %s expects at least %s arguments" % (method, len(oTypesList)))
    except Exception as v:
        # Defensive: a malformed types declaration must not crash the service.
        sError = "Error in parameter check: %s" % str(v)
        gLogger.exception(sError)
        return S_ERROR(sError)
    return S_OK()
def __call__(self, *args):
    """Invoke the wrapped SSL method under the lock, mapping clean SSL
    shutdowns (ZeroReturnError, or error code -1) to a 0 return value."""
    self.lock()
    try:
        try:
            returnValue = self.cMethod(*args)
        except GSI.SSL.ZeroReturnError:
            # Clean SSL shutdown by the peer: report 0 bytes, not an error.
            returnValue = 0
        except Exception as v:
            # NOTE(review): indexing the exception (v[0]) is a Python-2-era
            # pattern; on Python 3 plain exceptions are not subscriptable —
            # presumably GSI errors provide indexable args here. TODO confirm.
            if v[0] == -1:
                return 0
            else:
                gLogger.error("ERROR while executing", "%s( %s ) (%s)" % (self.sFunctionName, str(args)[1:-2], str(v)))
                raise v
    finally:
        # Always release the lock, whatever the outcome.
        self.unlock()
    return returnValue
def __checkThreadID( self ):
    """Pin this client to the first thread that uses it and log an error if it
    is later used from a different thread (the call itself is not blocked)."""
    if not self.__initStatus[ 'OK' ]:
        return self.__initStatus
    cThID = thread.get_ident()  # Python-2 'thread' module: current thread id
    if not self.__allowedThreadID:
        # First use: pin the client to the current thread.
        self.__allowedThreadID = cThID
    elif cThID != self.__allowedThreadID :
        msgTxt = """
=======DISET client thread safety error========================
Client %s can only run on thread %s
and this is thread %s
===============================================================""" % ( str( self ), self.__allowedThreadID, cThID )
        gLogger.error( "DISET client thread safety error", msgTxt )
def __doRPC( self, method ):
    """
    Execute an RPC action

    @type method: string
    @param method: Method to execute
    @return: S_OK/S_ERROR
    """
    received = self.__trPool.receive( self.__trid )
    if not received[ 'OK' ]:
        gLogger.error( "Error receiving arguments", "%s %s" % ( self.srv_getFormattedRemoteCredentials(),
                                                                received[ 'Message' ] ) )
        return S_ERROR( "Error while receiving function arguments: %s" % received[ 'Message' ] )
    rpcArgs = received[ 'Value' ]
    self.__logRemoteQuery( "RPC/%s" % method, rpcArgs )
    return self.__RPCCallFunction( method, rpcArgs )
def __call__( self, *args ):
    """Invoke the wrapped SSL method under the lock, mapping clean SSL
    shutdowns (ZeroReturnError, or error code -1) to a 0 return value."""
    self.lock()
    try:
        try:
            returnValue = apply( self.cMethod , args )  # Python-2 apply()
        except GSI.SSL.ZeroReturnError:
            # Clean SSL shutdown by the peer: report 0 bytes, not an error.
            returnValue = 0
        except Exception, v:  # Python-2 except syntax
            # presumably a GSI error tuple whose first element is the error
            # code; -1 is treated as a clean shutdown as well — TODO confirm
            if v[0] == -1:
                return 0
            else:
                gLogger.error( "ERROR while executing = %s( %s ) (%s)" % ( self.sFunctionName, str( args )[1:-2], str( v ) ) )
                raise v
    finally:
        # Always release the lock, whatever the outcome.
        self.unlock()
    return returnValue
def loadObjects(path, reFilter=None, parentClass=None):
    """Load all matching classes under ``path`` from DIRAC and its extensions.

    :param path: '/'-separated path below each (extension-)DIRAC root,
        e.g. "AccountingSystem/Client/Types"
    :param reFilter: compiled regex selecting which module files to consider;
        defaults to modules whose name ends in a lowercase letter or digit
    :param parentClass: if given, only classes that subclass it are kept
        (the parent class itself is skipped)
    :return: dict mapping python class name -> loaded class object
    """
    if not reFilter:
        # NOTE(review): the historical default character class [a-z1-9]
        # skips '0' — kept as-is to preserve behavior. Raw string avoids an
        # invalid-escape warning for '\.'.
        reFilter = re.compile(r".*[a-z1-9]\.py$")
    pathList = List.fromChar(path, "/")
    # Extensions are searched first so they can shadow vanilla DIRAC modules.
    parentModuleList = ["%sDIRAC" % ext for ext in CSGlobals.getCSExtensions()] + ['DIRAC']
    objectsToLoad = {}
    # Find which object files match
    for parentModule in parentModuleList:
        objDir = os.path.join(DIRAC.rootPath, parentModule, *pathList)
        if not os.path.isdir(objDir):
            continue
        for objFile in os.listdir(objDir):
            if reFilter.match(objFile):
                pythonClassName = objFile[:-3]
                if pythonClassName not in objectsToLoad:
                    gLogger.info("Adding to load queue %s/%s/%s" % (parentModule, path, pythonClassName))
                    objectsToLoad[pythonClassName] = parentModule
    # Load them!
    loadedObjects = {}
    for pythonClassName in objectsToLoad:
        parentModule = objectsToLoad[pythonClassName]
        try:
            # Where parentModule can be DIRAC, pathList is something like
            # ["AccountingSystem", "Client", "Types"] and the python class
            # name is.. well, the python class name
            objPythonPath = "%s.%s.%s" % (parentModule, ".".join(pathList), pythonClassName)
            objModule = __import__(objPythonPath, globals(), locals(), pythonClassName)
            objClass = getattr(objModule, pythonClassName)
        except Exception as e:  # BUGFIX: modern (py2.6+/py3) except syntax
            gLogger.error("Can't load type %s/%s: %s" % (parentModule, pythonClassName, str(e)))
            continue
        if parentClass == objClass:
            continue
        if parentClass and not issubclass(objClass, parentClass):
            gLogger.warn("%s is not a subclass of %s. Skipping" % (objClass, parentClass))
            continue
        gLogger.info("Loaded %s" % objPythonPath)
        loadedObjects[pythonClassName] = objClass
    # BUGFIX: the result dict was built but never returned (the function
    # implicitly returned None).
    return loadedObjects
def main():
    """Entry point for the standalone Tornado-based master Configuration Server.

    Refuses to start unless DIRAC_USE_TORNADO_IOLOOP is enabled, loads the
    local configuration, then launches a TornadoServer serving only the
    Configuration/Server service (on the master CS port if one is configured).
    """
    # The env check runs before the DIRAC imports below so a misconfigured
    # environment fails fast with a clear message
    if os.environ.get("DIRAC_USE_TORNADO_IOLOOP", "false").lower() not in ("yes", "true"):
        raise RuntimeError(
            "DIRAC_USE_TORNADO_IOLOOP is not defined in the environment."
            + "\n"
            + "It is necessary to run with Tornado."
            + "\n"
            + "https://dirac.readthedocs.io/en/latest/DeveloperGuide/TornadoServices/index.html"
        )
    from DIRAC.ConfigurationSystem.Client.PathFinder import getServiceSection
    from DIRAC.ConfigurationSystem.Client.ConfigurationData import gConfigurationData
    from DIRAC.ConfigurationSystem.private.Refresher import gRefresher
    from DIRAC.Core.Utilities.DErrno import includeExtensionErrors
    from DIRAC.Core.Tornado.Server.TornadoServer import TornadoServer
    from DIRAC.FrameworkSystem.Client.Logger import gLogger

    # The master CS owns the configuration data, so it must not refresh
    # itself from another server
    if gConfigurationData.isMaster():
        gRefresher.disable()

    localCfg = Script.localCfg
    localCfg.addMandatoryEntry("/DIRAC/Setup")
    localCfg.addDefaultEntry("/DIRAC/Security/UseServerCertificate", "yes")
    localCfg.addDefaultEntry("LogLevel", "INFO")
    localCfg.addDefaultEntry("LogColor", True)
    resultDict = localCfg.loadUserData()
    if not resultDict["OK"]:
        # Initialize the logger just enough to be able to report the failure
        gLogger.initialize("Tornado-CS", "/")
        gLogger.error("There were errors when loading configuration", resultDict["Message"])
        sys.exit(1)

    includeExtensionErrors()

    gLogger.initialize("Tornado-CS", "/")

    # get the specific master CS port
    try:
        csPort = int(gConfigurationData.extractOptionFromCFG("%s/Port" % getServiceSection("Configuration/Server")))
    except TypeError:
        # No port configured (extractOptionFromCFG returned None):
        # let TornadoServer fall back to its default port
        csPort = None

    serverToLaunch = TornadoServer(services="Configuration/Server", port=csPort)
    serverToLaunch.startTornado()
def main():
    """Entry point for the generic Tornado services/endpoints runner.

    Refuses to start unless DIRAC_USE_TORNADO_IOLOOP is enabled, loads the
    local configuration, refuses to run alongside a master CS served over
    HTTPS in the same process, then starts a TornadoServer hosting the
    configured endpoints.
    """
    # The env check runs before the DIRAC imports below so a misconfigured
    # environment fails fast with a clear message
    if os.environ.get("DIRAC_USE_TORNADO_IOLOOP", "false").lower() not in ("yes", "true"):
        raise RuntimeError(
            "DIRAC_USE_TORNADO_IOLOOP is not defined in the environment."
            + "\n"
            + "It is necessary to run with Tornado."
            + "\n"
            + "https://dirac.readthedocs.io/en/latest/DeveloperGuide/TornadoServices/index.html"
        )
    from DIRAC import gConfig
    from DIRAC.ConfigurationSystem.Client import PathFinder
    from DIRAC.ConfigurationSystem.Client.ConfigurationData import gConfigurationData
    from DIRAC.Core.Tornado.Server.TornadoServer import TornadoServer
    from DIRAC.Core.Utilities.DErrno import includeExtensionErrors
    from DIRAC.FrameworkSystem.Client.Logger import gLogger

    localCfg = Script.localCfg
    localCfg.setConfigurationForServer("Tornado/Tornado")
    localCfg.addMandatoryEntry("/DIRAC/Setup")
    localCfg.addDefaultEntry("/DIRAC/Security/UseServerCertificate", "yes")
    localCfg.addDefaultEntry("LogLevel", "INFO")
    localCfg.addDefaultEntry("LogColor", True)
    resultDict = localCfg.loadUserData()
    if not resultDict["OK"]:
        # Initialize the logger just enough to be able to report the failure
        gLogger.initialize("Tornado", "/")
        gLogger.error("There were errors when loading configuration", resultDict["Message"])
        sys.exit(1)

    includeExtensionErrors()
    gLogger.initialize("Tornado", "/")

    # We check if there is no configuration server started as master
    # If you want to start a master CS you should use Configuration_Server.cfg and
    # use tornado-start-CS.py
    key = "/Systems/Configuration/%s/Services/Server/Protocol" % PathFinder.getSystemInstance("Configuration")
    if gConfigurationData.isMaster() and gConfig.getValue(key, "dips").lower() == "https":
        gLogger.fatal("You can't run the CS and services in the same server!")
        sys.exit(0)

    serverToLaunch = TornadoServer(endpoints=True)
    serverToLaunch.startTornado()
def __backupCurrentConfiguration(self, backupName):
    """Archive the current CS data file into a dated zip backup.

    The backup is written under <backupDir>/<year>/<month>/ with the
    data file name rewritten as <name>.<backupName>.zip. A missing data
    file is only logged, never treated as fatal.

    :param backupName: label (e.g. a version tag) embedded in the backup name
    """
    configurationFilename = "%s.cfg" % self.getName()
    configurationFile = os.path.join(DIRAC.rootPath, "etc", configurationFilename)
    today = Time.date()
    backupPath = os.path.join(self.getBackupDir(), str(today.year), "%02d" % today.month)
    mkDir(backupPath)
    backupFile = os.path.join(backupPath, configurationFilename.replace(".cfg", ".%s.zip" % backupName))
    # Guard clause: nothing to back up if the data file is absent
    if not os.path.isfile(configurationFile):
        gLogger.warn("CS data file does not exist", configurationFile)
        return
    gLogger.info("Making a backup of configuration in %s" % backupFile)
    try:
        with zipfile.ZipFile(backupFile, "w", zipfile.ZIP_DEFLATED) as zf:
            zf.write(configurationFile, "%s.backup.%s" % (os.path.split(configurationFile)[1], backupName))
    except Exception:
        gLogger.exception()
        gLogger.error("Cannot backup configuration data file", "file %s" % backupFile)
def __checkExpectedArgumentTypes(self, method, args):
    """
    Check that the arguments received match the ones expected

    @type method: string
    @param method: Method to check against
    @type args: tuple
    @param args: Arguments to check
    @return: S_OK/S_ERROR
    """
    sListName = "types_%s" % method
    try:
        oTypesList = getattr(self, sListName)
    except AttributeError:
        # Narrowed from a bare "except:": only a missing types_<method>
        # attribute is expected here
        gLogger.error("There's no types info for method export_%s" % method)
        return S_ERROR(
            "Handler error for server %s while processing method %s" % (self.serviceInfoDict["serviceName"], method)
        )
    try:
        mismatch = False
        for iIndex in range(min(len(oTypesList), len(args))):
            # If None skip a parameter (any type is accepted)
            if oTypesList[iIndex] is None:
                continue
            # If parameter is a list or a tuple check types inside
            elif type(oTypesList[iIndex]) in (types.TupleType, types.ListType):
                if not type(args[iIndex]) in oTypesList[iIndex]:
                    mismatch = True
            # else check the parameter (exact type match is intentional:
            # RPC argument types must match the declaration precisely)
            elif not type(args[iIndex]) == oTypesList[iIndex]:
                mismatch = True
            # Has there been a mismatch?
            if mismatch:
                sError = "Type mismatch in parameter %d (starting with param 0) Received %s, expected %s" % (
                    iIndex,
                    type(args[iIndex]),
                    str(oTypesList[iIndex]),
                )
                return S_ERROR(sError)
        if len(args) < len(oTypesList):
            return S_ERROR("Function %s expects at least %s arguments" % (method, len(oTypesList)))
    except Exception as v:
        sError = "Error in parameter check: %s" % str(v)
        gLogger.exception(sError)
        return S_ERROR(sError)
    # BUG FIX: all checks passed — previously the function fell off the end
    # and returned None, which breaks callers that test result['OK']
    return S_OK()
def initialize(cls, script=False, ignoreErrors=False, initializeMonitor=False, enableCommandLine=False):
    """Initialize the script environment; runs at most once per process.

    :param str script: script name
    :param bool ignoreErrors: ignore errors when loading configuration
    :param bool initializeMonitor: to use monitoring
    :param bool enableCommandLine: enable parse command line
    :return: True on the first successful call, False on repeated calls
    """
    # Please do not call initialize in every file
    if cls.alreadyInitialized:
        return False
    cls.alreadyInitialized = True

    # Remember whether the user explicitly disabled the CS, then keep it
    # disabled while the local configuration is assembled
    userDisabled = not cls.localCfg.isCSEnabled()
    if not userDisabled:
        cls.localCfg.disableCS()

    if not enableCommandLine:
        cls.localCfg.disableParsingCommandLine()

    if script:
        cls.scriptName = script
        cls.localCfg.setConfigurationForScript(cls.scriptName)

    if not ignoreErrors:
        cls.localCfg.addMandatoryEntry("/DIRAC/Setup")
    resultDict = cls.localCfg.loadUserData()
    if not ignoreErrors and not resultDict["OK"]:
        gLogger.error("There were errors when loading configuration", resultDict["Message"])
        sys.exit(1)

    # Re-enable the CS only if the user had it enabled to begin with
    if not userDisabled:
        cls.localCfg.enableCS()

    if initializeMonitor:
        gMonitor.setComponentType(gMonitor.COMPONENT_SCRIPT)
        gMonitor.setComponentName(cls.scriptName)
        gMonitor.setComponentLocation("script")
        gMonitor.initialize()
    else:
        gMonitor.disable()
    includeExtensionErrors()

    return True
def __cbDisconnect(self, trid):
    """Transport-layer disconnect callback.

    Closes the transport bound to this client and runs every registered
    'drop' callback, isolating callback failures from each other.

    :param trid: transport id of the dropped connection; must match the
                 one this client is bound to
    """
    if not self.__trid:
        return
    if self.__trid != trid:
        gLogger.error("OOps. trid's don't match. This shouldn't happen!", "(%s vs %s)" % (self.__trid, trid))
        return S_ERROR("OOOPS")
    self.__trid = False
    # Best-effort close: the transport may already be gone
    try:
        self.__transport.close()
    except BaseException:
        pass
    for dropCallback in self.__specialCallbacks['drop']:
        try:
            dropCallback(self)
        except SystemExit:
            # Let a deliberate shutdown propagate
            raise
        except BaseException:
            gLogger.exception("Exception while processing disconnect callbacks")
def loadObjects(path, reFilter=None, parentClass=None):
    """Load all python classes found under ``path`` in DIRAC and its extensions.

    NOTE(review): this appears to duplicate another loadObjects definition in
    this file — consider consolidating.

    :param str path: module path relative to each extension root,
                     e.g. "AccountingSystem/Client/Types"
    :param reFilter: compiled regex selecting file names to load
                     (default: names ending in [a-z1-9].py)
    :param parentClass: if given, keep only subclasses of it
                        (the parent class itself is skipped)
    :return: dict mapping python class name -> loaded class object
    """
    if not reFilter:
        reFilter = re.compile(r".*[a-z1-9]\.py$")
    pathList = List.fromChar(path, "/")
    parentModuleList = ["%sDIRAC" % ext for ext in CSGlobals.getCSExtensions()] + ['DIRAC']
    objectsToLoad = {}
    # Find which object files match
    for parentModule in parentModuleList:
        objDir = os.path.join(DIRAC.rootPath, parentModule, *pathList)
        if not os.path.isdir(objDir):
            continue
        for objFile in os.listdir(objDir):
            if reFilter.match(objFile):
                pythonClassName = objFile[:-3]
                # First extension that provides a name wins
                if pythonClassName not in objectsToLoad:
                    gLogger.info("Adding to load queue %s/%s/%s" % (parentModule, path, pythonClassName))
                    objectsToLoad[pythonClassName] = parentModule
    # Load them!
    loadedObjects = {}
    for pythonClassName in objectsToLoad:
        parentModule = objectsToLoad[pythonClassName]
        try:
            # Where parentModule can be DIRAC, pathList is something like ["AccountingSystem", "Client", "Types"]
            # And the python class name is.. well, the python class name
            objPythonPath = "%s.%s.%s" % (parentModule, ".".join(pathList), pythonClassName)
            objModule = __import__(objPythonPath, globals(), locals(), pythonClassName)
            objClass = getattr(objModule, pythonClassName)
        except Exception as e:
            gLogger.error("Can't load type %s/%s: %s" % (parentModule, pythonClassName, str(e)))
            continue
        if parentClass == objClass:
            continue
        if parentClass and not issubclass(objClass, parentClass):
            gLogger.warn("%s is not a subclass of %s. Skipping" % (objClass, parentClass))
            continue
        gLogger.info("Loaded %s" % objPythonPath)
        loadedObjects[pythonClassName] = objClass
    # BUG FIX: the result dict was built but never returned to the caller
    return loadedObjects
def dict_to_object(self, dataDict):
    """Reconstruct an FTS3 object from its JSON dictionary representation.

    Dictionaries without the '__type__'/'__module__' markers pass through
    unchanged. Otherwise the named class is imported and instantiated, and
    its attributes are filled from the remaining entries: None values are
    skipped (so SQLAlchemy defaults stay in effect) and names listed under
    '__datetime__' are parsed back into datetime objects. On any failure
    the markers are restored and the plain dictionary is returned.
    """
    import importlib

    # Not an encoded object: pass the structure through untouched
    if '__type__' not in dataDict or '__module__' not in dataDict:
        return dataDict

    className = dataDict.pop('__type__')
    modName = dataDict.pop('__module__')
    datetimeAttributes = dataDict.pop('__datetime__', [])
    datetimeSet = set(datetimeAttributes)
    try:
        # Resolve and instantiate the target class
        targetClass = getattr(importlib.import_module(modName), className)
        obj = targetClass()
        for attrName, attrValue in dataDict.items():
            # Skip None so SQLAlchemy column defaults are not clobbered
            if attrValue is None:
                continue
            if attrName in datetimeSet:
                attrValue = datetime.datetime.strptime(attrValue, FTS3Serializable._datetimeFormat)
            setattr(obj, attrName, attrValue)
        return obj
    except Exception as e:
        gLogger.error('exception in FTS3JSONDecoder %s for type %s' % (e, className))
        # Restore the markers so the caller still sees the original structure
        dataDict['__type__'] = className
        dataDict['__module__'] = modName
        dataDict['__datetime__'] = datetimeAttributes
        return dataDict
def initialize(script=False, ignoreErrors=False, initializeMonitor=False, enableCommandLine=False):
    """Initialize the script environment; runs at most once per process.

    :param str script: script name
    :param bool ignoreErrors: ignore errors when loading configuration
    :param bool initializeMonitor: to use monitoring
    :param bool enableCommandLine: enable parse command line
    :return: True on the first successful call, False on repeated calls
    """
    global scriptName, gIsAlreadyInitialized

    # Please do not call initialize in every file
    if gIsAlreadyInitialized:
        return False
    gIsAlreadyInitialized = True

    # Keep the CS disabled while the local configuration is assembled,
    # remembering whether the user had disabled it explicitly
    userDisabled = not localCfg.isCSEnabled()
    if not userDisabled:
        localCfg.disableCS()

    if not enableCommandLine:
        localCfg.disableParsingCommandLine()

    if script:
        scriptName = script
        localCfg.setConfigurationForScript(scriptName)

    if not ignoreErrors:
        localCfg.addMandatoryEntry("/DIRAC/Setup")
    resultDict = localCfg.loadUserData()
    if not ignoreErrors and not resultDict['OK']:
        gLogger.error("There were errors when loading configuration", resultDict['Message'])
        sys.exit(1)

    # Re-enable the CS only if the user had it enabled to begin with
    if not userDisabled:
        localCfg.enableCS()

    if initializeMonitor:
        gMonitor.setComponentType(gMonitor.COMPONENT_SCRIPT)
        gMonitor.setComponentName(scriptName)
        gMonitor.setComponentLocation("script")
        gMonitor.initialize()
    else:
        gMonitor.disable()
    includeExtensionErrors()
    return True
def __checkThreadID(self):
    """
    ..warning:: just guessing....
    This seems to check that we are not creating a client and then using it
    in a multithreaded environment. However, it is triggered only if
    self.__enableThreadCheck is to True, but it is hardcoded to False, and
    does not seem to be modified anywhere in the code.
    """
    # Propagate any initialization failure instead of running the check
    if not self.__initStatus['OK']:
        return self.__initStatus
    # NOTE(review): 'thread' is the Python 2 module name (renamed _thread in py3)
    cThID = thread.get_ident()
    if not self.__allowedThreadID:
        # First call: pin the client to the current thread
        self.__allowedThreadID = cThID
    elif cThID != self.__allowedThreadID:
        msgTxt = """
=======DISET client thread safety error========================
Client %s can only run on thread %s
and this is thread %s
===============================================================""" % (str(self), self.__allowedThreadID, cThID)
        # Only logged, never raised: the client keeps working from the
        # "wrong" thread after the warning
        gLogger.error("DISET client thread safety error", msgTxt)
def checkSanity(urlTuple, kwargs):
    """ Check that all ssl environment is ok

    Validates that the credentials selected via ``kwargs`` exist on disk
    and are not expired, and that CAs are present unless explicitly skipped.

    :param urlTuple: parsed URL tuple (not used by the checks below)
    :param kwargs: connection keyword arguments; recognised keys are
                   'useCertificates', 'proxyString', 'proxyLocation',
                   'skipCACheck'
    :return: S_OK(idDict) where idDict holds 'DN' and optionally 'group',
             or S_ERROR describing the failed check
    """
    useCerts = False
    certFile = ''
    if "useCertificates" in kwargs and kwargs['useCertificates']:
        # Host certificate/key pair requested
        certTuple = Locations.getHostCertificateAndKeyLocation()
        if not certTuple:
            gLogger.error("No cert/key found! ")
            return S_ERROR("No cert/key found! ")
        certFile = certTuple[0]
        useCerts = True
    elif "proxyString" in kwargs:
        # In-memory proxy: only its type can be validated at this point
        # NOTE(review): basestring is Python 2 only
        if not isinstance(kwargs['proxyString'], basestring):
            gLogger.error("proxyString parameter is not a valid type", str(type(kwargs['proxyString'])))
            return S_ERROR("proxyString parameter is not a valid type")
    else:
        # Proxy on disk: explicit location or the default discovery
        if "proxyLocation" in kwargs:
            certFile = kwargs["proxyLocation"]
        else:
            certFile = Locations.getProxyLocation()
        if not certFile:
            gLogger.error("No proxy found")
            return S_ERROR("No proxy found")
        elif not os.path.isfile(certFile):
            gLogger.error("Proxy file does not exist", certFile)
            return S_ERROR("%s proxy file does not exist" % certFile)
    # For certs always check CA's. For clients skipServerIdentityCheck
    if 'skipCACheck' not in kwargs or not kwargs['skipCACheck']:
        if not Locations.getCAsLocation():
            gLogger.error("No CAs found!")
            return S_ERROR("No CAs found!")
    # Load the credential object matching what was selected above
    if "proxyString" in kwargs:
        certObj = X509Chain()
        retVal = certObj.loadChainFromString(kwargs['proxyString'])
        if not retVal['OK']:
            gLogger.error("Can't load proxy string")
            return S_ERROR("Can't load proxy string")
    else:
        if useCerts:
            certObj = X509Certificate()
            certObj.loadFromFile(certFile)
        else:
            certObj = X509Chain()
            certObj.loadChainFromFile(certFile)
    retVal = certObj.hasExpired()
    if not retVal['OK']:
        gLogger.error("Can't verify proxy or certificate file", "%s:%s" % (certFile, retVal['Message']))
        return S_ERROR("Can't verify file %s:%s" % (certFile, retVal['Message']))
    else:
        if retVal['Value']:
            # Expired: try to report until when it was valid
            notAfter = certObj.getNotAfterDate()
            if notAfter['OK']:
                notAfter = notAfter['Value']
            else:
                notAfter = "unknown"
            gLogger.error("PEM file has expired", "%s is not valid after %s" % (certFile, notAfter))
            return S_ERROR("PEM file %s has expired, not valid after %s" % (certFile, notAfter))
    # Build the identity summary returned to the caller
    idDict = {}
    retVal = certObj.getDIRACGroup(ignoreDefault=True)
    if retVal['OK'] and retVal['Value'] != False:
        idDict['group'] = retVal['Value']
    if useCerts:
        idDict['DN'] = certObj.getSubjectDN()['Value']
    else:
        # For a proxy chain the identity DN is that of the issuer certificate
        idDict['DN'] = certObj.getIssuerCert()['Value'].getSubjectDN()['Value']
    return S_OK(idDict)
def __refreshInThread(self):
    """Run one configuration refresh; failures are logged, never raised."""
    result = self.__refresh()
    if not result['OK']:
        gLogger.error("Error while updating the configuration", result['Message'])
# Service launcher: determine which server to run from the command line,
# load its configuration, then hand control to the ServiceReactor.
positionalArgs = localCfg.getPositionalArguments()
if len(positionalArgs) == 0:
    gLogger.fatal("You must specify which server to run!")
    sys.exit(1)
serverName = positionalArgs[0]
# The service name selects which configuration section is consulted below
localCfg.setConfigurationForServer(serverName)
localCfg.addMandatoryEntry("Port")
#localCfg.addMandatoryEntry( "HandlerPath" )
localCfg.addMandatoryEntry("/DIRAC/Setup")
localCfg.addDefaultEntry("/DIRAC/Security/UseServerCertificate", "yes")
localCfg.addDefaultEntry("LogLevel", "INFO")
localCfg.addDefaultEntry("LogColor", True)
resultDict = localCfg.loadUserData()
if not resultDict['OK']:
    # Initialize the logger just enough to report the configuration failure
    gLogger.initialize(serverName, "/")
    gLogger.error("There were errors when loading configuration", resultDict['Message'])
    sys.exit(1)
serverToLaunch = ServiceReactor()
result = serverToLaunch.initialize(positionalArgs)
if not result['OK']:
    gLogger.error(result['Message'])
    sys.exit(1)
# serve() blocks until the reactor stops; reaching the error branch is fatal
result = serverToLaunch.serve()
if not result['OK']:
    gLogger.error(result['Message'])
    sys.exit(1)