def __sockConnect(self, hostAddress, sockType, timeout, retries):
    """Open a TCP connection to hostAddress, retrying on timeouts.

    :param hostAddress: (host, port) tuple to connect to
    :param sockType: socket address family (e.g. socket.AF_INET)
    :param timeout: seconds to wait in select() while the connect is in progress
    :param retries: remaining retry attempts after a "timed out" error
    :return: S_OK(socket) / S_ERROR
    """
    try:
        osSocket = socket.socket(sockType, socket.SOCK_STREAM)
    except socket.error as e:
        gLogger.warn("Exception while creating a socket:", str(e))
        return S_ERROR("Exception while creating a socket:%s" % str(e))
    # osSocket.setblocking( 0 )
    if timeout:
        tsocket = self.getSocketTimeout()
        gLogger.debug("Connection timeout set to: ", tsocket)
        osSocket.settimeout(tsocket)
    # we try to connect 3 times with 1 second timeout
    try:
        osSocket.connect(hostAddress)
    # BUGFIX: legacy "except socket.error , e" syntax replaced with "as"
    except socket.error as e:
        if e.args[0] == "timed out":
            osSocket.close()
            if retries:
                return self.__sockConnect(hostAddress, sockType, timeout, retries - 1)
            else:
                return S_ERROR("Can't connect: %s" % str(e))
        if e.args[0] not in (114, 115):  # EALREADY / EINPROGRESS
            return S_ERROR("Can't connect: %s" % str(e))
        # Connect in progress: wait until the socket becomes writable
        oL = select.select([], [osSocket], [], timeout)[1]
        if len(oL) == 0:
            osSocket.close()
            return S_ERROR("Connection timeout")
        # BUGFIX: renamed local so it no longer shadows the stdlib errno module
        sockErrno = osSocket.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR)
        if sockErrno != 0:
            return S_ERROR("Can't connect: %s" % str((sockErrno, os.strerror(sockErrno))))
    # BUGFIX: the connected socket was never returned on success (returned None)
    return S_OK(osSocket)
def close(self):
    """Best-effort shutdown of both socket directions, then close the handle."""
    gLogger.debug("Closing socket")
    try:
        self.oSocket.shutdown(socket.SHUT_RDWR)
    except BaseException:
        pass  # peer may already be gone; shutdown failure is not fatal
    self.oSocket.close()
def close(self):
    """Shut down both socket directions (best effort) and close the socket."""
    gLogger.debug("Closing socket")
    try:
        self.oSocket.shutdown(socket.SHUT_RDWR)
    # BUGFIX: bare "except:" narrowed — no longer swallows SystemExit/KeyboardInterrupt
    except Exception:
        pass
    self.oSocket.close()
def _rh_executeAction(self, proposalTuple):
    """ Execute an action.

        @type proposalTuple: tuple
        @param proposalTuple: Type of action to execute. First position of
                              the tuple must be the type of action to
                              execute. The second position is the action itself.
    """
    actionTuple = proposalTuple[1]
    gLogger.debug("Executing %s:%s action" % actionTuple)
    startTime = time.time()
    actionType = actionTuple[0]
    self.serviceInfoDict['actionTuple'] = actionTuple
    try:
        if actionType == "RPC":
            retVal = self.__doRPC(actionTuple[1])
        elif actionType == "FileTransfer":
            retVal = self.__doFileTransfer(actionTuple[1])
        elif actionType == "Connection":
            retVal = self.__doConnection(actionTuple[1])
        else:
            return S_ERROR("Unknown action %s" % actionType)
    # BUGFIX: legacy "except X, e" syntax replaced with "as" (Python 2.6+/3)
    except RequestHandler.ConnectionError as excp:
        gLogger.error("ConnectionError", str(excp))
        return S_ERROR(excp)
    # NOTE(review): retVal is computed but not used here — the response send
    # presumably follows in the original file; confirm against the full source.
def __init__(self, loadDefaultCFG=True):
    """Initialise the in-memory configuration holders (local/remote/merged CFGs).

    :param loadDefaultCFG: when True, attempt to load etc/dirac.cfg under DIRAC.rootPath
    """
    lockSetting = os.environ.get("DIRAC_FEWER_CFG_LOCKS", "no").lower()
    self.__locksEnabled = lockSetting not in ("y", "yes", "t", "true", "on", "1")
    if self.__locksEnabled:
        ring = LockRing()
        self.threadingEvent = ring.getEvent()
        self.threadingEvent.set()
        self.threadingLock = ring.getLock()
        self.runningThreadsNumber = 0
    self.__compressedConfigurationData = None
    self.configurationPath = "/DIRAC/Configuration"
    self.backupsDir = os.path.join(DIRAC.rootPath, "etc", "csbackup")
    self._isService = False
    self.localCFG = CFG()
    self.remoteCFG = CFG()
    self.mergedCFG = CFG()
    self.remoteServerList = []
    if loadDefaultCFG:
        cfgPath = os.path.join(DIRAC.rootPath, "etc", "dirac.cfg")
        gLogger.debug("dirac.cfg should be at", "%s" % cfgPath)
        loadResult = self.loadFile(cfgPath)
        if not loadResult["OK"]:
            gLogger.warn("Can't load %s file" % cfgPath)
    self.sync()
def _rh_executeAction(self, proposalTuple):
    """ Execute an action.

        @type proposalTuple: tuple
        @param proposalTuple: Type of action to execute. First position of the
                              tuple must be the type of action to execute. The
                              second position is the action itself.
    """
    # BUGFIX (doc): the docstring documented a non-existent parameter
    # "actionTuple"; the method's parameter is proposalTuple.
    actionTuple = proposalTuple[1]
    gLogger.debug("Executing %s:%s action" % actionTuple)
    startTime = time.time()
    actionType = actionTuple[0]
    if actionType == "RPC":
        retVal = self.__doRPC(actionTuple[1])
    elif actionType == "FileTransfer":
        retVal = self.__doFileTransfer(actionTuple[1])
    elif actionType == "Connection":
        retVal = self.__doConnection(actionTuple[1])
    else:
        raise Exception("Unknown action (%s)" % actionType)
    if not retVal:
        message = "Method %s for action %s does not have a return value!" % (
            actionTuple[1], actionTuple[0])
        gLogger.error(message)
        retVal = S_ERROR(message)
    self.__logRemoteQueryResponse(retVal, time.time() - startTime)
    return self.__trPool.send(self.__trid, retVal)
def _connect(self):
    """Open the client transport to self.serviceURL, banning failing URLs and
    retrying (up to 5 attempts) before giving up.

    :return: S_OK(transport) / S_ERROR
    """
    self.__discoverExtraCredentials()
    if not self.__initStatus['OK']:
        return self.__initStatus
    if self.__enableThreadCheck:
        self.__checkThreadID()
    gLogger.debug("Connecting to: %s" % self.serviceURL)
    try:
        transport = gProtocolDict[self.__URLTuple[0]]['transport'](self.__URLTuple[1:3], **self.kwargs)
        retVal = transport.initAsClient()
        if not retVal['OK']:
            if self.__retry < 5:
                url = "%s://%s:%d/%s" % (self.__URLTuple[0], self.__URLTuple[1],
                                         int(self.__URLTuple[2]), self.__URLTuple[3])
                if url not in self.__bannedUrls:
                    gLogger.notice("URL banned", "%s" % url)
                    self.__bannedUrls += [url]
                self.__retry += 1
                gLogger.info("Retry connection: ", "%d" % self.__retry)
                time.sleep(self.__retryDelay)
                self.__discoverURL()
                return self._connect()
            else:
                return S_ERROR("Can't connect to %s: %s" % (self.serviceURL, retVal))
    # BUGFIX: legacy "except Exception, e" syntax replaced with "as"
    except Exception as e:
        return S_ERROR("Can't connect to %s: %s" % (self.serviceURL, e))
    # BUGFIX: the success path previously fell through, returning None
    return S_OK(transport)
def _connect(self):
    """Open the client transport to self.serviceURL, banning failing URLs and
    retrying over all known service URLs before giving up. When every URL has
    been banned the retry delay is increased before the next sweep.

    :return: S_OK(transport) / S_ERROR
    """
    self.__discoverExtraCredentials()
    if not self.__initStatus['OK']:
        return self.__initStatus
    if self.__enableThreadCheck:
        self.__checkThreadID()
    gLogger.debug("Connecting to: %s" % self.serviceURL)
    try:
        transport = gProtocolDict[self.__URLTuple[0]]['transport'](self.__URLTuple[1:3], **self.kwargs)
        retVal = transport.initAsClient()
        if not retVal['OK']:
            if self.__retry < self.__nbOfRetry * self.__nbOfUrls - 1:
                url = "%s://%s:%d/%s" % (self.__URLTuple[0], self.__URLTuple[1],
                                         int(self.__URLTuple[2]), self.__URLTuple[3])
                if url not in self.__bannedUrls:
                    gLogger.notice("URL banned", "%s" % url)
                    self.__bannedUrls += [url]
                self.__retry += 1
                gLogger.info("Retry connection: ", "%d" % self.__retry)
                if (len(self.__bannedUrls) == self.__nbOfUrls):
                    # All URLs failed at least once; back off before the next sweep.
                    # With a single service there is no alternative, so wait longer.
                    self.__retryDelay = 3. / self.__nbOfUrls if self.__nbOfUrls > 1 else 5  # we run only one service! In that case we increase the retry delay.
                    gLogger.info("Waiting %f second before retry all service(s)" % self.__retryDelay)
                time.sleep(self.__retryDelay)
                self.__discoverURL()
                return self._connect()
            else:
                return S_ERROR("Can't connect to %s: %s" % (self.serviceURL, retVal))
    # BUGFIX: legacy "except Exception, e" syntax replaced with "as"
    except Exception as e:
        return S_ERROR("Can't connect to %s: %s" % (self.serviceURL, e))
    # BUGFIX: the success path previously fell through, returning None
    return S_OK(transport)
def _rh_executeAction(self, proposalTuple):
    """ Execute an action.

        :type proposalTuple: tuple
        :param proposalTuple: Type of action to execute. First position of
                              the tuple must be the type of action to
                              execute. The second position is the action itself.
    """
    actionTuple = proposalTuple[1]
    gLogger.debug("Executing %s:%s action" % actionTuple)
    startTime = time.time()
    actionType = actionTuple[0]
    self.serviceInfoDict['actionTuple'] = actionTuple
    try:
        if actionType == "RPC":
            retVal = self.__doRPC(actionTuple[1])
        elif actionType == "FileTransfer":
            retVal = self.__doFileTransfer(actionTuple[1])
        elif actionType == "Connection":
            retVal = self.__doConnection(actionTuple[1])
        else:
            return S_ERROR("Unknown action %s" % actionType)
    # BUGFIX: legacy "except X, excp" syntax replaced with "as" (Python 2.6+/3)
    except RequestHandler.ConnectionError as excp:
        gLogger.error("ConnectionError", str(excp))
        return S_ERROR(excp)
    # NOTE(review): retVal is computed but not used here — the response send
    # presumably follows in the original file; confirm against the full source.
def close(self):
    """Close the socket, attempting a shutdown first (both best effort).

    BUGFIX: shutdown() and close() previously shared one try block, so any
    exception from shutdown() skipped close() entirely and leaked the socket.
    They are now attempted independently.
    NOTE(review): shutdown() is called with no arguments — presumably oSocket
    is an SSL connection object whose shutdown() takes none; a plain socket
    would require a 'how' argument. Confirm against the socket factory.
    """
    gLogger.debug("Closing socket")
    try:
        self.oSocket.shutdown()
    except Exception:
        pass
    try:
        self.oSocket.close()
    except Exception:
        pass
def _rh_executeAction(self, proposalTuple):
    """ Execute an action.

        :type proposalTuple: tuple
        :param proposalTuple: Type of action to execute. First position of
                              the tuple must be the type of action to
                              execute. The second position is the action itself.
    """
    actionTuple = proposalTuple[1]
    gLogger.debug("Executing %s:%s action" % tuple(actionTuple))
    startTime = time.time()
    actionType = actionTuple[0]
    self.serviceInfoDict['actionTuple'] = actionTuple
    try:
        # Dispatch the action to the matching handler
        dispatch = {"RPC": self.__doRPC,
                    "FileTransfer": self.__doFileTransfer,
                    "Connection": self.__doConnection}
        handler = dispatch.get(actionType)
        if handler is None:
            return S_ERROR("Unknown action %s" % actionType)
        retVal = handler(actionTuple[1])
    except RequestHandler.ConnectionError as excp:
        gLogger.error("ConnectionError", str(excp))
        return S_ERROR(excp)
    # Guarantee the response is a proper S_OK/S_ERROR structure
    if not isReturnStructure(retVal):
        message = "Method %s for action %s does not return a S_OK/S_ERROR!" % (actionTuple[1], actionTuple[0])
        gLogger.error(message)
        retVal = S_ERROR(message)
    elapsedTime = time.time() - startTime
    self.__logRemoteQueryResponse(retVal, elapsedTime)
    result = self.__trPool.send(self.__trid, retVal)
    # this will delete the value from the S_OK(value)
    del retVal
    return S_OK([result, elapsedTime])
def _rh_executeAction(self, proposalTuple):
    """ Execute an action.

        @type proposalTuple: tuple
        @param proposalTuple: Type of action to execute. First position of the
                              tuple must be the type of action to execute. The
                              second position is the action itself.
    """
    # BUGFIX (doc): the docstring documented a non-existent parameter
    # "actionTuple"; the method's parameter is proposalTuple.
    actionTuple = proposalTuple[1]
    gLogger.debug("Executing %s:%s action" % actionTuple)
    startTime = time.time()
    actionType = actionTuple[0]
    if actionType == "RPC":
        retVal = self.__doRPC(actionTuple[1])
    elif actionType == "FileTransfer":
        retVal = self.__doFileTransfer(actionTuple[1])
    elif actionType == "Connection":
        retVal = self.__doConnection(actionTuple[1])
    else:
        raise Exception("Unknown action (%s)" % actionType)
    if not retVal:
        message = "Method %s for action %s does not have a return value!" % (
            actionTuple[1], actionTuple[0])
        gLogger.error(message)
        retVal = S_ERROR(message)
    self.__logRemoteQueryResponse(retVal, time.time() - startTime)
    return self.__trPool.send(self.__trid, retVal)
def __sockConnect(self, hostAddress, sockType, timeout, retries):
    """Open a TCP connection to hostAddress, retrying on timeouts.

    :param hostAddress: (host, port) tuple to connect to
    :param sockType: socket address family (e.g. socket.AF_INET)
    :param timeout: seconds to wait in select() while the connect is in progress
    :param retries: remaining retry attempts after a "timed out" error
    :return: S_OK(socket) / S_ERROR
    """
    try:
        osSocket = socket.socket(sockType, socket.SOCK_STREAM)
    except socket.error as e:
        gLogger.warn("Exception while creating a socket:", str(e))
        return S_ERROR("Exception while creating a socket:%s" % str(e))
    # osSocket.setblocking( 0 )
    if timeout:
        tsocket = self.getSocketTimeout()
        gLogger.debug("Connection timeout set to: ", tsocket)
        osSocket.settimeout(tsocket)
    # we try to connect 3 times with 1 second timeout
    try:
        osSocket.connect(hostAddress)
    # BUGFIX: legacy "except socket.error , e" syntax replaced with "as"
    except socket.error as e:
        if e.args[0] == "timed out":
            osSocket.close()
            if retries:
                return self.__sockConnect(hostAddress, sockType, timeout, retries - 1)
            else:
                return S_ERROR("Can't connect: %s" % str(e))
        if e.args[0] not in (114, 115):  # EALREADY / EINPROGRESS
            return S_ERROR("Can't connect: %s" % str(e))
        # Connect in progress: wait until the socket becomes writable
        oL = select.select([], [osSocket], [], timeout)[1]
        if len(oL) == 0:
            osSocket.close()
            return S_ERROR("Connection timeout")
        # BUGFIX: renamed local so it no longer shadows the stdlib errno module
        sockErrno = osSocket.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR)
        if sockErrno != 0:
            return S_ERROR("Can't connect: %s" % str((sockErrno, os.strerror(sockErrno))))
    # BUGFIX: the connected socket was never returned on success (returned None)
    return S_OK(osSocket)
def _rh_executeConnectionCallback(self, methodName, args=False):
    """Execute the handler's conn_<methodName> connection callback.

    :param methodName: connection callback name; must be declared in
                       RequestHandler.__connectionCallbackTypes
    :param args: optional tuple of arguments, type-checked against the
                 declared callback signature
    :return: S_OK / S_ERROR / whatever the callback returns
    """
    self.__logRemoteQuery("Connection/%s" % methodName, args)
    if methodName not in RequestHandler.__connectionCallbackTypes:
        return S_ERROR("Invalid connection method %s" % methodName)
    cbTypes = RequestHandler.__connectionCallbackTypes[methodName]
    if args:
        if len(args) != len(cbTypes):
            return S_ERROR("Expected %s arguments" % len(cbTypes))
        for i in range(len(cbTypes)):
            if type(args[i]) != cbTypes[i]:
                return S_ERROR("Invalid type for argument %s" % i)
        self.__trPool.associateData(self.__trid, "connectData", args)
    if not args:
        args = self.__trPool.getAssociatedData(self.__trid, "connectData")
    realMethod = "conn_%s" % methodName
    gLogger.debug("Callback to %s" % realMethod)
    try:
        oMethod = getattr(self, realMethod)
    # BUGFIX: bare except narrowed — only a missing attribute means "no callback"
    except AttributeError:
        # No callback defined by handler
        return S_OK()
    try:
        if args:
            uReturnValue = oMethod(self.__trid, *args)
        else:
            uReturnValue = oMethod(self.__trid)
        return uReturnValue
    # BUGFIX: legacy "except Exception, v" syntax replaced with "as"
    except Exception as v:
        gLogger.exception("Uncaught exception when serving Connect", "Function %s" % realMethod)
        return S_ERROR("Server error while serving %s: %s" % (methodName, str(v)))
def getValue(self, optionPath, defaultValue=None):
    """Return the configuration value at optionPath, falling back to
    defaultValue when the lookup fails."""
    result = self.getOption(optionPath, defaultValue)
    if not result['OK']:
        gLogger.debug("gConfig.getValue for invalid value", result['Message'])
        return defaultValue
    return result['Value']
def __RPCCallFunction(self, method, args):
    """Look up export_<method>, validate the client's arguments and execute
    the method under its lock.

    :param method: remote method name (without the export_ prefix)
    :param args: tuple of arguments sent by the client
    :return: S_OK / S_ERROR / the exported method's return value
    """
    realMethod = "export_%s" % method
    gLogger.debug("RPC to %s" % realMethod)
    try:
        oMethod = getattr(self, realMethod)
    # BUGFIX: bare except narrowed — only a missing attribute means unknown method
    except AttributeError:
        return S_ERROR("Unknown method %s" % method)
    dRetVal = self.__checkExpectedArgumentTypes(method, args)
    if not dRetVal['OK']:
        return dRetVal
    self.__lockManager.lock("RPC/%s" % method)
    self.__msgBroker.addTransportId(self.__trid, self.serviceInfoDict['serviceName'], idleRead=True)
    try:
        try:
            uReturnValue = oMethod(*args)
            return uReturnValue
        finally:
            # Always release the lock and deregister the transport
            self.__lockManager.unlock("RPC/%s" % method)
            self.__msgBroker.removeTransport(self.__trid, closeTransport=False)
    # BUGFIX: legacy "except Exception, v" syntax replaced with "as"
    except Exception as v:
        gLogger.exception("Uncaught exception when serving RPC", "Function %s" % method)
        return S_ERROR("Server error while serving %s: %s" % (method, str(v)))
def _rh_executeConnectionCallback(self, methodName, args=False):
    """Execute the handler's conn_<methodName> connection callback.

    :param methodName: connection callback name; must be declared in
                       RequestHandler.__connectionCallbackTypes
    :param args: optional tuple of arguments, type-checked against the
                 declared callback signature
    :return: S_OK / S_ERROR / whatever the callback returns
    """
    self.__logRemoteQuery("Connection/%s" % methodName, args)
    if methodName not in RequestHandler.__connectionCallbackTypes:
        return S_ERROR("Invalid connection method %s" % methodName)
    cbTypes = RequestHandler.__connectionCallbackTypes[methodName]
    if args:
        if len(args) != len(cbTypes):
            return S_ERROR("Expected %s arguments" % len(cbTypes))
        for i in range(len(cbTypes)):
            if type(args[i]) != cbTypes[i]:
                return S_ERROR("Invalid type for argument %s" % i)
        self.__trPool.associateData(self.__trid, "connectData", args)
    if not args:
        args = self.__trPool.getAssociatedData(self.__trid, "connectData")
    realMethod = "conn_%s" % methodName
    gLogger.debug("Callback to %s" % realMethod)
    try:
        oMethod = getattr(self, realMethod)
    # BUGFIX: bare except narrowed — only a missing attribute means "no callback"
    except AttributeError:
        # No callback defined by handler
        return S_OK()
    try:
        if args:
            uReturnValue = oMethod(self.__trid, *args)
        else:
            uReturnValue = oMethod(self.__trid)
        return uReturnValue
    # BUGFIX: legacy "except Exception, v" syntax replaced with "as"
    except Exception as v:
        gLogger.exception("Uncaught exception when serving Connect", "Function %s" % realMethod)
        return S_ERROR("Server error while serving %s: %s" % (methodName, str(v)))
def receiveData(self, maxBufferSize=0, blockAfterKeepAlive=True, idleReceive=False):
    """Receive one framed message (<size>:<payload>) or a keep-alive frame.

    :param maxBufferSize: max bytes to buffer (0 = unlimited)
    :param blockAfterKeepAlive: keep reading for a real message after a keep-alive
    :param idleReceive: queue the decoded message instead of returning it
    :return: the decoded message, or S_OK/S_ERROR
    """
    self.__updateLastActionTimestamp()
    if self.receivedMessages:
        return self.receivedMessages.pop(0)
    # Buffer size can't be less than 0
    maxBufferSize = max(maxBufferSize, 0)
    try:
        # Look either for message length or the keep alive magic string
        iSeparatorPosition = self.byteStream.find(":", 0, 10)
        keepAliveMagicLen = len(BaseTransport.keepAliveMagic)
        isKeepAlive = self.byteStream.find(BaseTransport.keepAliveMagic, 0, keepAliveMagicLen) == 0
        # While neither the message length nor the ka is found, keep receiving
        while iSeparatorPosition == -1 and not isKeepAlive:
            retVal = self._read(1024)
            if not retVal['OK']:
                return retVal
            if not retVal['Value']:
                return S_ERROR("Peer closed connection")
            # New data!
            self.byteStream += retVal['Value']
            iSeparatorPosition = self.byteStream.find(":", 0, 10)
            isKeepAlive = self.byteStream.find(BaseTransport.keepAliveMagic, 0, keepAliveMagicLen) == 0
            # Over the limit?
            if maxBufferSize and len(self.byteStream) > maxBufferSize and iSeparatorPosition == -1:
                return S_ERROR("Read limit exceeded (%s chars)" % maxBufferSize)
        # Keep alive magic!
        if isKeepAlive:
            gLogger.debug("Received keep alive header")
            # Remove the ka magic from the buffer and process the keep alive
            self.byteStream = self.byteStream[keepAliveMagicLen:]
            return self.__processKeepAlive(maxBufferSize, blockAfterKeepAlive)
        # From here it must be a real message!
        # Process the size and remove the msg length from the bytestream
        size = int(self.byteStream[:iSeparatorPosition])
        self.byteStream = self.byteStream[iSeparatorPosition + 1:]
        # Receive while there's still data to be received
        while len(self.byteStream) < size:
            retVal = self._read(size - len(self.byteStream), skipReadyCheck=True)
            if not retVal['OK']:
                return retVal
            if not retVal['Value']:
                return S_ERROR("Peer closed connection")
            self.byteStream += retVal['Value']
            if maxBufferSize and len(self.byteStream) > maxBufferSize:
                return S_ERROR("Read limit exceeded (%s chars)" % maxBufferSize)
        # Data is here! take it out of the bytestream, decode and return
        data = self.byteStream[:size]
        self.byteStream = self.byteStream[size:]
        try:
            data = DEncode.decode(data)[0]
        # BUGFIX: legacy "except Exception, e" syntax replaced with "as"
        except Exception as e:
            return S_ERROR("Could not decode received data: %s" % str(e))
        if idleReceive:
            self.receivedMessages.append(data)
            return S_OK()
        return data
    # BUGFIX: the outer try had no matching except clause (a syntax error as
    # written); unexpected errors during the read are now reported as S_ERROR
    except Exception as e:
        return S_ERROR("Network error while receiving data: %s" % str(e))
def receiveData(self, maxBufferSize=0, blockAfterKeepAlive=True, idleReceive=False):
    """Receive one framed message (<size>:<payload>) or a keep-alive frame.

    :param maxBufferSize: max bytes to buffer (0 = unlimited)
    :param blockAfterKeepAlive: keep reading for a real message after a keep-alive
    :param idleReceive: queue the decoded message instead of returning it
    :return: the decoded message, or S_OK/S_ERROR
    """
    self.__updateLastActionTimestamp()
    if self.receivedMessages:
        return self.receivedMessages.pop(0)
    # Buffer size can't be less than 0
    maxBufferSize = max(maxBufferSize, 0)
    try:
        # Look either for message length or the keep alive magic string
        iSeparatorPosition = self.byteStream.find(":", 0, 10)
        keepAliveMagicLen = len(BaseTransport.keepAliveMagic)
        isKeepAlive = self.byteStream.find(BaseTransport.keepAliveMagic, 0, keepAliveMagicLen) == 0
        # While neither the message length nor the ka is found, keep receiving
        while iSeparatorPosition == -1 and not isKeepAlive:
            retVal = self._read(1024)
            if not retVal['OK']:
                return retVal
            if not retVal['Value']:
                return S_ERROR("Peer closed connection")
            # New data!
            self.byteStream += retVal['Value']
            iSeparatorPosition = self.byteStream.find(":", 0, 10)
            isKeepAlive = self.byteStream.find(BaseTransport.keepAliveMagic, 0, keepAliveMagicLen) == 0
            # Over the limit?
            if maxBufferSize and len(self.byteStream) > maxBufferSize and iSeparatorPosition == -1:
                return S_ERROR("Read limit exceeded (%s chars)" % maxBufferSize)
        # Keep alive magic!
        if isKeepAlive:
            gLogger.debug("Received keep alive header")
            # Remove the ka magic from the buffer and process the keep alive
            self.byteStream = self.byteStream[keepAliveMagicLen:]
            return self.__processKeepAlive(maxBufferSize, blockAfterKeepAlive)
        # From here it must be a real message!
        # Process the size and remove the msg length from the bytestream
        size = int(self.byteStream[:iSeparatorPosition])
        self.byteStream = self.byteStream[iSeparatorPosition + 1:]
        # Receive while there's still data to be received
        while len(self.byteStream) < size:
            retVal = self._read(size - len(self.byteStream), skipReadyCheck=True)
            if not retVal['OK']:
                return retVal
            if not retVal['Value']:
                return S_ERROR("Peer closed connection")
            self.byteStream += retVal['Value']
            if maxBufferSize and len(self.byteStream) > maxBufferSize:
                return S_ERROR("Read limit exceeded (%s chars)" % maxBufferSize)
        # Data is here! take it out of the bytestream, decode and return
        data = self.byteStream[:size]
        self.byteStream = self.byteStream[size:]
        try:
            data = DEncode.decode(data)[0]
        # BUGFIX: legacy "except Exception, e" syntax replaced with "as"
        except Exception as e:
            return S_ERROR("Could not decode received data: %s" % str(e))
        if idleReceive:
            self.receivedMessages.append(data)
            return S_OK()
        return data
    # BUGFIX: the outer try had no matching except clause (a syntax error as
    # written); unexpected errors during the read are now reported as S_ERROR
    except Exception as e:
        return S_ERROR("Network error while receiving data: %s" % str(e))
def close(self):
    """Flush pending kernel buffers with fsync and close the socket (best effort)."""
    gLogger.debug("Closing socket")
    try:
        # self.oSocket.shutdown()
        os.fsync(self.oSocket.fileno())
        self.oSocket.close()
    # BUGFIX: bare "except:" narrowed — no longer swallows SystemExit/KeyboardInterrupt
    except Exception:
        pass
def close(self):
    """Flush pending kernel buffers with fsync and close the socket (best effort)."""
    gLogger.debug("Closing socket")
    try:
        # self.oSocket.shutdown()
        os.fsync(self.oSocket.fileno())
        self.oSocket.close()
    # BUGFIX: bare "except:" narrowed — no longer swallows SystemExit/KeyboardInterrupt
    except Exception:
        pass
def mergeWithLocal(self, extraCFG):
    """Merge extraCFG into the local configuration under the instance lock.

    :param extraCFG: CFG object to merge into self.localCFG
    :return: S_OK() / S_ERROR
    """
    self.lock()
    try:
        self.localCFG = self.localCFG.mergeWith(extraCFG)
        self.unlock()
        gLogger.debug("CFG merged")
    # BUGFIX: legacy "except Exception, e" syntax replaced with "as"
    except Exception as e:
        self.unlock()
        return S_ERROR("Cannot merge with new cfg: %s" % str(e))
    # BUGFIX: the success path returned None, which callers checking ['OK']
    # would choke on; return an explicit S_OK
    return S_OK()
def handshake(self):
    """Perform the server-side SSL handshake and store the client credentials
    in self.peerCredentials.

    :return: S_OK() / S_ERROR
    """
    retVal = self.oSocketInfo.doServerHandshake()
    if not retVal['OK']:
        return retVal
    creds = retVal['Value']
    if not self.oSocket.session_reused():
        gLogger.debug("New session connecting from client at %s" % str(self.getRemoteAddress()))
    for key in creds.keys():
        self.peerCredentials[key] = creds[key]
    # BUGFIX: the success return was missing (the sibling implementation of
    # this method returns S_OK()); callers checking ['OK'] would crash on None
    return S_OK()
def mergeWithLocal(self, extraCFG):
    """Merge extraCFG into the local configuration under the instance lock.

    :param extraCFG: CFG object to merge into self.localCFG
    :return: S_OK() / S_ERROR
    """
    self.lock()
    try:
        self.localCFG = self.localCFG.mergeWith(extraCFG)
        self.unlock()
        gLogger.debug("CFG merged")
    # BUGFIX: legacy "except Exception, e" syntax replaced with "as"
    except Exception as e:
        self.unlock()
        return S_ERROR("Cannot merge with new cfg: %s" % str(e))
    # BUGFIX: the success path returned None, which callers checking ['OK']
    # would choke on; return an explicit S_OK
    return S_OK()
def __generateContextWithProxyString(self):
    """Build the SSL context from the in-memory proxy string held in infoDict."""
    proxyString = self.infoDict['proxyString']
    self.setLocalCredentialsLocation((proxyString, proxyString))
    gLogger.debug("Using string proxy")
    contextResult = self.__createContext()
    if not contextResult['OK']:
        return contextResult
    # The same PEM blob carries both the certificate chain and the key
    self.sslContext.use_certificate_chain_string(proxyString)
    self.sslContext.use_privatekey_string(proxyString)
    return S_OK()
def initAsClient(self):
    """Open a client SSL connection to the configured server address."""
    sockResult = gSocketInfoFactory.getSocket(self.stServerAddress, **self.extraArgsDict)
    if not sockResult['OK']:
        return sockResult
    self.oSocketInfo = sockResult['Value']
    self.oSocket = self.oSocketInfo.getSSLSocket()
    if not self.oSocket.session_reused():
        gLogger.debug("New session connecting to server at %s" % str(self.stServerAddress))
    self.remoteAddress = self.oSocket.getpeername()
    return S_OK()
def handshake(self):
    """Run the server-side SSL handshake and record the peer's credentials."""
    handshakeResult = self.oSocketInfo.doServerHandshake()
    if not handshakeResult['OK']:
        return handshakeResult
    creds = handshakeResult['Value']
    if not self.oSocket.session_reused():
        gLogger.debug("New session connecting from client at %s" % str(self.getRemoteAddress()))
    # Copy every credential entry onto the transport
    for credName in creds.keys():
        self.peerCredentials[credName] = creds[credName]
    return S_OK()
def initAsClient(self):
    """Establish the client-side SSL socket towards the server address."""
    factoryResult = gSocketInfoFactory.getSocket(self.stServerAddress, **self.extraArgsDict)
    if not factoryResult['OK']:
        return factoryResult
    self.oSocketInfo = factoryResult['Value']
    self.oSocket = self.oSocketInfo.getSSLSocket()
    if not self.oSocket.session_reused():
        gLogger.debug("New session connecting to server at %s" % str(self.stServerAddress))
    self.remoteAddress = self.oSocket.getpeername()
    return S_OK()
def __generateContextWithProxyString(self):
    """Create the SSL context using the proxy held as a string in infoDict."""
    proxyString = self.infoDict['proxyString']
    self.setLocalCredentialsLocation((proxyString, proxyString))
    gLogger.debug("Using string proxy")
    createResult = self.__createContext()
    if not createResult['OK']:
        return createResult
    # One PEM blob serves as both certificate chain and private key
    self.sslContext.use_certificate_chain_string(proxyString)
    self.sslContext.use_privatekey_string(proxyString)
    return S_OK()
def autoRefreshAndPublish(self, sURL):
    """Enable automatic refresh and start the background thread publishing
    this slave server to the master CS.

    :param sURL: URL of this server, published to the master
    """
    gLogger.debug("Setting configuration refresh as automatic")
    if not gConfigurationData.getAutoPublish():
        gLogger.debug("Slave server won't auto publish itself")
    if not gConfigurationData.getName():
        import DIRAC
        DIRAC.abort(10, "Missing configuration name!")
    self.__url = sURL
    self.__automaticUpdate = True
    # setDaemon() is deprecated; assign the daemon attribute directly
    self.daemon = True
    self.start()
def autoRefreshAndPublish(self, sURL):
    """Enable automatic refresh and start the background thread publishing
    this slave server to the master CS.

    :param sURL: URL of this server, published to the master
    """
    gLogger.debug("Setting configuration refresh as automatic")
    if not gConfigurationData.getAutoPublish():
        gLogger.debug("Slave server won't auto publish itself")
    if not gConfigurationData.getName():
        import DIRAC
        DIRAC.abort(10, "Missing configuration name!")
    self.__url = sURL
    self.__automaticUpdate = True
    # setDaemon() is deprecated; assign the daemon attribute directly
    self.daemon = True
    self.start()
def __generateServerContext(self):
    """Create the server SSL context from the host certs and apply the
    session id/timeout settings."""
    certResult = self.__generateContextWithCerts()
    if not certResult['OK']:
        return certResult
    self.sslContext.set_session_id("DISETConnection%s" % str(time.time()))
    # self.sslContext.get_cert_store().set_flags( GSI.crypto.X509_CRL_CHECK )
    if 'SSLSessionTimeout' in self.infoDict:
        sessionTimeout = int(self.infoDict['SSLSessionTimeout'])
        gLogger.debug("Setting session timeout to %s" % sessionTimeout)
        self.sslContext.set_session_timeout(sessionTimeout)
    return S_OK()
def __generateServerContext(self):
    """Build the server-side SSL context (host certificates, session id and
    optional session timeout)."""
    contextResult = self.__generateContextWithCerts()
    if not contextResult['OK']:
        return contextResult
    self.sslContext.set_session_id("DISETConnection%s" % str(time.time()))
    # self.sslContext.get_cert_store().set_flags( GSI.crypto.X509_CRL_CHECK )
    if 'SSLSessionTimeout' in self.infoDict:
        sslTimeout = int(self.infoDict['SSLSessionTimeout'])
        gLogger.debug("Setting session timeout to %s" % sslTimeout)
        self.sslContext.set_session_timeout(sslTimeout)
    return S_OK()
def __RPCCallFunction(self, method, args):
    """ Check the arguments then call the RPC function

        :type method: string
        :param method: name of the remote method requested by the client
        :return: S_OK/S_ERROR
    """
    realMethod = "export_%s" % method
    gLogger.debug("RPC to %s" % realMethod)
    # Resolve the exported method; a missing attribute means an unknown method
    try:
        oMethod = getattr(self, realMethod)
    except BaseException:
        return S_ERROR("Unknown method %s" % method)
    # Verify the client-supplied argument types
    argCheck = self.__checkExpectedArgumentTypes(method, args)
    if not argCheck['OK']:
        return argCheck
    # Serialize concurrent calls to the same method
    self.__lockManager.lock("RPC/%s" % method)
    # 18.02.19 WARNING CHRIS
    # Registering the transport id with the message broker caused a double
    # read on the socket (broker select + ServiceReactor read), exhibited
    # while testing M2Crypto; the registration is therefore kept disabled.
    # self.__msgBroker.addTransportId(self.__trid,
    #                                 self.serviceInfoDict['serviceName'],
    #                                 idleRead=True)
    try:
        try:
            return oMethod(*args)
        finally:
            # Always release the per-method lock
            self.__lockManager.unlock("RPC/%s" % method)
            # 18.02.19 WARNING CHRIS — see comment above
            # self.__msgBroker.removeTransport(self.__trid, closeTransport=False)
    except Exception as e:
        gLogger.exception("Uncaught exception when serving RPC", "Function %s" % method, lException=e)
        return S_ERROR("Server error while serving %s: %s" % (method, str(e)))
def _connect(self):
    """Open the client transport to self.serviceURL.

    :return: S_OK(transport) / S_ERROR
    """
    if not self.__initStatus['OK']:
        return self.__initStatus
    if self.__enableThreadCheck:
        self.__checkThreadID()
    gLogger.debug("Connecting to: %s" % self.serviceURL)
    try:
        transport = gProtocolDict[self.__URLTuple[0]]['transport'](self.__URLTuple[1:3], **self.kwargs)
        retVal = transport.initAsClient()
        if not retVal['OK']:
            return S_ERROR("Can't connect to %s: %s" % (self.serviceURL, retVal))
    # BUGFIX: legacy "except Exception, e" syntax replaced with "as"
    except Exception as e:
        return S_ERROR("Can't connect to %s: %s" % (self.serviceURL, e))
    # BUGFIX: the success path previously fell through, returning None
    return S_OK(transport)
def __findServiceURL(self):
    """Resolve the destination service into a concrete URL, routing through a
    configured site gateway when one applies.

    :return: S_OK(url) / S_ERROR
    """
    if not self.__initStatus['OK']:
        return self.__initStatus
    gatewayURL = False
    if self.KW_IGNORE_GATEWAYS not in self.kwargs or not self.kwargs[self.KW_IGNORE_GATEWAYS]:
        dRetVal = gConfig.getOption("/DIRAC/Gateways/%s" % DIRAC.siteName())
        if dRetVal['OK']:
            # Pick a random gateway and keep only the scheme://host:port part
            rawGatewayURL = List.randomize(List.fromChar(dRetVal['Value'], ","))[0]
            gatewayURL = "/".join(rawGatewayURL.split("/")[:3])
    for protocol in gProtocolDict.keys():
        if self._destinationSrv.find("%s://" % protocol) == 0:
            gLogger.debug("Already given a valid url", self._destinationSrv)
            if not gatewayURL:
                return S_OK(self._destinationSrv)
            gLogger.debug("Reconstructing given URL to pass through gateway")
            path = "/".join(self._destinationSrv.split("/")[3:])
            finalURL = "%s/%s" % (gatewayURL, path)
            gLogger.debug("Gateway URL conversion:\n %s -> %s" % (self._destinationSrv, finalURL))
            return S_OK(finalURL)
    if gatewayURL:
        gLogger.debug("Using gateway", gatewayURL)
        return S_OK("%s/%s" % (gatewayURL, self._destinationSrv))
    try:
        urls = getServiceURL(self._destinationSrv, setup=self.setup)
    # BUGFIX: legacy "except Exception, e" syntax replaced with "as"
    except Exception as e:
        return S_ERROR("Cannot get URL for %s in setup %s: %s" % (self._destinationSrv, self.setup, str(e)))
    # NOTE(review): the handling of `urls` presumably continues below this
    # chunk in the original file; confirm against the full source.
def _refresh(self, fromMaster=False):
    """
    Refresh configuration
    """
    self._lastUpdateTime = time.time()
    gLogger.debug("Refreshing configuration...")
    gatewayList = getGatewayURLs("Configuration/Server")
    updatingErrorsList = []
    # Decide which servers to contact: gateways, the master, or the full list
    if gatewayList:
        initialServerList = gatewayList
        gLogger.debug("Using configuration gateway", str(initialServerList[0]))
    elif fromMaster:
        masterServer = gConfigurationData.getMasterServer()
        initialServerList = [masterServer]
        gLogger.debug("Refreshing from master %s" % masterServer)
    else:
        initialServerList = gConfigurationData.getServers()
        gLogger.debug("Refreshing from list %s" % str(initialServerList))
    # If no servers in the initial list, we are supposed to use the local configuration only
    if not initialServerList:
        return S_OK()
    randomServerList = List.randomize(initialServerList)
    gLogger.debug("Randomized server list is %s" % ", ".join(randomServerList))
    for sServer in randomServerList:
        from DIRAC.ConfigurationSystem.Client.ConfigurationClient import ConfigurationClient
        oClient = ConfigurationClient(
            url=sServer,
            useCertificates=gConfigurationData.useServerCertificate(),
            skipCACheck=gConfigurationData.skipCACheck(),
        )
        dRetVal = _updateFromRemoteLocation(oClient)
        if dRetVal["OK"]:
            # Success: remember the refresh period and stop trying servers
            self._refreshTime = gConfigurationData.getRefreshTime()
            return dRetVal
        updatingErrorsList.append(dRetVal["Message"])
        gLogger.warn(
            "Can't update from server",
            "Error while updating from %s: %s" % (sServer, dRetVal["Message"]))
        # A broken local environment will fail on every server: stop early
        if dRetVal["Message"].find("Insane environment") > -1:
            break
    return S_ERROR("Reason(s):\n\t%s" % "\n\t".join(List.uniqueElements(updatingErrorsList)))
def __processKeepAlive(self, maxBufferSize, blockAfterKeepAlive=True):
    """Handle one keep-alive frame: answer pings with pongs, clear the
    pong-wait flag on pongs, and optionally keep listening for a real message."""
    gLogger.debug("Received Keep Alive")
    # The keep-alive payload is the next message down the stream
    result = self.receiveData(maxBufferSize, blockAfterKeepAlive=False)
    if not result['OK']:
        gLogger.debug("Error while receiving keep alive: %s" % result['Message'])
        return result
    kaData = result['Value']
    # Validate the payload structure
    for reqField in ('id', 'kaping'):
        if reqField in kaData:
            continue
        errMsg = "Invalid keep alive, missing %s" % reqField
        gLogger.debug(errMsg)
        return S_ERROR(errMsg)
    gLogger.debug("Received keep alive id %s" % kaData)
    # Distinguish our own pings coming back from pings started by the peer
    if kaData['kaping']:
        # A PING from the peer: answer with the PONG
        self.sendKeepAlive(responseId=kaData['id'])
    else:
        # A PONG for one of our pings: stop waiting
        self.waitingForKeepAlivePong = False
    if blockAfterKeepAlive:
        # Keep listening for the next real message downstream
        return self.receiveData(maxBufferSize, blockAfterKeepAlive)
    result = S_OK()
    result['keepAlive'] = True
    return result
def __processKeepAlive(self, maxBufferSize, blockAfterKeepAlive=True):
    """Process a keep-alive frame: reply to pings, mark pongs as received,
    then either return or resume reading the stream."""
    gLogger.debug("Received Keep Alive")
    # The frame's payload follows as the next message on the stream
    payloadResult = self.receiveData(maxBufferSize, blockAfterKeepAlive=False)
    if not payloadResult['OK']:
        gLogger.debug("Error while receiving keep alive: %s" % payloadResult['Message'])
        return payloadResult
    kaData = payloadResult['Value']
    # Both fields must be present for a valid keep-alive
    missing = [f for f in ('id', 'kaping') if f not in kaData]
    if missing:
        errMsg = "Invalid keep alive, missing %s" % missing[0]
        gLogger.debug(errMsg)
        return S_ERROR(errMsg)
    gLogger.debug("Received keep alive id %s" % kaData)
    if kaData['kaping']:
        # Peer pinged us: send the matching pong
        self.sendKeepAlive(responseId=kaData['id'])
    else:
        # Our ping was answered: stop waiting for the pong
        self.waitingForKeepAlivePong = False
    if not blockAfterKeepAlive:
        reply = S_OK()
        reply['keepAlive'] = True
        return reply
    # Continue with the next real message downstream
    return self.receiveData(maxBufferSize, blockAfterKeepAlive)
def __generateContextWithCerts( self ):
  """ Build the SSL context using the host certificate/key pair found on disk.

      :return: S_OK() / S_ERROR
  """
  certKeyTuple = Locations.getHostCertificateAndKeyLocation()
  if not certKeyTuple:
    return S_ERROR( "No valid certificate or key found" )
  # Remember which credentials this context was built from
  self.setLocalCredentialsLocation( certKeyTuple )
  gLogger.debug( "Using certificate %s\nUsing key %s" % certKeyTuple )
  retVal = self.__createContext()
  if not retVal[ 'OK' ]:
    return retVal
  # Verify depth set to 50 (comment previously said 20, contradicting the
  # code) to ensure accepting proxies of proxies of proxies....
  self.sslContext.set_verify_depth( 50 )
  self.sslContext.use_certificate_chain_file( certKeyTuple[0] )
  self.sslContext.use_privatekey_file( certKeyTuple[1] )
  return S_OK()
def _connect(self):
    """Open a transport to the remote service.

    :return: S_OK( transport object ) / S_ERROR
    """
    # May update the extra credentials used for this connection
    self.__discoverExtraCredentials()
    if not self.__initStatus['OK']:
        return self.__initStatus
    if self.__enableThreadCheck:
        self.__checkThreadID()
    gLogger.debug("Connecting to: %s" % self.serviceURL)
    try:
        transport = gProtocolDict[self.__URLTuple[0]]['transport'](self.__URLTuple[1:3], **self.kwargs)
        retVal = transport.initAsClient()
        if not retVal['OK']:
            return S_ERROR("Can't connect to %s: %s" % (self.serviceURL, retVal))
    except Exception as e:
        return S_ERROR("Can't connect to %s: %s" % (self.serviceURL, e))
    # BUGFIX: the transport was created but never handed back to the caller,
    # so a successful connect implicitly returned None
    return S_OK(transport)
def __refresh(self):
    """Contact configuration servers (or a gateway) to refresh the local
    configuration.

    Servers are tried in random order; the first successful update wins.

    :return: S_OK on success / S_ERROR aggregating per-server failures
    """
    self.__lastUpdateTime = time.time()
    gLogger.debug("Refreshing configuration...")
    gatewayList = getGatewayURLs("Configuration/Server")
    updatingErrorsList = []
    if gatewayList:
        lInitialListOfServers = gatewayList
        gLogger.debug("Using configuration gateway", str(lInitialListOfServers[0]))
    else:
        lInitialListOfServers = gConfigurationData.getServers()
        gLogger.debug("Refreshing from list %s" % str(lInitialListOfServers))
    lRandomListOfServers = List.randomize(lInitialListOfServers)
    gLogger.debug("Randomized server list is %s" % ", ".join(lRandomListOfServers))
    for sServer in lRandomListOfServers:
        # Local import to avoid a circular dependency at module load time
        from DIRAC.Core.DISET.RPCClient import RPCClient
        oClient = RPCClient(
            sServer,
            useCertificates=gConfigurationData.useServerCertificate(),
            skipCACheck=gConfigurationData.skipCACheck())
        dRetVal = _updateFromRemoteLocation(oClient)
        if dRetVal['OK']:
            return dRetVal
        updatingErrorsList.append(dRetVal['Message'])
        gLogger.warn(
            "Can't update from server",
            "Error while updating from %s: %s" % (sServer, dRetVal['Message']))
        # Consistency with the other __refresh variants in this file: a broken
        # local environment fails identically against every server, so stop early
        if dRetVal['Message'].find("Insane environment") > -1:
            break
    return S_ERROR("Reason(s):\n\t%s" % "\n\t".join(List.uniqueElements(updatingErrorsList)))
def __refresh(self):
    """Pull a fresh configuration from one of the known servers.

    Uses the configuration gateway when one is defined, otherwise the
    configured server list, trying servers in random order until one
    update succeeds.

    :return: S_OK / S_ERROR with the collected failure reasons
    """
    self.__lastUpdateTime = time.time()
    gLogger.debug("Refreshing configuration...")
    gatewayList = getGatewayURLs("Configuration/Server")
    if gatewayList:
        candidateServers = gatewayList
        gLogger.debug("Using configuration gateway", str(candidateServers[0]))
    else:
        candidateServers = gConfigurationData.getServers()
        gLogger.debug("Refreshing from list %s" % str(candidateServers))
    shuffledServers = List.randomize(candidateServers)
    gLogger.debug("Randomized server list is %s" % ", ".join(shuffledServers))
    failures = []
    for serverURL in shuffledServers:
        # Imported here to avoid an import cycle at module load
        from DIRAC.Core.DISET.RPCClient import RPCClient
        rpcClient = RPCClient(serverURL,
                              useCertificates=gConfigurationData.useServerCertificate(),
                              skipCACheck=gConfigurationData.skipCACheck())
        updateResult = _updateFromRemoteLocation(rpcClient)
        if updateResult['OK']:
            return updateResult
        failures.append(updateResult['Message'])
        gLogger.warn("Can't update from server",
                     "Error while updating from %s: %s" % (serverURL, updateResult['Message']))
        # An insane environment will fail the same way everywhere: give up
        if updateResult['Message'].find("Insane environment") > -1:
            break
    return S_ERROR("Reason(s):\n\t%s" % "\n\t".join(List.uniqueElements(failures)))
def __generateContextWithCerts(self):
    """Build the SSL context from the host certificate and key.

    :return: S_OK / S_ERROR
    """
    certKeyTuple = Locations.getHostCertificateAndKeyLocation()
    if not certKeyTuple:
        return S_ERROR("No valid certificate or key found")
    self.setLocalCredentialsLocation(certKeyTuple)
    gLogger.debug("Using certificate %s\nUsing key %s" % certKeyTuple)
    creation = self.__createContext()
    if not creation['OK']:
        return creation
    # Deep verify chain so proxies of proxies of proxies... still validate
    self.sslContext.set_verify_depth(VERIFY_DEPTH)
    self.sslContext.use_certificate_chain_file(certKeyTuple[0])
    self.sslContext.use_privatekey_file(certKeyTuple[1])
    return S_OK()
def __sslHandshake(self):
    """ Do the SSL Handshake

    :return: S_ERROR / S_OK with dictionary of user credentials
    """
    start = time.time()
    timeout = self.infoDict['timeout']
    # Non-blocking handshake loop: keep driving do_handshake() until it
    # completes, the timeout expires, or a hard SSL error occurs.
    while True:
        if timeout:
            if time.time() - start > timeout:
                return S_ERROR("Handshake timeout exceeded")
        try:
            self.sslSocket.do_handshake()
            break
        except GSI.SSL.WantReadError:
            # Handshake needs more data from the peer; back off briefly
            time.sleep(0.001)
        except GSI.SSL.WantWriteError:
            # Handshake needs to write but the socket is not ready yet
            time.sleep(0.001)
        except GSI.SSL.Error as v:
            # Hard SSL failure: retry the whole handshake up to 3 times
            if self.__retry < 3:
                self.__retry += 1
                return self.__sslHandshake()
            else:
                # gLogger.warn( "Error while handshaking", "\n".join( [ stError[2] for stError in v.args[0] ] ) )
                gLogger.warn("Error while handshaking", v)
                return S_ERROR("Error while handshaking")
        except Exception as v:
            # Non-SSL exception: log, then retry the same way as SSL errors
            # NOTE(review): this branch logs the warning twice when retries
            # are exhausted -- confirm whether that is intentional
            gLogger.warn("Error while handshaking", v)
            if self.__retry < 3:
                self.__retry += 1
                return self.__sslHandshake()
            else:
                # gLogger.warn( "Error while handshaking", "\n".join( [ stError[2] for stError in v.args[0] ] ) )
                gLogger.warn("Error while handshaking", v)
                return S_ERROR("Error while handshaking")
    credentialsDict = self.gatherPeerCredentials()
    if self.infoDict['clientMode']:
        # As a client, check that the server's certificate CN matches the
        # host we intended to connect to
        hostnameCN = credentialsDict['CN']
        # if hostnameCN.split("/")[-1] != self.infoDict[ 'hostname' ]:
        if not self.__isSameHost(hostnameCN, self.infoDict['hostname']):
            # NOTE(review): a hostname mismatch is only warned about, the
            # connection is NOT aborted -- confirm this is intentional
            gLogger.warn(
                "Server is not who it's supposed to be",
                "Connecting to %s and it's %s" % (self.infoDict['hostname'], hostnameCN))
    gLogger.debug("", "Authenticated peer (%s)" % credentialsDict['DN'])
    return S_OK(credentialsDict)
def handshake(self):
    """Run the server-side handshake and collect the client credentials.

    Credentials returned by the handshake are merged into
    self.peerCredentials.

    :return: S_OK / S_ERROR
    """
    handshakeResult = self.oSocketInfo.doServerHandshake()
    if not handshakeResult['OK']:
        return handshakeResult
    clientCreds = handshakeResult['Value']
    if not self.oSocket.session_reused():
        gLogger.debug("New session connecting from client at %s" % str(self.getRemoteAddress()))
    for credName in clientCreds.keys():
        self.peerCredentials[credName] = clientCreds[credName]
    return S_OK()
def sync(self):
    """Rebuild the merged configuration and derived data.

    Recomputes mergedCFG, the de-duplicated remote server list (from both
    local and remote CFGs) and the compressed remote configuration blob.
    """
    gLogger.debug("Updating configuration internals")
    self.mergedCFG = self.remoteCFG.mergeWith(self.localCFG)
    serverList = []
    # Servers may be declared in either the local or the remote CFG
    for sourceCFG in (self.localCFG, self.remoteCFG):
        serversOption = self.extractOptionFromCFG("%s/Servers" % self.configurationPath,
                                                  sourceCFG,
                                                  disableDangerZones=True)
        if serversOption:
            serverList.extend(List.fromChar(serversOption, ","))
    self.remoteServerList = List.uniqueElements(serverList)
    self.compressedConfigurationData = zlib.compress(str(self.remoteCFG), 9)
def handshake(self):
    """Perform the client-server handshake and record peer credentials.

    :return: S_OK (with credentialDict if new session) / S_ERROR
    """
    result = self.oSocketInfo.doServerHandshake()
    if not result['OK']:
        return result
    credentials = result['Value']
    newSession = not self.oSocket.session_reused()
    if newSession:
        gLogger.debug("New session connecting from client at %s" % str(self.getRemoteAddress()))
    # Store every credential field from the handshake
    for name in credentials.keys():
        self.peerCredentials[name] = credentials[name]
    return S_OK()
def _proposeAction(self, transport, action):
    """Send the connection proposal (component, setup, VO, action, extra
    credentials) to the server and handle an eventual delegation request.

    :param transport: transport object returned by _connect
    :param action: tuple (<action type>, <action name>)
    :return: the server reply
    """
    if not self.__initStatus["OK"]:
        return self.__initStatus
    stConnectionInfo = ((self.__URLTuple[3], self.setup, self.vo),
                        action,
                        self.__extraCredentials)
    retVal = transport.sendData(S_OK(stConnectionInfo))
    if not retVal["OK"]:
        return retVal
    serverReturn = transport.receiveData()
    # TODO: Check if delegation is required
    # isinstance(..., dict) instead of type(...) == types.DictType:
    # idiomatic, and consistent with the other _proposeAction variant
    if serverReturn["OK"] and "Value" in serverReturn and isinstance(serverReturn["Value"], dict):
        gLogger.debug("There is a server requirement")
        serverRequirements = serverReturn["Value"]
        if "delegate" in serverRequirements:
            gLogger.debug("A delegation is requested")
            serverReturn = self.__delegateCredentials(transport, serverRequirements["delegate"])
    return serverReturn
def sync(self):
    """Merge remote and local CFGs and refresh the cached server list.

    Also regenerates the compressed form of the remote configuration.
    """
    gLogger.debug("Updating configuration internals")
    self.mergedCFG = self.remoteCFG.mergeWith(self.localCFG)
    self.remoteServerList = []
    optionPath = "%s/Servers" % self.configurationPath
    localServers = self.extractOptionFromCFG(optionPath, self.localCFG, disableDangerZones=True)
    if localServers:
        self.remoteServerList += List.fromChar(localServers, ",")
    remoteServers = self.extractOptionFromCFG(optionPath, self.remoteCFG, disableDangerZones=True)
    if remoteServers:
        self.remoteServerList += List.fromChar(remoteServers, ",")
    # Drop duplicates while keeping first-seen order
    self.remoteServerList = List.uniqueElements(self.remoteServerList)
    self.compressedConfigurationData = zlib.compress(str(self.remoteCFG), 9)
def __generateContextWithProxy(self):
    """Create the SSL context using a proxy certificate.

    The proxy is taken from infoDict['proxyLocation'] when present,
    otherwise discovered via Locations.getProxyLocation().

    :return: S_OK / S_ERROR
    """
    if 'proxyLocation' in self.infoDict:
        proxyPath = self.infoDict['proxyLocation']
        if not os.path.isfile(proxyPath):
            return S_ERROR("Defined proxy is not a file")
    else:
        proxyPath = Locations.getProxyLocation()
        if not proxyPath:
            return S_ERROR("No valid proxy found")
    # The proxy file holds both the certificate chain and the private key
    self.setLocalCredentialsLocation((proxyPath, proxyPath))
    gLogger.debug("Using proxy %s" % proxyPath)
    created = self.__createContext()
    if not created['OK']:
        return created
    self.sslContext.use_certificate_chain_file(proxyPath)
    self.sslContext.use_privatekey_file(proxyPath)
    return S_OK()
def _proposeAction(self, transport, action):
    """Handshake-like proposal of an action to the server.

    Sends ((System/Component, setup, VO), action, extraCredentials) over
    the transport and, if the server replies asking for a credential
    delegation, performs the delegation and returns its result.

    :param transport: the Transport object returned by _connect
    :param action: tuple (<action type>, <action name>). It depends on the
                   subclasses of BaseClient. <action type> can be for
                   example 'RPC' or 'FileTransfer'
    :return: whatever the server sent back
    """
    if not self.__initStatus['OK']:
        return self.__initStatus
    proposal = ((self.__URLTuple[3], self.setup, self.vo),
                action,
                self.__extraCredentials)
    # Send the connection info and get the answer back
    sendResult = transport.sendData(S_OK(proposal))
    if not sendResult['OK']:
        return sendResult
    serverReturn = transport.receiveData()
    # TODO: Check if delegation is required. This seems to be used only for the GatewayService
    if serverReturn['OK'] and 'Value' in serverReturn and isinstance(serverReturn['Value'], dict):
        gLogger.debug("There is a server requirement")
        requirements = serverReturn['Value']
        if 'delegate' in requirements:
            gLogger.debug("A delegation is requested")
            serverReturn = self.__delegateCredentials(transport, requirements['delegate'])
    return serverReturn
def __init__(self, loadDefaultCFG=True):
    """Set up configuration holders and optionally load etc/dirac.cfg.

    :param loadDefaultCFG: when True, load and merge the default dirac.cfg
    """
    lockRing = LockRing()
    self.threadingEvent = lockRing.getEvent()
    self.threadingEvent.set()
    self.threadingLock = lockRing.getLock()
    self.runningThreadsNumber = 0
    self.compressedConfigurationData = ""
    self.configurationPath = "/DIRAC/Configuration"
    self.backupsDir = os.path.join(DIRAC.rootPath, "etc", "csbackup")
    self._isService = False
    # Local, remote and merged views of the configuration
    self.localCFG = CFG()
    self.remoteCFG = CFG()
    self.mergedCFG = CFG()
    self.remoteServerList = []
    if loadDefaultCFG:
        defaultCFGFile = os.path.join(DIRAC.rootPath, "etc", "dirac.cfg")
        gLogger.debug("dirac.cfg should be at", "%s" % defaultCFGFile)
        loadResult = self.loadFile(defaultCFGFile)
        if not loadResult['OK']:
            # Best effort: a missing default cfg is only a warning
            gLogger.warn("Can't load %s file" % defaultCFGFile)
    self.sync()
def __RPCCallFunction(self, method, args):
    """Locate the export_<method> handler, validate arguments and run it.

    :param method: RPC method name (without the export_ prefix)
    :param args: tuple of arguments for the method
    :return: S_OK/S_ERROR (or whatever the exported method returns)
    """
    realMethod = "export_%s" % method
    gLogger.debug("RPC to %s" % realMethod)
    try:
        oMethod = getattr(self, realMethod)
    except AttributeError:
        # Narrowed from a bare except: only a missing handler should map
        # to "Unknown method"; any other error must propagate
        return S_ERROR("Unknown method %s" % method)
    dRetVal = self.__checkExpectedArgumentTypes(method, args)
    if not dRetVal["OK"]:
        return dRetVal
    # Serialize concurrent calls to the same method
    self.__lockManager.lock(method)
    self.__msgBroker.addTransportId(self.__trid,
                                    self.serviceInfoDict["serviceName"],
                                    idleRead=True)
    try:
        try:
            uReturnValue = oMethod(*args)
            return uReturnValue
        finally:
            # Always release the lock and detach the transport
            self.__lockManager.unlock(method)
            self.__msgBroker.removeTransport(self.__trid, closeTransport=False)
    except Exception as v:
        gLogger.exception("Uncaught exception when serving RPC", "Function %s" % method)
        return S_ERROR("Server error while serving %s: %s" % (method, str(v)))
def __refresh(self, fromMaster=False):
    """Refresh the configuration from gateway, master or the server list.

    :param fromMaster: when True (and no gateway is defined), query only
                       the master configuration server
    :return: S_OK on the first successful update, S_ERROR otherwise
    """
    self.__lastUpdateTime = time.time()
    gLogger.debug("Refreshing configuration...")
    gatewayList = getGatewayURLs("Configuration/Server")
    if gatewayList:
        initialServerList = gatewayList
        gLogger.debug("Using configuration gateway", str(initialServerList[0]))
    elif fromMaster:
        masterServer = gConfigurationData.getMasterServer()
        initialServerList = [masterServer]
        gLogger.debug("Refreshing from master %s" % masterServer)
    else:
        initialServerList = gConfigurationData.getServers()
        gLogger.debug("Refreshing from list %s" % str(initialServerList))
    # If no servers in the initial list, we are supposed to use the local configuration only
    if not initialServerList:
        return S_OK()
    randomServerList = List.randomize(initialServerList)
    gLogger.debug("Randomized server list is %s" % ", ".join(randomServerList))
    errorMessages = []
    for serverName in randomServerList:
        # Imported locally to avoid a circular import at module load
        from DIRAC.Core.DISET.RPCClient import RPCClient
        client = RPCClient(serverName,
                           useCertificates=gConfigurationData.useServerCertificate(),
                           skipCACheck=gConfigurationData.skipCACheck())
        updateResult = _updateFromRemoteLocation(client)
        if updateResult['OK']:
            return updateResult
        errorMessages.append(updateResult['Message'])
        gLogger.warn("Can't update from server",
                     "Error while updating from %s: %s" % (serverName, updateResult['Message']))
        # "Insane environment" is a local problem: no point trying more servers
        if updateResult['Message'].find("Insane environment") > -1:
            break
    return S_ERROR("Reason(s):\n\t%s" % "\n\t".join(List.uniqueElements(errorMessages)))
def __RPCCallFunction(self, method, args):
    """ Check the arguments then call the RPC function

    :type method: string
    :param method: arguments sended by remote client
    :return: S_OK/S_ERROR
    """
    realMethod = "export_%s" % method
    gLogger.debug("RPC to %s" % realMethod)
    try:
        # Get the method we are trying to call
        oMethod = getattr(self, realMethod)
    except AttributeError:
        # Narrowed from a bare except: only a missing handler means
        # "unknown method"; anything else should propagate
        return S_ERROR("Unknown method %s" % method)
    # Check if the client sends correct arguments
    dRetVal = self.__checkExpectedArgumentTypes(method, args)
    if not dRetVal['OK']:
        return dRetVal
    # Lock the method with Semaphore to avoid too many calls at the same time
    self.__lockManager.lock("RPC/%s" % method)
    self.__msgBroker.addTransportId(self.__trid,
                                    self.serviceInfoDict['serviceName'],
                                    idleRead=True)
    try:
        try:
            # Trying to execute the method
            uReturnValue = oMethod(*args)
            return uReturnValue
        finally:
            # Unlock method
            self.__lockManager.unlock("RPC/%s" % method)
            self.__msgBroker.removeTransport(self.__trid, closeTransport=False)
    except Exception as e:
        gLogger.exception("Uncaught exception when serving RPC", "Function %s" % method, lException=e)
        return S_ERROR("Server error while serving %s: %s" % (method, str(e)))