def generateRevokedCertsFile(location=None):
    """Generate a single file holding every CRL PEM found in the CAs directory.

    Candidate output locations are tried in order: next to the CAs directory,
    next to the host certificate, and finally a freshly created temporary file.

    :param str location: optional CS location used to look up the host certificate
    :return: S_OK(path to crls.pem), or S_ERROR(CAs directory) if nothing was writable
    """
    caDir = Locations.getCAsLocation()
    hostCertDir = os.path.dirname(Locations.getHostCertificateAndKeyLocation(location)[0])
    candidates = (
        os.path.join(os.path.dirname(caDir), "crls.pem"),
        os.path.join(hostCertDir, "crls.pem"),
        False,
    )
    for target in candidates:
        if not target:
            # Last resort: a temporary file, which is always writable
            target = tempfile.mkstemp(prefix="crls", suffix=".pem")[1]
        try:
            with open(target, "w") as pemFile:
                for entry in os.listdir(caDir):
                    result = X509CRL.X509CRL.instanceFromFile(os.path.join(caDir, entry))
                    if not result['OK']:
                        # Not a parseable CRL file -- skip it
                        continue
                    pemFile.write(result['Value'].dumpAllToString()['Value'])
            return S_OK(target)
        except IOError:
            # This location is not writable -- fall through to the next candidate
            continue
    return S_ERROR(caDir)
def HTTPSKey():
    """Return the HTTPS key path, preferring the CS option over the local host key."""
    keyTuple = Locations.getHostCertificateAndKeyLocation()
    defaultKey = keyTuple[1] if keyTuple else "/opt/dirac/etc/grid-security/hostkey.pem"
    return getCSValue("HTTPS/Key", defaultKey)
def __init__(self, server=False, serverCert=False, serverKey=False, voName=False, timeout=False):
    """Store MyProxy interaction settings, falling back to CS values and host credentials.

    :param server: MyProxy server host (default from CS)
    :param serverCert: path to server certificate (default: host certificate)
    :param serverKey: path to server key (default: host key)
    :param voName: VO name (default from getVO)
    :param timeout: command timeout in seconds (default 30)
    """
    self._secCmdTimeout = timeout if timeout else 30
    self._secServer = server if server else gConfig.getValue("/DIRAC/VOPolicy/MyProxyServer", "myproxy.cern.ch")
    self._secVO = voName if voName else getVO("unknown")
    ckLoc = Locations.getHostCertificateAndKeyLocation()
    if serverCert:
        self._secCertLoc = serverCert
    else:
        self._secCertLoc = ckLoc[0] if ckLoc else "%s/etc/grid-security/servercert.pem" % DIRAC.rootPath
    if serverKey:
        self._secKeyLoc = serverKey
    else:
        self._secKeyLoc = ckLoc[1] if ckLoc else "%s/etc/grid-security/serverkey.pem" % DIRAC.rootPath
    # The CS flag is a string; any of y/yes/true (case-insensitive) means trusted
    trustedFlag = gConfig.getValue("/DIRAC/VOPolicy/MyProxyTrustedHost", "True")
    self._secRunningFromTrustedHost = trustedFlag.lower() in ("y", "yes", "true")
    self._secMaxProxyHours = gConfig.getValue("/DIRAC/VOPolicy/MyProxyMaxDelegationTime", 168)
def HTTPSCert():
    """Return the HTTPS certificate path, preferring the CS option over the local host cert."""
    certTuple = Locations.getHostCertificateAndKeyLocation()
    defaultCert = certTuple[0] if certTuple else "/opt/dirac/etc/grid-security/hostcert.pem"
    return getCSValue("HTTPS/Cert", defaultCert)
def HTTPSCert():
    """Return the path to the HTTPS certificate.

    The CS option HTTPS/Cert wins; otherwise fall back to the host
    certificate location, or a hard-coded default path.
    """
    cert = Locations.getHostCertificateAndKeyLocation()
    if cert:
        # (cert, key) tuple -- keep only the certificate path
        cert = cert[0]
    else:
        cert = "/opt/dirac/etc/grid-security/hostcert.pem"
    return getCSValue( "HTTPS/Cert", cert )
def HTTPSKey():
    """Return the path to the HTTPS key.

    The CS option HTTPS/Key wins; otherwise fall back to the host key
    location, or a hard-coded default path.
    """
    key = Locations.getHostCertificateAndKeyLocation()
    if key:
        # (cert, key) tuple -- keep only the key path
        key = key[1]
    else:
        key = "/opt/dirac/etc/grid-security/hostkey.pem"
    return getCSValue("HTTPS/Key", key)
def generateRevokedCertsFile(location=None):
    """ Generate a single CA file with all the PEMs

    :param str location: we can specify a specific location in CS
    :return: file crls.pem which contains all revoked certificates
    """
    caDir = Locations.getCAsLocation()
    # Candidate targets, in order: next to the CAs dir, next to the host
    # certificate, and finally (False placeholder) a temporary file.
    for fn in (
            os.path.join(os.path.dirname(caDir), "crls.pem"),
            os.path.join(
                os.path.dirname(
                    Locations.getHostCertificateAndKeyLocation(location)[0]), "crls.pem"),
            False):
        if not fn:
            # Fallback: an always-writable temporary file
            fn = tempfile.mkstemp(prefix="crls", suffix=".pem")[1]
        try:
            with open(fn, "w") as fd:
                for caFile in os.listdir(caDir):
                    caFile = os.path.join(caDir, caFile)
                    result = X509CRL.X509CRL.instanceFromFile(caFile)
                    if not result['OK']:
                        # Not a parseable CRL -- skip it
                        continue
                    chain = result['Value']
                    fd.write(chain.dumpAllToString()['Value'])
                return S_OK(fn)
        except IOError:
            # Target not writable -- try the next candidate
            continue
    return S_ERROR(caDir)
def __init__(self, server=False, serverCert=False, serverKey=False, timeout=False):
    """Store MyProxy interaction settings, falling back to CS values and host credentials.

    :param server: MyProxy server host (default from CS)
    :param serverCert: path to server certificate (default: host certificate)
    :param serverKey: path to server key (default: host key)
    :param timeout: command timeout in seconds (default 30)
    """
    self._secCmdTimeout = timeout if timeout else 30
    self._secServer = server if server else gConfig.getValue("/DIRAC/VOPolicy/MyProxyServer", "myproxy.cern.ch")
    ckLoc = Locations.getHostCertificateAndKeyLocation()
    if serverCert:
        self._secCertLoc = serverCert
    else:
        self._secCertLoc = ckLoc[0] if ckLoc else "%s/etc/grid-security/servercert.pem" % DIRAC.rootPath
    if serverKey:
        self._secKeyLoc = serverKey
    else:
        self._secKeyLoc = ckLoc[1] if ckLoc else "%s/etc/grid-security/serverkey.pem" % DIRAC.rootPath
    # The CS flag is a string; any of y/yes/true (case-insensitive) means trusted
    trustedFlag = gConfig.getValue("/DIRAC/VOPolicy/MyProxyTrustedHost", "True")
    self._secRunningFromTrustedHost = trustedFlag.lower() in ("y", "yes", "true")
    self._secMaxProxyHours = gConfig.getValue("/DIRAC/VOPolicy/MyProxyMaxDelegationTime", 168)
def getCert():
    """Return the host certificate path, or a hard-coded default when none is configured."""
    certTuple = Locations.getHostCertificateAndKeyLocation()
    if not certTuple:
        return "/opt/dirac/etc/grid-security/hostcert.pem"
    # (cert, key) tuple -- only the certificate path is needed
    return certTuple[0]
def __generateContextWithCerts(self):
    """Create the SSL context and load the host certificate/key pair into it.

    :return: S_OK() on success, S_ERROR(...) when no credentials are found
             or the context cannot be created
    """
    certKeyTuple = Locations.getHostCertificateAndKeyLocation()
    if not certKeyTuple:
        return S_ERROR("No valid certificate or key found")
    self.setLocalCredentialsLocation(certKeyTuple)
    gLogger.debug("Using certificate %s\nUsing key %s" % certKeyTuple)
    contextResult = self.__createContext()
    if not contextResult['OK']:
        return contextResult
    # Deep verification chain so proxies of proxies of proxies... are still accepted
    self.sslContext.set_verify_depth(VERIFY_DEPTH)
    certFile, keyFile = certKeyTuple
    self.sslContext.use_certificate_chain_file(certFile)
    self.sslContext.use_privatekey_file(keyFile)
    return S_OK()
def __generateContextWithCerts( self ):
    """Create the SSL context and load the host certificate/key pair into it.

    :return: S_OK() on success, S_ERROR(...) when no credentials are found
             or the context cannot be created
    """
    certKeyTuple = Locations.getHostCertificateAndKeyLocation()
    if not certKeyTuple:
        return S_ERROR( "No valid certificate or key found" )
    self.setLocalCredentialsLocation( certKeyTuple )
    gLogger.debug( "Using certificate %s\nUsing key %s" % certKeyTuple )
    retVal = self.__createContext()
    if not retVal[ 'OK' ]:
        return retVal
    # Large verify depth (50) to ensure accepting proxies of proxies of proxies....
    self.sslContext.set_verify_depth( 50 )
    self.sslContext.use_certificate_chain_file( certKeyTuple[0] )
    self.sslContext.use_privatekey_file( certKeyTuple[1] )
    return S_OK()
def __getCertificateID(self):
    """Load the host certificate and cache its subject DN as the identity.

    On success sets self.__userDN and self.__userGroup ('host').

    :return: bool -- False when the certificate is missing or unreadable
    """
    certLocation = Locations.getHostCertificateAndKeyLocation()
    if not certLocation:
        gLogger.error("No certificate found!")
        return False
    chain = X509Chain()
    retVal = chain.loadChainFromFile(certLocation[0])
    if not retVal['OK']:
        gLogger.error("Can't parse certificate!", retVal['Message'])
        return False
    # Bug fix: getIssuerCert()/getSubjectDN() can fail; the previous unchecked
    # ['Value'] accesses raised KeyError instead of returning False like the
    # rest of this method (the newer S_OK/S_ERROR variant guards these too).
    result = chain.getIssuerCert()
    if not result['OK']:
        gLogger.error("Can't get issuer certificate!", result['Message'])
        return False
    idCert = result['Value']
    result = idCert.getSubjectDN()
    if not result['OK']:
        gLogger.error("Can't get subject DN!", result['Message'])
        return False
    self.__userDN = result['Value']
    self.__userGroup = 'host'
    return True
def __getCertificateID( self ):
    """Load the host certificate and cache its subject DN as the identity.

    Sets self.__userDN and self.__userGroup ('host') on success.

    :return: bool -- False when the certificate is missing or unreadable
    """
    certLocation = Locations.getHostCertificateAndKeyLocation()
    if not certLocation:
        gLogger.error( "No certificate found!" )
        return False
    chain = X509Chain()
    retVal = chain.loadChainFromFile( certLocation[ 0 ] )
    if not retVal[ 'OK' ]:
        gLogger.error( "Can't parse certificate!", retVal[ 'Message' ] )
        return False
    # NOTE(review): no ['OK'] check on getIssuerCert()/getSubjectDN() -- a
    # failure here raises KeyError instead of returning False; newer variants
    # of this method guard both calls.
    idCert = chain.getIssuerCert()[ 'Value' ]
    self.__userDN = idCert.getSubjectDN()[ 'Value' ]
    self.__userGroup = 'host'
    return True
def startTornado(self):
    """
    Starts the tornado server when ready.
    This method never returns.

    Sets up TLS with the host certificate/key and the local CAs, starts the
    periodic monitoring callback, then blocks in the Tornado IOLoop.

    :raises ImportError: when the host certificate/key cannot be located
    """
    sLog.debug("Starting Tornado")
    self._initMonitoring()
    router = Application(self.urls, debug=False, compress_response=True)
    certs = Locations.getHostCertificateAndKeyLocation()
    if certs is False:
        sLog.fatal("Host certificates not found ! Can't start the Server")
        raise ImportError("Unable to load certificates")
    ca = Locations.getCAsLocation()
    # TLS configuration: client (peer) certificates are required
    ssl_options = {
        "certfile": certs[0],
        "keyfile": certs[1],
        "cert_reqs": M2Crypto.SSL.verify_peer,
        "ca_certs": ca,
        "sslDebug": DEBUG_M2CRYPTO,  # Set to true if you want to see the TLS debug messages
    }
    self.__monitorLastStatsUpdate = time.time()
    self.__report = self.__startReportToMonitoringLoop()
    # Starting monitoring, IOLoop waiting time in ms, __monitoringLoopDelay is defined in seconds
    tornado.ioloop.PeriodicCallback(self.__reportToMonitoring, self.__monitoringLoopDelay * 1000).start()
    # If we are running with python3, Tornado will use asyncio,
    # and we have to convince it to let us run in a different thread
    # Doing this ensures a consistent behavior between py2 and py3
    if six.PY3:
        import asyncio  # pylint: disable=import-error

        asyncio.set_event_loop_policy(tornado.platform.asyncio.AnyThreadEventLoopPolicy())
    # Start server
    server = HTTPServer(router, ssl_options=ssl_options, decompress_request=True)
    try:
        server.listen(self.port)
    except Exception as e:  # pylint: disable=broad-except
        sLog.exception("Exception starting HTTPServer", e)
        raise
    sLog.always("Listening on port %s" % self.port)
    for service in self.urls:
        sLog.debug("Available service: %s" % service)
    # Blocks forever serving requests
    IOLoop.current().start()
def generateCAFile(location=None):
    """ Generate/find a single CA file with all the PEMs

    :param str location: we can specify a specific CS location
                         where it's written a directory where to find the CAs and CRLs
    :return: directory where the file cas.pem which contains all certificates is found/created
    """
    caDir = Locations.getCAsLocation()
    if not caDir:
        return S_ERROR('No CAs dir found')
    # look in what's normally /etc/grid-security/certificates
    if os.path.isfile(os.path.join(os.path.dirname(caDir), "cas.pem")):
        return S_OK(os.path.join(os.path.dirname(caDir), "cas.pem"))
    # look in what's normally /opt/dirac/etc/grid-security
    diracCADirPEM = os.path.join(
        os.path.dirname(
            Locations.getHostCertificateAndKeyLocation(location)[0]), "cas.pem")
    if os.path.isfile(diracCADirPEM):
        return S_OK(diracCADirPEM)
    # Now we create it in tmpdir
    fn = tempfile.mkstemp(prefix="cas.", suffix=".pem")[1]
    try:
        with open(fn, "w") as fd:
            for caFile in os.listdir(caDir):
                caFile = os.path.join(caDir, caFile)
                chain = X509Chain.X509Chain()
                result = chain.loadChainFromFile(caFile)
                if not result['OK']:
                    # Not a loadable certificate -- skip it
                    continue
                expired = chain.hasExpired()
                if not expired['OK'] or expired['Value']:
                    # Skip CAs whose validity cannot be checked or is over
                    continue
                fd.write(chain.dumpAllToString()['Value'])
        gLogger.info("CAs used from: %s" % str(fn))
        return S_OK(fn)
    except IOError as err:
        gLogger.warn(err)
    return S_ERROR("Could not find/generate CAs")
def __loadM2SSLCTXHostcert(ctx):
    """ Load hostcert & key from the default location and set them as the
        credentials for SSL context ctx.

    :param ctx: M2Crypto-style SSL context to receive the credentials
    :raises RuntimeError: when the location is unset or either file is missing
    :return: None
    """
    certKeyTuple = Locations.getHostCertificateAndKeyLocation()
    if not certKeyTuple:
        raise RuntimeError("Hostcert/key location not set")
    hostcert, hostkey = certKeyTuple
    # Fail early with a precise message if either credential file is absent
    if not os.path.isfile(hostcert):
        raise RuntimeError("Hostcert file (%s) is missing" % hostcert)
    if not os.path.isfile(hostkey):
        raise RuntimeError("Hostkey file (%s) is missing" % hostkey)
    # Make sure we never stall on a password prompt if the hostkey has a password
    # by specifying a blank string.
    ctx.load_cert(hostcert, hostkey, callback=lambda: "")
def __deleteSandboxFromExternalBackend(self, SEName, SEPFN):
    """Delete a sandbox stored on an external SE.

    Either schedules a delayed RMS PhysicalRemoval request (owned by the
    sandbox owner) or removes the file directly via the StorageElement,
    depending on the DelayedExternalDeletion CS option.

    :param str SEName: storage element holding the sandbox
    :param str SEPFN: PFN of the sandbox on that SE
    :return: S_OK/S_ERROR (result of putRequest or removeFile)
    """
    if self.getCSOption("DelayedExternalDeletion", True):
        gLogger.info("Setting deletion request")
        try:
            # We need the hostDN used in order to pass these credentials to the
            # SandboxStoreDB..
            hostCertLocation, _ = Locations.getHostCertificateAndKeyLocation()
            hostCert = X509Certificate.X509Certificate()
            hostCert.loadFromFile(hostCertLocation)
            hostDN = hostCert.getSubjectDN().get("Value")
            # use the host authentication to fetch the data
            result = self.sandboxDB.getSandboxOwner(SEName, SEPFN, hostDN, "hosts")
            if not result["OK"]:
                return result
            _owner, ownerDN, ownerGroup = result["Value"]
            # Build an RMS request owned by the sandbox owner
            request = Request()
            request.RequestName = "RemoteSBDeletion:%s|%s:%s" % (SEName, SEPFN, time.time())
            request.OwnerDN = ownerDN
            request.OwnerGroup = ownerGroup
            physicalRemoval = Operation()
            physicalRemoval.Type = "PhysicalRemoval"
            physicalRemoval.TargetSE = SEName
            fileToRemove = File()
            fileToRemove.PFN = SEPFN
            physicalRemoval.addFile(fileToRemove)
            request.addOperation(physicalRemoval)
            return ReqClient().putRequest(request)
        except Exception as e:
            gLogger.exception("Exception while setting deletion request")
            return S_ERROR(f"Cannot set deletion request: {e}")
    else:
        gLogger.info("Deleting external Sandbox")
        try:
            return StorageElement(SEName).removeFile(SEPFN)
        except Exception:
            gLogger.exception("RM raised an exception while trying to delete a remote sandbox")
            return S_ERROR("RM raised an exception while trying to delete a remote sandbox")
def startTornado(self):
    """
    Starts the tornado server when ready.
    This method never returns.

    Sets up TLS with the host certificate/key and the local CAs, starts the
    periodic monitoring callback, then blocks in the Tornado IOLoop.

    :raises ImportError: when the host certificate/key cannot be located
    """
    sLog.debug("Starting Tornado")
    self._initMonitoring()
    router = Application(self.urls, debug=False, compress_response=True)
    certs = Locations.getHostCertificateAndKeyLocation()
    if certs is False:
        sLog.fatal("Host certificates not found ! Can't start the Server")
        raise ImportError("Unable to load certificates")
    ca = Locations.getCAsLocation()
    # TLS configuration: client (peer) certificates are required
    ssl_options = {
        'certfile': certs[0],
        'keyfile': certs[1],
        'cert_reqs': M2Crypto.SSL.verify_peer,
        'ca_certs': ca,
        'sslDebug': False,  # Set to true if you want to see the TLS debug messages
    }
    self.__monitorLastStatsUpdate = time.time()
    self.__report = self.__startReportToMonitoringLoop()
    # Starting monitoring, IOLoop waiting time in ms, __monitoringLoopDelay is defined in seconds
    tornado.ioloop.PeriodicCallback(self.__reportToMonitoring, self.__monitoringLoopDelay * 1000).start()
    # Start server
    server = HTTPServer(router, ssl_options=ssl_options, decompress_request=True)
    try:
        server.listen(self.port)
    except Exception as e:  # pylint: disable=broad-except
        sLog.exception("Exception starting HTTPServer", e)
        raise
    sLog.always("Listening on port %s" % self.port)
    for service in self.urls:
        sLog.debug("Available service: %s" % service)
    # Blocks forever serving requests
    IOLoop.current().start()
def generateRevokedCertsFile(location=None):
    """ Generate a single CA file with all the PEMs

    :param str location: we can specify a specific CS location
                         where it's written a directory where to find the CAs and CRLs
    :return: directory where the file crls.pem which contains all CRLs is created
    """
    caDir = Locations.getCAsLocation()
    if not caDir:
        return S_ERROR("No CAs dir found")
    # look in what's normally /etc/grid-security/certificates
    if os.path.isfile(os.path.join(os.path.dirname(caDir), "crls.pem")):
        return S_OK(os.path.join(os.path.dirname(caDir), "crls.pem"))
    # look in what's normally /opt/dirac/etc/grid-security
    diracCADirPEM = os.path.join(
        os.path.dirname(
            Locations.getHostCertificateAndKeyLocation(location)[0]), "crls.pem")
    if os.path.isfile(diracCADirPEM):
        return S_OK(diracCADirPEM)
    # Now we create it in tmpdir
    fn = tempfile.mkstemp(prefix="crls", suffix=".pem")[1]
    try:
        # binary mode: the dumped CRL content is written as bytes
        with open(fn, "wb") as fd:
            for caFile in os.listdir(caDir):
                caFile = os.path.join(caDir, caFile)
                result = X509CRL.X509CRL.instanceFromFile(caFile)
                if not result["OK"]:
                    # Not a parseable CRL -- skip it
                    continue
                chain = result["Value"]
                fd.write(chain.dumpAllToString()["Value"])
        return S_OK(fn)
    except IOError as err:
        gLogger.warn(err)
    return S_ERROR("Could not find/generate CRLs")
def delegate(delegationRequest, kwargs):
    """ Check delegate!
    """
    if kwargs.get("useCertificates"):
        # Host credentials: certificate and key live in two separate files
        hostTuple = Locations.getHostCertificateAndKeyLocation()
        chain = X509Chain()
        chain.loadChainFromFile(hostTuple[0])
        chain.loadKeyFromFile(hostTuple[1])
    elif "proxyObject" in kwargs:
        # A ready-made chain was handed to us directly
        chain = kwargs["proxyObject"]
    else:
        # Proxy file: chain and key share a single PEM file
        procLoc = kwargs["proxyLocation"] if "proxyLocation" in kwargs else Locations.getProxyLocation()
        chain = X509Chain()
        chain.loadChainFromFile(procLoc)
        chain.loadKeyFromFile(procLoc)
    return chain.generateChainFromRequestString(delegationRequest)
def delegate( delegationRequest, kwargs ):
    """ Check delegate!

    Builds the credential chain (host cert/key, a provided proxy object,
    or a proxy file) and answers the delegation request with it.

    :param delegationRequest: request string to answer
    :param dict kwargs: credential selection options
    :return: result of generateChainFromRequestString
    """
    if "useCertificates" in kwargs and kwargs[ 'useCertificates' ]:
        # Host credentials: certificate and key live in two separate files
        chain = X509Chain()
        certTuple = Locations.getHostCertificateAndKeyLocation()
        chain.loadChainFromFile( certTuple[0] )
        chain.loadKeyFromFile( certTuple[1] )
    elif "proxyObject" in kwargs:
        # A ready-made chain was handed to us directly
        chain = kwargs[ 'proxyObject' ]
    else:
        # Proxy file: chain and key share a single PEM file
        if "proxyLocation" in kwargs:
            procLoc = kwargs[ "proxyLocation" ]
        else:
            procLoc = Locations.getProxyLocation()
        chain = X509Chain()
        chain.loadChainFromFile( procLoc )
        chain.loadKeyFromFile( procLoc )
    return chain.generateChainFromRequestString( delegationRequest )
def __generateProxy( self ):
    """Create a one-hour proxy from the host certificate and write it
    to self.proxyLocation.

    :return: bool -- success flag
    """
    self.log.info( "Generating proxy..." )
    certLoc = Locations.getHostCertificateAndKeyLocation()
    if not certLoc:
        self.log.error( "Can not find certificate!" )
        return False
    certFile, keyFile = certLoc
    chain = X509Chain.X509Chain()
    loadResult = chain.loadChainFromFile( certFile )
    if not loadResult[ 'OK' ]:
        self.log.error( "Can not load certificate file", "%s : %s" % ( certFile, loadResult[ 'Message' ] ) )
        return False
    loadResult = chain.loadKeyFromFile( keyFile )
    if not loadResult[ 'OK' ]:
        self.log.error( "Can not load key file", "%s : %s" % ( keyFile, loadResult[ 'Message' ] ) )
        return False
    # 3600 seconds = 1 hour of proxy validity
    genResult = chain.generateProxyToFile( self.proxyLocation, 3600 )
    if not genResult[ 'OK' ]:
        self.log.error( "Could not generate proxy file", genResult[ 'Message' ] )
        return False
    self.log.info( "Proxy generated" )
    return True
def __getCertificateID(self):
    """Get certificate identity information

    Loads the host certificate and caches its subject DN in self.__userDN
    with group 'host'.

    :return: S_OK()/S_ERROR()
    """
    certLocation = Locations.getHostCertificateAndKeyLocation()
    if not certLocation:
        return S_ERROR("No certificate found!")
    chain = X509Chain()
    retVal = chain.loadChainFromFile(certLocation[0])
    if not retVal["OK"]:
        return S_ERROR("Can't parse certificate!", retVal["Message"])
    result = chain.getIssuerCert()
    if result["OK"]:
        idCert = result["Value"]
        result = idCert.getSubjectDN()
    # `result` now holds either the failed getIssuerCert() or the
    # getSubjectDN() outcome -- a single check covers both failures
    if not result["OK"]:
        return result
    self.__userDN = result["Value"]
    self.__userGroup = "host"
    return S_OK()
def generateCAFile(location=None):
    """ Generate a single CA file with all the PEMs

    :param str location: we can specify a specific location in CS
    :return: file cas.pem which contains all certificates
    """
    caDir = Locations.getCAsLocation()
    # Candidate targets, in order: next to the CAs dir, next to the host
    # certificate, and finally (False placeholder) a temporary file.
    for fn in (
            os.path.join(os.path.dirname(caDir), "cas.pem"),
            os.path.join(
                os.path.dirname(
                    Locations.getHostCertificateAndKeyLocation(location)[0]), "cas.pem"),
            False):
        if not fn:
            # Fallback: an always-writable temporary file
            fn = tempfile.mkstemp(prefix="cas.", suffix=".pem")[1]
        try:
            with open(fn, "w") as fd:
                for caFile in os.listdir(caDir):
                    caFile = os.path.join(caDir, caFile)
                    chain = X509Chain.X509Chain()
                    result = chain.loadChainFromFile(caFile)
                    if not result['OK']:
                        # Not a loadable certificate -- skip it
                        continue
                    expired = chain.hasExpired()
                    if not expired['OK'] or expired['Value']:
                        # Skip CAs whose validity cannot be checked or is over
                        continue
                    fd.write(chain.dumpAllToString()['Value'])
            gLogger.info("CAs used from: %s" % str(fn))
            return S_OK(fn)
        except IOError as err:
            # Target not writable -- log and try the next candidate
            gLogger.warn(err)
    return S_ERROR(caDir)
def generateCAFile(location=None):
    """ Generate a single CA file with all the PEMs

    :param str location: we can specify a specific location in CS
    :return: file cas.pem which contains all certificates
    """
    caDir = Locations.getCAsLocation()
    # Candidate targets, in order: next to the CAs dir, next to the host
    # certificate, and finally (False placeholder) a temporary file.
    for fn in (
            os.path.join(os.path.dirname(caDir), "cas.pem"),
            os.path.join(
                os.path.dirname(
                    Locations.getHostCertificateAndKeyLocation(location)[0]), "cas.pem"),
            False):
        if not fn:
            # Fallback: an always-writable temporary file
            fn = tempfile.mkstemp(prefix="cas.", suffix=".pem")[1]
        try:
            with open(fn, "w") as fd:
                for caFile in os.listdir(caDir):
                    caFile = os.path.join(caDir, caFile)
                    result = X509Chain.X509Chain.instanceFromFile(caFile)
                    if not result['OK']:
                        # Not a loadable certificate -- skip it
                        continue
                    chain = result['Value']
                    expired = chain.hasExpired()
                    if not expired['OK'] or expired['Value']:
                        # Skip CAs whose validity cannot be checked or is over
                        continue
                    fd.write(chain.dumpAllToString()['Value'])
            gLogger.info("CAs used from: %s" % str(fn))
            return S_OK(fn)
        except IOError as err:
            # Target not writable -- log and try the next candidate
            gLogger.warn(err)
    return S_ERROR(caDir)
def checkSanity( urlTuple, kwargs ):
    """ Check that all ssl environment is ok

    Validates the credentials selected by kwargs (host certificate, proxy
    string, or proxy file), checks CA availability and expiry, and extracts
    the identity.

    :param urlTuple: target URL tuple (not inspected here)
    :param dict kwargs: credential selection options
    :return: S_OK(dict with 'DN' and optionally 'group') / S_ERROR
    """
    useCerts = False
    certFile = ''
    if "useCertificates" in kwargs and kwargs[ 'useCertificates' ]:
        # Host certificate mode
        certTuple = Locations.getHostCertificateAndKeyLocation()
        if not certTuple:
            gLogger.error( "No cert/key found! " )
            return S_ERROR( "No cert/key found! " )
        certFile = certTuple[0]
        useCerts = True
    elif "proxyString" in kwargs:
        # In-memory proxy: only type-checked here, loaded further below
        if not isinstance( kwargs[ 'proxyString' ], basestring ):
            gLogger.error( "proxyString parameter is not a valid type", str( type( kwargs[ 'proxyString' ] ) ) )
            return S_ERROR( "proxyString parameter is not a valid type" )
    else:
        # Proxy file mode: explicit location or the default discovery
        if "proxyLocation" in kwargs:
            certFile = kwargs[ "proxyLocation" ]
        else:
            certFile = Locations.getProxyLocation()
        if not certFile:
            gLogger.error( "No proxy found" )
            return S_ERROR( "No proxy found" )
        elif not os.path.isfile( certFile ):
            gLogger.error( "Proxy file does not exist", certFile )
            return S_ERROR( "%s proxy file does not exist" % certFile )
    #For certs always check CA's. For clients skipServerIdentityCheck
    if 'skipCACheck' not in kwargs or not kwargs[ 'skipCACheck' ]:
        if not Locations.getCAsLocation():
            gLogger.error( "No CAs found!" )
            return S_ERROR( "No CAs found!" )
    if "proxyString" in kwargs:
        certObj = X509Chain()
        retVal = certObj.loadChainFromString( kwargs[ 'proxyString' ] )
        if not retVal[ 'OK' ]:
            gLogger.error( "Can't load proxy string" )
            return S_ERROR( "Can't load proxy string" )
    else:
        if useCerts:
            certObj = X509Certificate()
            certObj.loadFromFile( certFile )
        else:
            certObj = X509Chain()
            certObj.loadChainFromFile( certFile )
        retVal = certObj.hasExpired()
        if not retVal[ 'OK' ]:
            gLogger.error( "Can't verify proxy or certificate file", "%s:%s" % ( certFile, retVal[ 'Message' ] ) )
            return S_ERROR( "Can't verify file %s:%s" % ( certFile, retVal[ 'Message' ] ) )
        else:
            if retVal[ 'Value' ]:
                # Expired: report with the not-after date when available
                notAfter = certObj.getNotAfterDate()
                if notAfter[ 'OK' ]:
                    notAfter = notAfter[ 'Value' ]
                else:
                    notAfter = "unknown"
                gLogger.error( "PEM file has expired", "%s is not valid after %s" % ( certFile, notAfter ) )
                return S_ERROR( "PEM file %s has expired, not valid after %s" % ( certFile, notAfter ) )
    idDict = {}
    retVal = certObj.getDIRACGroup( ignoreDefault = True )
    if retVal[ 'OK' ] and retVal[ 'Value' ] != False:
        idDict[ 'group' ] = retVal[ 'Value' ]
    if useCerts:
        idDict[ 'DN' ] = certObj.getSubjectDN()[ 'Value' ]
    else:
        # For proxies the identity is the issuer certificate's subject
        idDict[ 'DN' ] = certObj.getIssuerCert()[ 'Value' ].getSubjectDN()[ 'Value' ]
    return S_OK( idDict )
def setupConnection(self, parameters=None):
    """
    Establishes a new connection to a Stomp server, e.g. RabbitMQ

    :param dict parameters: dictionary with additional MQ parameters if any
    :return: S_OK/S_ERROR
    """
    if parameters is not None:
        self.parameters.update(parameters)
    # Check that the minimum set of parameters is present.
    # Bug fix: test the merged self.parameters -- the original tested the raw
    # `parameters` argument, which raises TypeError when it is None.
    if not all(p in self.parameters for p in ('Host', 'VHost')):
        return S_ERROR('Input parameters are missing!')
    # Make the actual connection
    host = self.parameters.get('Host')
    port = self.parameters.get('Port', 61613)
    vhost = self.parameters.get('VHost')
    sslVersion = self.parameters.get('SSLVersion')
    hostcert = self.parameters.get('HostCertificate')
    hostkey = self.parameters.get('HostKey')
    # get local key and certificate if not available via configuration
    if sslVersion and not (hostcert or hostkey):
        paths = Locations.getHostCertificateAndKeyLocation()
        if not paths:
            return S_ERROR('Could not find a certificate!')
        hostcert, hostkey = paths
    try:
        # get IP addresses of brokers
        brokers = socket.gethostbyname_ex(host)
        self.log.info('Broker name resolves to %s IP(s)' % len(brokers[2]))
        if sslVersion is None:
            pass
        elif sslVersion == 'TLSv1':
            sslVersion = ssl.PROTOCOL_TLSv1
        else:
            return S_ERROR(EMQCONN, 'Invalid SSL version provided: %s' % sslVersion)
        # One stomp connection per resolved broker IP
        for ip in brokers[2]:
            if sslVersion:
                self.connections[ip] = stomp.Connection(
                    [(ip, int(port))],
                    use_ssl=True,
                    ssl_version=sslVersion,
                    ssl_key_file=hostkey,
                    ssl_cert_file=hostcert,
                    vhost=vhost,
                    keepalive=True
                )
            else:
                self.connections[ip] = stomp.Connection(
                    [(ip, int(port))],
                    vhost=vhost,
                    keepalive=True
                )
    except Exception as e:
        return S_ERROR(EMQCONN, 'Failed to setup connection: %s' % e)
    return S_OK('Setup successful')
def executeForVO(self, vo):
    """ Perform the synchronisation for one VO.

    Logs in to Rucio (host cert/key, falling back to username/password) and
    propagates the DIRAC RSS read/write/remove statuses to the Rucio RSEs.

    :param vo: VO name
    :return: S_OK or S_ERROR
    """
    rSS = ResourceStatus()
    try:
        try:
            self.log.info("Login to Rucio as privileged user with host cert/key")
            certKeyTuple = Locations.getHostCertificateAndKeyLocation()
            if not certKeyTuple:
                self.log.error("Hostcert/key location not set")
                return S_ERROR("Hostcert/key location not set")
            hostcert, hostkey = certKeyTuple
            self.log.info("Logging in with a host cert/key pair:")
            self.log.debug("account: ", self.clientConfig[vo]["privilegedAccount"])
            self.log.debug("rucio host: ", self.clientConfig[vo]["rucioHost"])
            self.log.debug("auth host: ", self.clientConfig[vo]["authHost"])
            self.log.debug("CA cert path: ", self.caCertPath)
            self.log.debug("Cert location: ", hostcert)
            self.log.debug("Key location: ", hostkey)
            self.log.debug("VO: ", vo)
            client = Client(
                account=self.clientConfig[vo]["privilegedAccount"],
                rucio_host=self.clientConfig[vo]["rucioHost"],
                auth_host=self.clientConfig[vo]["authHost"],
                ca_cert=self.caCertPath,
                auth_type="x509",
                creds={"client_cert": hostcert, "client_key": hostkey},
                timeout=600,
                user_agent="rucio-clients",
                vo=vo,
            )
        except Exception as err:
            # Fall back to a username/password login
            self.log.info("Login to Rucio as privileged user with host cert/key failed. Try username/password")
            client = Client(account="root", auth_type="userpass")
    except Exception as exc:
        # login exception, skip this VO
        self.log.exception("Login for VO failed. VO skipped ", "VO=%s" % vo, lException=exc)
        return S_ERROR(str(format_exc()))
    self.log.info(" Rucio login successful - continue with the RSS synchronisation")
    try:
        # Mirror the DIRAC SE statuses onto the corresponding Rucio RSE flags
        for rse in client.list_rses():
            thisSe = rse["rse"]
            self.log.info("Checking Dirac SE status for %s" % thisSe)
            resStatus = rSS.getElementStatus(thisSe, "StorageElement", vO=vo)
            dictSe = client.get_rse(thisSe)
            if resStatus["OK"]:
                self.log.debug("SE status ", resStatus["Value"])
                seAccessValue = resStatus["Value"][thisSe]
                # Active and Degraded both count as available
                availabilityRead = True if seAccessValue["ReadAccess"] in ["Active", "Degraded"] else False
                availabilityWrite = True if seAccessValue["WriteAccess"] in ["Active", "Degraded"] else False
                availabilityDelete = True if seAccessValue["RemoveAccess"] in ["Active", "Degraded"] else False
                isUpdated = False
                # Only push an update when the Rucio flag differs
                if dictSe["availability_read"] != availabilityRead:
                    self.log.info(
                        "Set availability_read for RSE",
                        "RSE: %s, availability: %s" % (thisSe, availabilityRead))
                    client.update_rse(thisSe, {"availability_read": availabilityRead})
                    isUpdated = True
                if dictSe["availability_write"] != availabilityWrite:
                    self.log.info(
                        "Set availability_write for RSE",
                        "RSE: %s, availability: %s" % (thisSe, availabilityWrite))
                    client.update_rse(thisSe, {"availability_write": availabilityWrite})
                    isUpdated = True
                if dictSe["availability_delete"] != availabilityDelete:
                    self.log.info(
                        "Set availability_delete for RSE",
                        "RSE: %s, availability: %s" % (thisSe, availabilityDelete),
                    )
                    client.update_rse(thisSe, {"availability_delete": availabilityDelete})
                    isUpdated = True
    except Exception as err:
        return S_ERROR(str(err))
    return S_OK()
def _request(self, retry=0, outputFile=None, **kwargs):
    """Sends the request to server

    :param retry: internal parameters for recursive call. TODO: remove ?
    :param outputFile: (default None) path to a file where to store the received data.
                       If set, the server response will be streamed for optimization
                       purposes, and the response data will not go through the JDecode process
    :param **kwargs: Any argument there is used as a post parameter. They are detailed below.
    :param method: (mandatory) name of the distant method
    :param args: (mandatory) json serialized list of argument for the procedure

    :returns: The received data. If outputFile is set, return always S_OK
    """
    # Adding some informations to send
    if self.__extraCredentials:
        kwargs[self.KW_EXTRA_CREDENTIALS] = encode(self.__extraCredentials)
    kwargs["clientVO"] = self.vo
    kwargs["clientSetup"] = self.setup

    # Getting URL
    url = self.__findServiceURL()
    if not url["OK"]:
        return url
    url = url["Value"]

    # Getting CA file (or skip verification)
    verify = not self.kwargs.get(self.KW_SKIP_CA_CHECK)
    if verify:
        if not self.__ca_location:
            self.__ca_location = Locations.getCAsLocation()
            if not self.__ca_location:
                gLogger.error("No CAs found!")
                return S_ERROR("No CAs found!")
        verify = self.__ca_location

    # getting certificate
    # Do we use the server certificate ?
    if self.kwargs[self.KW_USE_CERTIFICATES]:
        cert = Locations.getHostCertificateAndKeyLocation()
    elif self.kwargs.get(self.KW_PROXY_STRING):
        # Dump the in-memory proxy to a temporary file for requests
        tmpHandle, cert = tempfile.mkstemp()
        fp = os.fdopen(tmpHandle, "wb")
        fp.write(self.kwargs[self.KW_PROXY_STRING])
        fp.close()
    # CHRIS 04.02.21
    # TODO: add proxyLocation check ?
    else:
        cert = Locations.getProxyLocation()
        if not cert:
            gLogger.error("No proxy found")
            return S_ERROR("No proxy found")

    # We have a try/except for all the exceptions
    # whose default behavior is to try again,
    # maybe to different server
    try:
        # And we have a second block to handle specific exceptions
        # which makes it not worth retrying
        try:
            rawText = None

            # Default case, just return the result
            if not outputFile:
                call = requests.post(url, data=kwargs, timeout=self.timeout, verify=verify, cert=cert)
                # raising the exception for status here
                # means essentialy that we are losing here the information of what is returned by the server
                # as error message, since it is not passed to the exception
                # However, we can store the text and return it raw as an error,
                # since there is no guarantee that it is any JEncoded text
                # Note that we would get an exception only if there is an exception on the server side which
                # is not handled.
                # Any standard S_ERROR will be transfered as an S_ERROR with a correct code.
                rawText = call.text
                call.raise_for_status()
                return decode(rawText)[0]
            else:
                # Instruct the server not to encode the response
                kwargs["rawContent"] = True

                rawText = None
                # Stream download
                # https://requests.readthedocs.io/en/latest/user/advanced/#body-content-workflow
                with requests.post(
                    url, data=kwargs, timeout=self.timeout, verify=verify, cert=cert, stream=True
                ) as r:
                    rawText = r.text
                    r.raise_for_status()

                    with open(outputFile, "wb") as f:
                        for chunk in r.iter_content(4096):
                            # if chunk: # filter out keep-alive new chuncks
                            f.write(chunk)

                return S_OK()

        # Some HTTPError are not worth retrying
        except requests.exceptions.HTTPError as e:
            status_code = e.response.status_code
            if status_code == http_client.NOT_IMPLEMENTED:
                return S_ERROR(errno.ENOSYS, "%s is not implemented" % kwargs.get("method"))
            elif status_code in (http_client.FORBIDDEN, http_client.UNAUTHORIZED):
                return S_ERROR(errno.EACCES, "No access to %s" % url)

            # if it is something else, retry
            raise

    # Whatever exception we have here, we deem worth retrying
    except Exception as e:
        # CHRIS TODO review this part: retry logic is fishy
        # self.__bannedUrls is emptied in findServiceURLs
        if url not in self.__bannedUrls:
            self.__bannedUrls += [url]
        if retry < self.__nbOfUrls - 1:
            # Bug fix: return the retry's result. Previously the recursive
            # call's return value was discarded and execution fell through to
            # the S_ERROR below even when the retry succeeded.
            return self._request(retry=retry + 1, outputFile=outputFile, **kwargs)

        errStr = "%s: %s" % (str(e), rawText)
        return S_ERROR(errStr)
def setupConnection(self, parameters=None):
    """
    Establishes a new connection to a Stomp server, e.g. RabbitMQ

    Args:
      parameters(dict): dictionary with additional MQ parameters if any.

    Returns:
      S_OK/S_ERROR
    """
    log = LOG.getSubLogger("setupConnection")
    if parameters is not None:
        self.parameters.update(parameters)
    # Check that the minimum set of parameters is present
    # NOTE(review): this tests the raw `parameters` argument, so calling with
    # parameters=None raises TypeError instead of returning S_ERROR -- confirm intended.
    if not all(p in parameters for p in ("Host", "VHost")):
        return S_ERROR("Input parameters are missing!")
    # Reconnection tuning, with class-level defaults
    reconnectSleepInitial = self.parameters.get(
        "ReconnectSleepInitial", StompMQConnector.RECONNECT_SLEEP_INITIAL)
    reconnectSleepIncrease = self.parameters.get(
        "ReconnectSleepIncrease", StompMQConnector.RECONNECT_SLEEP_INCREASE)
    reconnectSleepMax = self.parameters.get(
        "ReconnectSleepMax", StompMQConnector.RECONNECT_SLEEP_MAX)
    reconnectSleepJitter = self.parameters.get(
        "ReconnectSleepJitter", StompMQConnector.RECONNECT_SLEEP_JITTER)
    reconnectAttemptsMax = self.parameters.get(
        "ReconnectAttemptsMax", StompMQConnector.RECONNECT_ATTEMPTS_MAX)
    host = self.parameters.get("Host")
    port = self.parameters.get("Port", StompMQConnector.PORT)
    vhost = self.parameters.get("VHost")
    sslVersion = self.parameters.get("SSLVersion")
    hostcert = self.parameters.get("HostCertificate")
    hostkey = self.parameters.get("HostKey")
    connectionArgs = {
        "vhost": vhost,
        "keepalive": True,
        "reconnect_sleep_initial": reconnectSleepInitial,
        "reconnect_sleep_increase": reconnectSleepIncrease,
        "reconnect_sleep_max": reconnectSleepMax,
        "reconnect_sleep_jitter": reconnectSleepJitter,
        "reconnect_attempts_max": reconnectAttemptsMax,
    }
    sslArgs = None
    # We use ssl credentials and not user-password.
    if sslVersion is not None:
        if sslVersion == "TLSv1":
            sslVersion = ssl.PROTOCOL_TLSv1
            # get local key and certificate if not available via configuration
            if not (hostcert or hostkey):
                paths = Locations.getHostCertificateAndKeyLocation()
                if not paths:
                    return S_ERROR("Could not find a certificate!")
                hostcert = paths[0]
                hostkey = paths[1]
            sslArgs = {
                "use_ssl": True,
                "ssl_version": sslVersion,
                "ssl_key_file": hostkey,
                "ssl_cert_file": hostcert,
            }
        else:
            return S_ERROR(EMQCONN, "Invalid SSL version provided: %s" % sslVersion)
    try:
        # Get IP addresses of brokers
        # Start with the IPv6, and randomize it
        ipv6_addrInfo = socket.getaddrinfo(host, port, socket.AF_INET6, socket.SOCK_STREAM)
        random.shuffle(ipv6_addrInfo)
        # Same with IPv4
        ipv4_addrInfo = socket.getaddrinfo(host, port, socket.AF_INET, socket.SOCK_STREAM)
        random.shuffle(ipv4_addrInfo)
        # Create the host_port tuples, keeping the ipv6 in front
        host_and_ports = []
        for _family, _socktype, _proto, _canonname, sockaddr in ipv6_addrInfo + ipv4_addrInfo:
            host_and_ports.append((sockaddr[0], sockaddr[1]))
        connectionArgs.update({"host_and_ports": host_and_ports})
        log.debug("Connection args: %s" % str(connectionArgs))
        self.connection = stomp.Connection(**connectionArgs)
        if sslArgs:
            self.connection.set_ssl(**sslArgs)
    except Exception as e:
        log.debug("Failed setting up connection", repr(e))
        return S_ERROR(EMQCONN, "Failed to setup connection: %s" % e)
    return S_OK("Setup successful")
def setupConnection(self, parameters=None):
    """
    Establishes a new connection to a Stomp server, e.g. RabbitMQ

    Args:
      parameters(dict): dictionary with additional MQ parameters if any.

    Returns:
      S_OK/S_ERROR
    """
    log = LOG.getSubLogger('setupConnection')
    # Merge caller-supplied parameters over the instance defaults.
    if parameters is not None:
        self.parameters.update(parameters)
    # Check that the minimum set of parameters is present
    # NOTE(review): this tests the raw `parameters` argument, which may be
    # None (TypeError) or incomplete even when self.parameters already holds
    # Host/VHost -- confirm whether self.parameters was intended here.
    if not all(p in parameters for p in ('Host', 'VHost')):
        return S_ERROR('Input parameters are missing!')
    # Reconnection tuning knobs, defaulting to the class-level constants.
    reconnectSleepInitial = self.parameters.get('ReconnectSleepInitial', StompMQConnector.RECONNECT_SLEEP_INITIAL)
    reconnectSleepIncrease = self.parameters.get('ReconnectSleepIncrease', StompMQConnector.RECONNECT_SLEEP_INCREASE)
    reconnectSleepMax = self.parameters.get('ReconnectSleepMax', StompMQConnector.RECONNECT_SLEEP_MAX)
    reconnectSleepJitter = self.parameters.get('ReconnectSleepJitter', StompMQConnector.RECONNECT_SLEEP_JITTER)
    reconnectAttemptsMax = self.parameters.get('ReconnectAttemptsMax', StompMQConnector.RECONNECT_ATTEMPTS_MAX)
    host = self.parameters.get('Host')
    port = self.parameters.get('Port', StompMQConnector.PORT)
    vhost = self.parameters.get('VHost')
    sslVersion = self.parameters.get('SSLVersion')
    hostcert = self.parameters.get('HostCertificate')
    hostkey = self.parameters.get('HostKey')
    # Keyword arguments handed verbatim to stomp.Connection below.
    connectionArgs = {'vhost': vhost,
                      'keepalive': True,
                      'reconnect_sleep_initial': reconnectSleepInitial,
                      'reconnect_sleep_increase': reconnectSleepIncrease,
                      'reconnect_sleep_max': reconnectSleepMax,
                      'reconnect_sleep_jitter': reconnectSleepJitter,
                      'reconnect_attempts_max': reconnectAttemptsMax}
    # We use ssl credentials and not user-password.
    if sslVersion is not None:
        if sslVersion == 'TLSv1':
            sslVersion = ssl.PROTOCOL_TLSv1
            # get local key and certificate if not available via configuration
            # NOTE(review): `or` means defaults are fetched only when BOTH are
            # missing; providing just one leaves the other as None -- confirm
            # whether `and` was intended.
            if not (hostcert or hostkey):
                paths = Locations.getHostCertificateAndKeyLocation()
                if not paths:
                    return S_ERROR('Could not find a certificate!')
                hostcert = paths[0]
                hostkey = paths[1]
            # SSL settings are passed through the same kwargs dict as the
            # connection settings in this variant.
            connectionArgs.update({
                'use_ssl': True,
                'ssl_version': sslVersion,
                'ssl_key_file': hostkey,
                'ssl_cert_file': hostcert})
        else:
            # Only the literal 'TLSv1' is accepted.
            return S_ERROR(EMQCONN, 'Invalid SSL version provided: %s' % sslVersion)
    try:
        # Get IP addresses of brokers and ignoring two first returned arguments which are hostname and aliaslist.
        _, _, ip_addresses = socket.gethostbyname_ex(host)
        log.info('Broker name resolved', 'to %s IP(s)' % len(ip_addresses))
        # One stomp.Connection is created per resolved broker IP, keyed by IP.
        for ip in ip_addresses:
            connectionArgs.update({'host_and_ports': [(ip, int(port))]})
            log.debug("Connection args: %s" % str(connectionArgs))
            self.connections[ip] = stomp.Connection(**connectionArgs)
    except Exception as e:
        return S_ERROR(EMQCONN, 'Failed to setup connection: %s' % e)
    return S_OK('Setup successful')
def checkSanity(urlTuple, kwargs):
    """
    Check that all ssl environment is ok

    :param urlTuple: parsed URL tuple (unused in this function body)
    :param dict kwargs: connection options; recognised keys used here are
        'useCertificates', 'proxyString', 'proxyLocation' and 'skipCACheck'
    :return: S_OK(dict with 'DN' and optionally 'group') or S_ERROR
    """
    useCerts = False
    # Decide which credential to validate: host certificate, an in-memory
    # proxy string, or a proxy file on disk.
    if "useCertificates" in kwargs and kwargs['useCertificates']:
        certTuple = Locations.getHostCertificateAndKeyLocation()
        if not certTuple:
            gLogger.error("No cert/key found! ")
            return S_ERROR("No cert/key found! ")
        certFile = certTuple[0]
        useCerts = True
    elif "proxyString" in kwargs:
        # Python-2 style type check (types.StringType); the chain itself is
        # loaded further below.
        if type(kwargs['proxyString']) != types.StringType:
            gLogger.error("proxyString parameter is not a valid type")
            return S_ERROR("proxyString parameter is not a valid type")
    else:
        # Fall back to a proxy file: explicit location or the discovered one.
        if "proxyLocation" in kwargs:
            certFile = kwargs["proxyLocation"]
        else:
            certFile = Locations.getProxyLocation()
        if not certFile:
            gLogger.error("No proxy found")
            return S_ERROR("No proxy found")
        elif not os.path.isfile(certFile):
            gLogger.error("%s proxy file does not exist" % certFile)
            return S_ERROR("%s proxy file does not exist" % certFile)
    # For certs always check CA's. For clients skipServerIdentityCheck
    if 'skipCACheck' not in kwargs or not kwargs['skipCACheck']:
        if not Locations.getCAsLocation():
            gLogger.error("No CAs found!")
            return S_ERROR("No CAs found!")
    # Load the credential object matching the branch chosen above.
    if "proxyString" in kwargs:
        certObj = X509Chain()
        retVal = certObj.loadChainFromString(kwargs['proxyString'])
        if not retVal['OK']:
            gLogger.error("Can't load proxy string")
            return S_ERROR("Can't load proxy string")
    else:
        if useCerts:
            certObj = X509Certificate()
            certObj.loadFromFile(certFile)
        else:
            certObj = X509Chain()
            certObj.loadChainFromFile(certFile)
        # Expiry is only verified for file-based credentials in this variant.
        retVal = certObj.hasExpired()
        if not retVal['OK']:
            gLogger.error("Can't verify file %s:%s" % (certFile, retVal['Message']))
            return S_ERROR("Can't verify file %s:%s" % (certFile, retVal['Message']))
        else:
            if retVal['Value']:
                # Expired: try to report the notAfter date, tolerating failure.
                notAfter = certObj.getNotAfterDate()
                if notAfter['OK']:
                    notAfter = notAfter['Value']
                else:
                    notAfter = "unknown"
                gLogger.error("PEM file has expired", "%s is not valid after %s" % (certFile, notAfter))
                return S_ERROR("PEM file %s has expired, not valid after %s" % (certFile, notAfter))
    # Build the identity summary returned to the caller.
    idDict = {}
    retVal = certObj.getDIRACGroup(ignoreDefault=True)
    if retVal['OK'] and retVal['Value'] != False:
        idDict['group'] = retVal['Value']
    if useCerts:
        idDict['DN'] = certObj.getSubjectDN()['Value']
    else:
        # For proxies, the DN is taken from the issuer certificate of the chain.
        idDict['DN'] = certObj.getIssuerCert()['Value'].getSubjectDN()['Value']
    return S_OK(idDict)
def setupConnection(self, parameters=None):
    """
    Establishes a new connection to a Stomp server, e.g. RabbitMQ

    :param dict parameters: dictionary with additional MQ parameters if any
    :return: S_OK/S_ERROR
    """
    # Merge caller-supplied parameters over the instance defaults.
    if parameters is not None:
        self.parameters.update(parameters)
    # Check that the minimum set of parameters is present
    # NOTE(review): this tests the raw `parameters` argument, which may be
    # None even when self.parameters already holds Host/VHost -- confirm
    # whether self.parameters was intended.
    if not all(p in parameters for p in ('Host', 'VHost')):
        return S_ERROR('Input parameters are missing!')
    # Make the actual connection
    host = self.parameters.get('Host')
    # 61613 is the default STOMP port.
    port = self.parameters.get('Port', 61613)
    vhost = self.parameters.get('VHost')
    sslVersion = self.parameters.get('SSLVersion')
    hostcert = self.parameters.get('HostCertificate')
    hostkey = self.parameters.get('HostKey')
    # get local key and certificate if not available via configuration
    # NOTE(review): `or` fetches defaults only when BOTH are missing.
    if sslVersion and not (hostcert or hostkey):
        paths = Locations.getHostCertificateAndKeyLocation()
        if not paths:
            return S_ERROR('Could not find a certificate!')
        else:
            hostcert = paths[0]
            hostkey = paths[1]
    try:
        # get IP addresses of brokers
        brokers = socket.gethostbyname_ex(host)
        self.log.info('Broker name resolves to %s IP(s)' % len(brokers[2]))
        # Map the configured SSL version string onto the ssl module constant;
        # only 'TLSv1' (or no SSL at all) is accepted.
        if sslVersion is None:
            pass
        elif sslVersion == 'TLSv1':
            sslVersion = ssl.PROTOCOL_TLSv1
        else:
            return S_ERROR(EMQCONN, 'Invalid SSL version provided: %s' % sslVersion)
        # One stomp.Connection per resolved broker IP, keyed by IP.
        for ip in brokers[2]:
            if sslVersion:
                self.connections[ip] = stomp.Connection(
                    [(ip, int(port))],
                    use_ssl=True,
                    ssl_version=sslVersion,
                    ssl_key_file=hostkey,
                    ssl_cert_file=hostcert,
                    vhost=vhost,
                    keepalive=True)
            else:
                self.connections[ip] = stomp.Connection([(ip, int(port))], vhost=vhost, keepalive=True)
    except Exception as e:
        return S_ERROR(EMQCONN, 'Failed to setup connection: %s' % e)
    return S_OK('Setup successful')
def executeForVO(self, vo):
    """
    Execute one SE and user synchronisation cycle for a VO.

    Logs in to Rucio (userpass as root, falling back to the host cert/key),
    then synchronises, in order: storage elements and their protocols,
    inter-RSE distances, shares, the PrimaryDataSE/OccupancyLFN attributes,
    user accounts/identities/scopes, group (SERVICE) accounts, and host
    identities attached to the ``dirac_srv`` account.

    :param str vo: Virtual organisation name.
    :return: S_OK or S_ERROR
    :rtype: dict
    """
    valid_protocols = ["srm", "gsiftp", "davs", "https", "root"]
    default_email = None
    try:
        # --- Rucio login: try userpass as root first, then host cert/key. ---
        try:
            client = Client(account="root", auth_type="userpass")
        except Exception as err:
            self.log.info("Login to Rucio as root with password failed. Will try host cert/key", str(err))
            certKeyTuple = Locations.getHostCertificateAndKeyLocation()
            if not certKeyTuple:
                self.log.error("Hostcert/key location not set")
                return S_ERROR("Hostcert/key location not set")
            hostcert, hostkey = certKeyTuple

            self.log.info("Logging in with a host cert/key pair:")
            self.log.debug("account: ", self.clientConfig[vo]["privilegedAccount"])
            self.log.debug("rucio host: ", self.clientConfig[vo]["rucioHost"])
            self.log.debug("auth host: ", self.clientConfig[vo]["authHost"])
            self.log.debug("CA cert path: ", self.caCertPath)
            self.log.debug("Cert location: ", hostcert)
            self.log.debug("Key location: ", hostkey)
            self.log.debug("VO: ", vo)

            client = Client(
                account=self.clientConfig[vo]["privilegedAccount"],
                rucio_host=self.clientConfig[vo]["rucioHost"],
                auth_host=self.clientConfig[vo]["authHost"],
                ca_cert=self.caCertPath,
                auth_type="x509",
                creds={"client_cert": hostcert, "client_key": hostkey},
                timeout=600,
                user_agent="rucio-clients",
                vo=vo,
            )

        self.log.info("Rucio client instantiated for VO:", vo)

        # --- Get the storage elements from Dirac Configuration and create them in Rucio. ---
        newRSE = False
        self.log.info("Synchronizing SEs for VO ", vo)
        result = getStorageElements(vo)
        if result["OK"]:
            rses = [rse["rse"] for rse in client.list_rses()]
            for se in result["Value"]:
                if se not in rses:
                    # The SE doesn't exist. Will create it
                    newRSE = True
                    self.log.info("Rucio Storage Element does not exist and will be created:", se)
                    try:
                        client.add_rse(rse=se, deterministic=True, volatile=False)
                    except Exception as err:
                        self.log.error("Cannot create RSE", "[RSE: %s, Error: %s]" % (se, str(err)))
                        continue

                    # Add RSE attributes for the new RSE
                    ret = gConfig.getOptionsDict("Resources/FTSEndpoints/FTS3")
                    ftsList = ""
                    if ret["OK"]:
                        ftsList = ",".join(ret["Value"].values())
                    dictRSEAttributes = {"naming_convention": "BelleII", "ANY": True, "fts": ftsList}
                    for key in dictRSEAttributes:
                        self.log.info(
                            "Setting RSE attributes",
                            "[RSE: %s, Attr. name: %s, Value: %s]" % (se, key, dictRSEAttributes[key]),
                        )
                        client.add_rse_attribute(se, key, value=dictRSEAttributes[key])
                    client.set_local_account_limit("root", se, 100000000000000000)

                # Create the protocols
                try:
                    protocols = client.get_protocols(se)
                except RSEProtocolNotSupported as err:
                    self.log.info("Cannot get protocols for", "[RSE %s, Error: %s]" % (se, str(err)))
                    protocols = []
                existing_protocols = []
                for prot in protocols:
                    existing_protocols.append(
                        (str(prot["scheme"]), str(prot["hostname"]), str(prot["port"]), str(prot["prefix"]))
                    )
                protocols_to_create = []
                for params in result["Value"][se]:
                    prot = (
                        str(params["scheme"]),
                        str(params["hostname"]),
                        str(params["port"]),
                        str(params["prefix"]),
                    )
                    protocols_to_create.append(prot)
                    if prot not in existing_protocols and prot[0] in valid_protocols:
                        # The protocol defined in Dirac does not exist in Rucio. Will be created
                        self.log.info(
                            "Will create new protocol:",
                            "%s://%s:%s%s on %s"
                            % (params["scheme"], params["hostname"], params["port"], params["prefix"], se),
                        )
                        try:
                            client.add_protocol(rse=se, params=params)
                        except Duplicate as err:
                            self.log.info(
                                "Protocol already exists on", "[RSE: %s, schema:%s]" % (se, params["scheme"])
                            )
                        except Exception as err:
                            self.log.error(
                                "Cannot create protocol on RSE", "[RSE: %s, Error: %s]" % (se, str(err))
                            )
                    else:
                        update = False
                        for protocol in protocols:
                            if prot == (
                                str(protocol["scheme"]),
                                str(protocol["hostname"]),
                                str(protocol["port"]),
                                str(protocol["prefix"]),
                            ):
                                # Check if the protocol defined in Dirac has the same priority
                                # as the one defined in Rucio
                                for domain in ["lan", "wan"]:
                                    for activity in ["read", "write", "delete"]:
                                        if (
                                            params["domains"][domain][activity]
                                            != protocol["domains"][domain][activity]
                                        ):
                                            update = True
                                            break
                                if (
                                    params["domains"]["wan"]["third_party_copy"]
                                    != protocol["domains"]["wan"]["third_party_copy"]
                                ):
                                    update = True
                                if update:
                                    data = {
                                        "prefix": params["prefix"],
                                        "read_lan": params["domains"]["lan"]["read"],
                                        "read_wan": params["domains"]["wan"]["read"],
                                        "write_lan": params["domains"]["lan"]["write"],
                                        "write_wan": params["domains"]["wan"]["write"],
                                        "delete_lan": params["domains"]["lan"]["delete"],
                                        "delete_wan": params["domains"]["wan"]["delete"],
                                        "third_party_copy": params["domains"]["wan"]["write"],
                                    }
                                    self.log.info(
                                        "Will update protocol:",
                                        "%s://%s:%s%s on %s"
                                        % (
                                            params["scheme"],
                                            params["hostname"],
                                            params["port"],
                                            params["prefix"],
                                            se,
                                        ),
                                    )
                                    client.update_protocols(
                                        rse=se,
                                        scheme=params["scheme"],
                                        data=data,
                                        hostname=params["hostname"],
                                        port=params["port"],
                                    )
                # Protocols present in Rucio but no longer defined in Dirac are removed.
                for prot in existing_protocols:
                    if prot not in protocols_to_create:
                        self.log.info(
                            "Will delete protocol:", "%s://%s:%s%s on %s" % (prot[0], prot[1], prot[2], prot[3], se)
                        )
                        client.delete_protocols(se, scheme=prot[0], hostname=prot[1], port=prot[2])
        else:
            self.log.error("Cannot get SEs:", result["Message"])

        # --- If new RSE added, add distances. ---
        rses = [rse["rse"] for rse in client.list_rses()]
        if newRSE:
            self.log.info("Adding distances")
            for src_rse, dest_rse in permutations(rses, r=2):
                try:
                    client.add_distance(src_rse, dest_rse, {"ranking": 1, "distance": 10})
                except Exception as err:
                    self.log.error(
                        "Cannot add distance for",
                        "Source RSE: %s, Dest RSE: %s, Error:%s" % (src_rse, dest_rse, str(err)),
                    )

        # --- Collect the shares from Dirac Configuration and create them in Rucio. ---
        self.log.info("Synchronizing shares")
        result = Operations().getOptionsDict("Production/SEshares")
        if result["OK"]:
            rseDict = result["Value"]
            for rse in rses:
                try:
                    # FIX: the two values are now formatted into the second gLogger
                    # argument; previously they were passed as extra positional
                    # arguments, which gLogger's (msg, varMsg) signature rejects.
                    self.log.info(
                        "Setting productionSEshare for", "[RSE: %s : Share: %s]" % (rse, rseDict.get(rse, 0))
                    )
                    client.add_rse_attribute(rse, "productionSEshare", rseDict.get(rse, 0))
                except Exception as err:
                    self.log.error("Cannot create productionSEshare for RSE:", rse)
        else:
            self.log.error("Cannot get SEs", result["Message"])

        result = Operations().getSections("Shares")
        if result["OK"]:
            for dataLevel in result["Value"]:
                result = Operations().getOptionsDict("Shares/%s" % dataLevel)
                if not result["OK"]:
                    # FIX: was `"Cannot get SEs:" % result["Message"]`, a %-format
                    # with no placeholder which raises TypeError at runtime.
                    self.log.error("Cannot get SEs:", result["Message"])
                    continue
                rseDict = result["Value"]
                for rse in rses:
                    try:
                        self.log.info("Setting", "%sShare for %s : %s" % (dataLevel, rse, rseDict.get(rse, 0)))
                        client.add_rse_attribute(rse, "%sShare" % dataLevel, rseDict.get(rse, 0))
                    except Exception as err:
                        # FIX: format the values instead of passing 4 positional args.
                        self.log.error("Cannot create share:", "%sShare for %s" % (dataLevel, rse))
        else:
            self.log.error("Cannot get shares:", result["Message"])

        # --- Create the RSE attribute PrimaryDataSE and OccupancyLFN. ---
        # (A dead `gConfig.getValue("Resources/StorageElementGroups/PrimarySEs")`
        # call whose result was immediately overwritten has been removed.)
        result = getStorageElements(vo)
        if result["OK"]:
            allSEs = result["Value"]
            primarySEs = resolveSEGroup("PrimarySEs", allSEs)
            self.log.info("Will set primarySEs flag to:", str(primarySEs))
            for rse in rses:
                if rse in allSEs:
                    storage = StorageElement(rse)
                    if not storage.valid:
                        self.log.warn("Storage element is not valid. Skipped RSE:", rse)
                        continue
                    occupancyLFN = storage.options.get("OccupancyLFN")
                    try:
                        client.add_rse_attribute(rse, "OccupancyLFN", occupancyLFN)
                    except Exception as err:
                        self.log.error(
                            "Cannot create RSE attribute OccupancyLFN for",
                            "[RSE: %s, Error: %s]" % (rse, str(err)),
                        )
                    if rse in primarySEs:
                        try:
                            client.add_rse_attribute(rse, "PrimaryDataSE", True)
                        except Exception as err:
                            self.log.error(
                                "Cannot create RSE attribute PrimaryDataSE for",
                                "[RSE: %s, Error: %s]" % (rse, str(err)),
                            )
                    else:
                        try:
                            client.delete_rse_attribute(rse, "PrimaryDataSE")
                        except RSEAttributeNotFound:
                            pass
                        except Exception as err:
                            self.log.error(
                                "Cannot remove RSE attribute PrimaryDataSE for",
                                "[RSE: %s, Error: %s]" % (rse, str(err)),
                            )
        self.log.info("RSEs synchronized for VO: ", vo)

        # --- Collect the user accounts from Dirac Configuration and create user accounts in Rucio. ---
        self.log.info("Synchronizing accounts for VO", vo)
        listAccounts = [str(acc["account"]) for acc in client.list_accounts()]
        listScopes = [str(scope) for scope in client.list_scopes()]
        dnMapping = {}
        diracUsers = getUsersInVO(vo)
        self.log.debug(" Will consider following Dirac users for", "[VO: %s, Dirac users: %s]" % (vo, diracUsers))
        for account in diracUsers:
            dn = getUserOption(account, "DN")
            email = getUserOption(account, "Email")
            dnMapping[dn] = email
            if account not in listAccounts:
                self.log.info("Will create account with associated DN ", "[account: %s, DN: %s]" % (account, dn))
                try:
                    client.add_account(account, "USER", email)
                    listAccounts.append(account)
                except Exception as err:
                    self.log.error("Cannot create account", "[account: %s, Error: %s]" % (account, str(err)))
                try:
                    client.add_identity(account=account, identity=dn, authtype="X509", email=email, default=True)
                except Exception as err:
                    self.log.error(
                        "Cannot add identity for account",
                        "[Identity: dn=%s, account:%s, Error: %s]" % (dn, account, str(err)),
                    )
                    self.log.error(
                        " Account/identity skipped (it will not be created in Rucio)", "[%s/%s]" % (account, dn)
                    )
                    continue
                for rse in rses:
                    client.set_local_account_limit(account, rse, 1000000000000000)
            else:
                try:
                    client.add_identity(account=account, identity=dn, authtype="X509", email=email, default=True)
                except Duplicate:
                    pass
                except Exception as err:
                    self.log.error(
                        "Cannot create identity for account",
                        "[DN: %s, account: %s, Error: %s]" % (dn, account, str(err)),
                    )
            scope = "user." + account
            if scope not in listScopes:
                try:
                    self.log.info("Will create a scope", "[Scope: %s]" % scope)
                    client.add_scope(account, scope)
                    self.log.info("Scope successfully added", "[Scope: %s]" % scope)
                except Exception as err:
                    self.log.error("Cannot create a scope", "[Scope: %s, Error: %s]" % (scope, str(err)))

        # --- Collect the group accounts from Dirac Configuration and create service accounts in Rucio. ---
        result = getGroupsForVO(vo)
        if result["OK"]:
            groups = result["Value"]
            self.log.debug(" Will consider following Dirac groups for", "[%s VO: %s]" % (vo, groups))
        else:
            groups = []
            self.log.debug("No Dirac groups for", "%s VO " % vo)
            self.log.debug("No Rucio service accounts will be created")
        for group in groups:
            if group not in listAccounts:
                self.log.info("Will create SERVICE account for Dirac group:", str(group))
                try:
                    client.add_account(group, "SERVICE", None)
                    listAccounts.append(group)
                except Exception as err:
                    self.log.error(
                        "Cannot create SERVICE account for", "[group: %s, Error: %s]" % (group, str(err))
                    )
                for rse in rses:
                    # FIX: limits are set for the newly created SERVICE account;
                    # previously this used the stale `account` variable left over
                    # from the user loop above (wrong account, or NameError when
                    # the VO has no users).
                    client.set_local_account_limit(group, rse, 1000000000000000)
            for dn in getDNsInGroup(group):
                try:
                    client.add_identity(
                        account=group, identity=dn, authtype="X509", email=dnMapping.get(dn, default_email)
                    )
                except Duplicate:
                    pass
                except Exception as err:
                    self.log.error(
                        "Cannot create identity for account",
                        "[identity %s, account %s, Error: %s]" % (dn, group, str(err)),
                    )
                    self.log.error(format_exc())

        # --- Attach host DNs as identities of the dirac_srv account. ---
        result = getHosts()
        if not result["OK"]:
            self.log.error("Cannot get host accounts:", "%s" % result["Message"])
        else:
            hosts = result["Value"]
            for host in hosts:
                dn = getHostOption(host, "DN")
                email = dnMapping.get(dn, default_email)
                try:
                    client.add_identity(account="dirac_srv", identity=dn, authtype="X509", email=email)
                except Duplicate:
                    pass
                except Exception as err:
                    self.log.error(
                        "Cannot create identity for account dirac_srv:", "[DN: %s, Error: %s]" % (dn, str(err))
                    )
                    self.log.error(format_exc())
        return S_OK()
    except Exception as exc:
        # Any unexpected error aborts the whole cycle for this VO only.
        self.log.exception("Synchronisation for VO failed. VO skipped ", "VO=%s" % vo, lException=exc)
        return S_ERROR(str(format_exc()))
def startTornado(self):
    """
    Starts the tornado server when ready.
    This method never returns.

    :raises Exception: if no services are loaded
    :raises ImportError: if the host certificate/key pair cannot be found
    """
    # If there is no services loaded:
    if not self.__calculateAppSettings():
        raise Exception("There is no services loaded, please check your configuration")

    sLog.debug("Starting Tornado")

    # Prepare SSL settings
    certs = Locations.getHostCertificateAndKeyLocation()
    if certs is False:
        sLog.fatal("Host certificates not found ! Can't start the Server")
        raise ImportError("Unable to load certificates")
    ca = Locations.getCAsLocation()
    ssl_options = {
        "certfile": certs[0],
        "keyfile": certs[1],
        "cert_reqs": M2Crypto.SSL.verify_peer,
        "ca_certs": ca,
        "sslDebug": DEBUG_M2CRYPTO,  # Set to true if you want to see the TLS debug messages
    }

    # Init monitoring
    if self.activityMonitoring:
        from DIRAC.MonitoringSystem.Client.MonitoringReporter import MonitoringReporter

        self.activityMonitoringReporter = MonitoringReporter(monitoringType="ServiceMonitoring")
        self.__monitorLastStatsUpdate = time.time()
        self.__report = self.__startReportToMonitoringLoop()
        # Response time
        # Starting monitoring, IOLoop waiting time in ms, __monitoringLoopDelay is defined in seconds
        # FIX: PeriodicCallback requires a callable; the previous code invoked
        # __reportToMonitoring immediately and passed its *result* to
        # PeriodicCallback, so no periodic reporting ever ran.
        tornado.ioloop.PeriodicCallback(
            lambda: self.__reportToMonitoring(self.__elapsedTime), self.__monitoringLoopDelay * 1000
        ).start()

    # If we are running with python3, Tornado will use asyncio,
    # and we have to convince it to let us run in a different thread
    # Doing this ensures a consistent behavior between py2 and py3
    asyncio.set_event_loop_policy(tornado.platform.asyncio.AnyThreadEventLoopPolicy())

    for port, app in self.__appsSettings.items():
        sLog.debug(" - %s" % "\n - ".join(["%s = %s" % (k, ssl_options[k]) for k in ssl_options]))

        # Default server configuration
        settings = dict(compress_response=True, cookie_secret="secret")

        # Merge application settings
        settings.update(app["settings"])

        # Start server
        router = Application(app["routes"], default_handler_class=NotFoundHandler, **settings)
        server = HTTPServer(router, ssl_options=ssl_options, decompress_request=True)
        try:
            server.listen(int(port))
        except Exception as e:  # pylint: disable=broad-except
            sLog.exception("Exception starting HTTPServer", e)
            raise
        sLog.always("Listening on port %s" % port)

    # Blocks forever serving requests on the current IOLoop.
    tornado.ioloop.IOLoop.current().start()
def checkSanity(urlTuple, kwargs):
    """
    Check that all ssl environment is ok

    Validates the credential selected by *kwargs* (host certificate, proxy
    string, or proxy file), optionally verifies the CA location, and returns
    the identity extracted from the credential.
    """
    useCerts = False
    certFile = ""

    # Pick the credential source: host cert, in-memory proxy string, or proxy file.
    if kwargs.get("useCertificates"):
        certKeyPair = Locations.getHostCertificateAndKeyLocation()
        if not certKeyPair:
            gLogger.error("No cert/key found! ")
            return S_ERROR("No cert/key found! ")
        certFile, useCerts = certKeyPair[0], True
    elif "proxyString" in kwargs:
        expectedType = six.string_types if six.PY2 else bytes
        if not isinstance(kwargs["proxyString"], expectedType):
            gLogger.error("proxyString parameter is not a valid type", str(type(kwargs["proxyString"])))
            return S_ERROR("proxyString parameter is not a valid type")
    else:
        certFile = kwargs["proxyLocation"] if "proxyLocation" in kwargs else Locations.getProxyLocation()
        if not certFile:
            gLogger.error("No proxy found")
            return S_ERROR("No proxy found")
        if not os.path.isfile(certFile):
            gLogger.error("Proxy file does not exist", certFile)
            return S_ERROR("%s proxy file does not exist" % certFile)

    # For certs always check CA's. For clients skipServerIdentityCheck
    if not kwargs.get("skipCACheck"):
        if not Locations.getCAsLocation():
            gLogger.error("No CAs found!")
            return S_ERROR("No CAs found!")

    # Load the credential object and, for file-based credentials, check expiry.
    if "proxyString" in kwargs:
        credential = X509Chain()
        res = credential.loadChainFromString(kwargs["proxyString"])
        if not res["OK"]:
            gLogger.error("Can't load proxy string")
            return S_ERROR("Can't load proxy string")
    else:
        if useCerts:
            credential = X509Certificate()
            credential.loadFromFile(certFile)
        else:
            credential = X509Chain()
            credential.loadChainFromFile(certFile)

        res = credential.hasExpired()
        if not res["OK"]:
            gLogger.error("Can't verify proxy or certificate file", "%s:%s" % (certFile, res["Message"]))
            return S_ERROR("Can't verify file %s:%s" % (certFile, res["Message"]))
        if res["Value"]:
            # Expired: report the notAfter date when retrievable.
            expiry = credential.getNotAfterDate()
            expiry = expiry["Value"] if expiry["OK"] else "unknown"
            gLogger.error("PEM file has expired", "%s is not valid after %s" % (certFile, expiry))
            return S_ERROR("PEM file %s has expired, not valid after %s" % (certFile, expiry))

    # Assemble the identity summary for the caller.
    idDict = {}
    res = credential.getDIRACGroup(ignoreDefault=True)
    if res["OK"] and res["Value"] is not False:
        idDict["group"] = res["Value"]
    if useCerts:
        idDict["DN"] = credential.getSubjectDN()["Value"]
    else:
        # For proxies the DN comes from the issuer certificate of the chain.
        idDict["DN"] = credential.getIssuerCert()["Value"].getSubjectDN()["Value"]
    return S_OK(idDict)