def downloadVOMSProxy(
    self,
    userDN,
    userGroup,
    limited=False,
    requiredTimeLeft=1200,
    cacheTime=14400,
    requiredVOMSAttribute=None,
    proxyToConnect=None,
    token=None,
):
    """Download a proxy if needed and transform it into a VOMS one

    :param str userDN: user DN
    :param str userGroup: user group
    :param boolean limited: if need limited proxy
    :param int requiredTimeLeft: required proxy live time in a seconds
    :param int cacheTime: store in a cache time in a seconds
    :param str requiredVOMSAttribute: VOMS attr to add to the proxy
    :param X509Chain proxyToConnect: proxy as a chain
    :param str token: valid token to get a proxy
    :return: S_OK(X509Chain)/S_ERROR()
    """
    # Serve from the in-memory cache when a proxy for this exact
    # (DN, group, VOMS attribute, limited) combination is still valid
    # for at least requiredTimeLeft seconds.
    cacheKey = (userDN, userGroup, requiredVOMSAttribute, limited)
    if self.__vomsProxiesCache.exists(cacheKey, requiredTimeLeft):
        return S_OK(self.__vomsProxiesCache.get(cacheKey))
    # Generate a fresh key pair locally; only the certificate request is
    # sent to the server — the private key never leaves this process.
    req = X509Request()
    req.generateProxyRequest(limited=limited)
    # Optionally authenticate the RPC connection with a specific proxy chain.
    if proxyToConnect:
        rpcClient = Client(url="Framework/ProxyManager", proxyChain=proxyToConnect, timeout=120)
    else:
        rpcClient = Client(url="Framework/ProxyManager", timeout=120)
    # Ask for cacheTime + requiredTimeLeft of lifetime so the cached proxy
    # stays usable for the whole caching period.
    if token:
        retVal = rpcClient.getVOMSProxyWithToken(
            userDN,
            userGroup,
            req.dumpRequest()["Value"],
            int(cacheTime + requiredTimeLeft),
            token,
            requiredVOMSAttribute,
        )
    else:
        retVal = rpcClient.getVOMSProxy(
            userDN, userGroup, req.dumpRequest()["Value"], int(cacheTime + requiredTimeLeft), requiredVOMSAttribute
        )
    if not retVal["OK"]:
        return retVal
    # Combine the certificate chain returned by the server with the local
    # private key to obtain a usable proxy chain.
    chain = X509Chain(keyObj=req.getPKey())
    retVal = chain.loadChainFromString(retVal["Value"])
    if not retVal["OK"]:
        return retVal
    # Cache the chain for its actual remaining lifetime.
    self.__vomsProxiesCache.add(cacheKey, chain.getRemainingSecs()["Value"], chain)
    return S_OK(chain)
def setPersistency(self, userDN, userGroup, persistent):
    """Set the persistency for user/group

    :param str userDN: user DN
    :param str userGroup: user group
    :param boolean persistent: persistent flag
    :return: S_OK()/S_ERROR()
    """
    # Coerce to a genuine bool so the RPC layer serialises a boolean
    # (replaces the previous if/else "hack" with the idiomatic form).
    persistentFlag = bool(persistent)
    rpcClient = Client(url="Framework/ProxyManager", timeout=120)
    retVal = rpcClient.setPersistency(userDN, userGroup, persistentFlag)
    if not retVal["OK"]:
        return retVal
    # Update internal persistency cache so subsequent lookups see the new flag
    cacheKey = (userDN, userGroup)
    record = self.__usersCache.get(cacheKey, 0)
    if record:
        record["persistent"] = persistentFlag
        self.__usersCache.add(cacheKey, self.__getSecondsLeftToExpiration(record["expirationtime"]), record)
    return retVal
def __sendData(self, secsTimeout=False):
    """Send the collected monitoring data to the Framework/Monitoring service.

    Registers the activities first, then commits the marks, repeating the
    registration when the marks send asks for it, for a bounded number of
    iterations.

    :type secsTimeout: int
    :param secsTimeout: The timeout in seconds for the rpcClient.
    :return: False on failure, None otherwise
    """
    from DIRAC.FrameworkSystem.private.monitoring.ServiceInterface import gServiceInterface

    if gServiceInterface.srvUp:
        # The Monitoring service runs inside this process: call it directly.
        self.logger.debug("Using internal interface to send data")
        rpcClient = gServiceInterface
    else:
        self.logger.debug("Creating RPC client")
        # Here is where the client is created from the running Framework/Monitoring service.
        rpcClient = Client(url="Framework/Monitoring", timeout=secsTimeout)
    # Send registrations
    if not self.__sendRegistration(rpcClient):
        return False
    # Send marks.
    # BUGFIX: this was an `if`, which made the maxIteration counter dead
    # code (it could only ever decrement once). A `while` implements the
    # evidently intended bounded retry of the marks/registration cycle.
    maxIteration = 5
    while self.__sendMarks(rpcClient) and maxIteration:
        maxIteration -= 1
        if not self.__sendRegistration(rpcClient):
            return False
def getDirectoryMetadata(self, path):
    """Get metadata associated to the directory"""
    res = checkArgumentFormat(path)
    if not res["OK"]:
        return res
    urls = res["Value"]
    failed = {}
    successful = {}
    gLogger.debug("DIPStorage.getFileMetadata: Attempting to obtain metadata for %s directories." % len(urls))
    serviceClient = Client(url=self.url)
    for url in urls:
        res = serviceClient.getMetadata(url)
        if not res["OK"]:
            gLogger.error(
                "DIPStorage.getFileMetadata: Failed to get metadata for url", "%s: %s" % (url, res["Message"])
            )
            failed[url] = res["Message"]
            continue
        metadata = res["Value"]
        if not metadata["Exists"]:
            failed[url] = "Directory does not exist"
        elif metadata["Type"] != "Directory":
            failed[url] = "Supplied path is not a directory"
        else:
            # Flag the entry explicitly as a directory for the caller
            metadata["Directory"] = True
            gLogger.debug("DIPStorage.getFileMetadata: Successfully obtained metadata for %s." % url)
            successful[url] = metadata
    return S_OK({"Failed": failed, "Successful": successful})
def listDirectory(self, path):
    """List the contents of the directory"""
    res = checkArgumentFormat(path)
    if not res["OK"]:
        return res
    urls = res["Value"]
    failed = {}
    successful = {}
    gLogger.debug("DIPStorage.listDirectory: Attempting to list %s directories." % len(urls))
    serviceClient = Client(url=self.url)
    for url in urls:
        res = serviceClient.listDirectory(url, "l")
        if not res["OK"]:
            failed[url] = res["Message"]
            continue
        contents = res["Value"]
        # Split the listing into files and sub-directories by entry type
        files = {name: info for name, info in contents.items() if info["Type"] == "File"}
        subDirs = {name: info for name, info in contents.items() if info["Type"] == "Directory"}
        successful[url] = {"SubDirs": subDirs, "Files": files}
    return S_OK({"Failed": failed, "Successful": successful})
def isDirectory(self, path):
    """Determine whether the path is a directory"""
    res = checkArgumentFormat(path)
    if not res["OK"]:
        return res
    urls = res["Value"]
    failed = {}
    successful = {}
    gLogger.debug("DIPStorage.isDirectory: Attempting to determine whether %s paths are directories." % len(urls))
    serviceClient = Client(url=self.url)
    for url in urls:
        res = serviceClient.getMetadata(url)
        if not res["OK"]:
            gLogger.error(
                "DIPStorage.isDirectory: Failed to get metadata for url", "%s: %s" % (url, res["Message"])
            )
            failed[url] = res["Message"]
            continue
        metadata = res["Value"]
        if not metadata["Exists"]:
            failed[url] = "Path does not exist"
        elif metadata["Type"] == "Directory":
            gLogger.debug("DIPStorage.isDirectory: Successfully obtained metadata for %s." % url)
            successful[url] = True
        else:
            # Exists but is not a directory (e.g. a plain file)
            successful[url] = False
    return S_OK({"Failed": failed, "Successful": successful})
def uploadProxy(self, proxy=None, restrictLifeTime: int = 0, rfcIfPossible=None):
    """Upload a proxy to the proxy management service using delegation

    :param X509Chain proxy: proxy as a chain (may also be a path string, or
        None to auto-discover the local proxy location)
    :param restrictLifeTime: proxy live time in a seconds
    :return: S_OK(dict)/S_ERROR() -- dict contain proxies
    """
    # 'rfcIfPossible' is deprecated: warn, or raise when the
    # DIRAC_DEPRECATED_FAIL environment variable is set (strict mode).
    if rfcIfPossible is not None:
        if os.environ.get("DIRAC_DEPRECATED_FAIL", None):
            raise NotImplementedError("'rfcIfPossible' argument is deprecated.")
        gLogger.warn("'rfcIfPossible' argument is deprecated.")
    # Discover proxy location
    if isinstance(proxy, X509Chain):
        chain = proxy
        proxyLocation = ""
    else:
        if not proxy:
            proxyLocation = Locations.getProxyLocation()
            if not proxyLocation:
                return S_ERROR("Can't find a valid proxy")
        elif isinstance(proxy, str):
            proxyLocation = proxy
        else:
            return S_ERROR("Can't find a valid proxy")
        chain = X509Chain()
        result = chain.loadProxyFromFile(proxyLocation)
        if not result["OK"]:
            return S_ERROR(f"Can't load {proxyLocation}: {result['Message']}")
    # Make sure it's valid
    if chain.hasExpired().get("Value"):
        return S_ERROR(f"Proxy {proxyLocation} has expired")
    # Only a "bare" proxy may be uploaded: group/VOMS decoration happens server-side
    if chain.getDIRACGroup(ignoreDefault=True).get("Value") or chain.isVOMS().get("Value"):
        return S_ERROR("Cannot upload proxy with DIRAC group or VOMS extensions")
    rpcClient = Client(url="Framework/ProxyManager", timeout=120)
    # Get a delegation request (server-generated key pair; we sign its request)
    result = rpcClient.requestDelegationUpload()
    if not result["OK"]:
        return result
    reqDict = result["Value"]
    # Generate delegated chain; keep a 60 s safety margin below the
    # remaining lifetime of the signing chain
    chainLifeTime = chain.getRemainingSecs()["Value"] - 60
    if restrictLifeTime and restrictLifeTime < chainLifeTime:
        chainLifeTime = restrictLifeTime
    result = chain.generateChainFromRequestString(reqDict["request"], lifetime=chainLifeTime)
    if result["OK"]:
        # Complete the delegation handshake with the signed chain
        result = rpcClient.completeDelegationUpload(reqDict["id"], pemChain := result["Value"])
    return result
def __init__(self, fcName, **kwargs):
    """Constructor of the LCGFileCatalogProxy client class"""
    self.method = None
    self.fcName = fcName
    self.interfaceMethods = None
    self.rpc = Client(url="DataManagement/FileCatalogProxy", timeout=120)
    # The proxy is considered usable only if the service answers a ping
    self.valid = False
    self.valid = self.rpc.ping()["OK"]
def uploadProxy(self, proxy=None, restrictLifeTime=0, rfcIfPossible=False): """Upload a proxy to the proxy management service using delegation :param X509Chain proxy: proxy as a chain :param int restrictLifeTime: proxy live time in a seconds :param boolean rfcIfPossible: make rfc proxy if possible :return: S_OK(dict)/S_ERROR() -- dict contain proxies """ # Discover proxy location if isinstance(proxy, X509Chain): chain = proxy proxyLocation = "" else: if not proxy: proxyLocation = Locations.getProxyLocation() if not proxyLocation: return S_ERROR("Can't find a valid proxy") elif isinstance(proxy, six.string_types): proxyLocation = proxy else: return S_ERROR("Can't find a valid proxy") chain = X509Chain() result = chain.loadProxyFromFile(proxyLocation) if not result["OK"]: return S_ERROR("Can't load %s: %s " % (proxyLocation, result["Message"])) # Make sure it's valid if chain.hasExpired().get("Value"): return S_ERROR("Proxy %s has expired" % proxyLocation) if chain.getDIRACGroup(ignoreDefault=True).get( "Value") or chain.isVOMS().get("Value"): return S_ERROR( "Cannot upload proxy with DIRAC group or VOMS extensions") rpcClient = Client(url="Framework/ProxyManager", timeout=120) # Get a delegation request result = rpcClient.requestDelegationUpload( chain.getRemainingSecs()["Value"]) if not result["OK"]: return result reqDict = result["Value"] # Generate delegated chain chainLifeTime = chain.getRemainingSecs()["Value"] - 60 if restrictLifeTime and restrictLifeTime < chainLifeTime: chainLifeTime = restrictLifeTime retVal = chain.generateChainFromRequestString(reqDict["request"], lifetime=chainLifeTime, rfc=rfcIfPossible) if not retVal["OK"]: return retVal # Upload! result = rpcClient.completeDelegationUpload(reqDict["id"], retVal["Value"]) if not result["OK"]: return result return S_OK(result.get("proxies") or result["Value"])
def getComponentsStatus(self, condDict):
    """This method is used to get the status of the components.

    :type condDict: dictionary
    :param condDict: A condition dictionary.

    :return: S_OK with status and message about the component.
    """
    monitoringClient = Client(url="Framework/Monitoring", timeout=100)
    return monitoringClient.getComponentsStatus(condDict)
def deleteProxyBundle(self, idList):
    """delete a list of id's

    :param list,tuple idList: list of identity numbers

    :return: S_OK(int)/S_ERROR()
    """
    # Plain pass-through to the ProxyManager service
    proxyManager = Client(url="Framework/ProxyManager", timeout=120)
    return proxyManager.deleteProxyBundle(idList)
def getUserProxiesInfo(self):
    """Get the user proxies uploaded info

    :return: S_OK(dict)/S_ERROR()
    """
    rpcClient = Client(url="Framework/ProxyManager", timeout=120)
    result = rpcClient.getUserProxiesInfo()
    # Strip the RPC stub so the caller gets a clean result structure
    result.pop("rpcStub", None)
    return result
def doCommand(self):
    """
    The Command pings a service on a vobox, it needs a service URL to ping it.

    :returns: a dict with the following:

    .. code-block:: python

      {
        'serviceUpTime' : <serviceUpTime>,
        'machineUpTime' : <machineUpTime>,
        'site' : <site>,
        'system' : <system>,
        'service' : <service>
      }

    """
    # INPUT PARAMETERS
    if "serviceURL" not in self.args:
        return self.returnERROR(S_ERROR('"serviceURL" not found in self.args'))
    serviceURL = self.args["serviceURL"]
    ##
    parsed = parse.urlparse(serviceURL)
    # Use the named attributes instead of opaque tuple indices:
    # netloc is "host[:port]", path is "/System/Service"
    site = parsed.netloc.split(":")[0]
    try:
        system, service = parsed.path.strip("/").split("/")
    except ValueError:
        return self.returnERROR(S_ERROR('"%s" seems to be a malformed url' % serviceURL))
    pinger = Client(url=serviceURL)
    resPing = pinger.ping()
    if not resPing["OK"]:
        return self.returnERROR(resPing)
    # Missing keys default to 0 rather than failing
    serviceUpTime = resPing["Value"].get("service uptime", 0)
    machineUpTime = resPing["Value"].get("host uptime", 0)
    result = {
        "site": site,
        "system": system,
        "service": service,
        "serviceUpTime": serviceUpTime,
        "machineUpTime": machineUpTime,
    }
    return S_OK(result)
def export_compactDB(self):
    """
    Compact the db by grouping buckets
    """
    # if we are running slaves (not only one service) we can redirect the request to the master
    # For more information please read the Administrative guide Accounting part!
    # ADVICE: If you want to trigger the bucketing, please make sure the bucketing is not running!!!!
    if self.runBucketing:
        # This instance owns the bucketing: compact locally
        return self.__acDB.compactBuckets()  # pylint: disable=no-member
    # Otherwise delegate to the master service
    masterClient = Client(url="Accounting/DataStoreMaster")
    return masterClient.compactDB()
def deleteRemoteProxy(userdn, vogroup):
    """
    Deletes proxy for a vogroup for the user invoking this function.

    :param str userdn: DN of the user whose proxy is deleted
    :param str vogroup: group for which the proxy is deleted
    :return: S_OK()/S_ERROR() -- the result of the deletion call, so callers
        can check success (previously the result was silently discarded)
    """
    rpcClient = Client(url="Framework/ProxyManager")
    retVal = rpcClient.deleteProxyBundle([(userdn, vogroup)])
    if retVal["OK"]:
        gLogger.notice("Deleted proxy for %s." % vogroup)
    else:
        gLogger.error("Failed to delete proxy for %s." % vogroup)
    # BUGFIX: propagate the service result instead of returning None
    return retVal
def requestToken(self, requesterDN, requesterGroup, numUses=1):
    """Request a number of tokens. usesList must be a list of integers and each integer is the number of uses a token
    must have

    :param str requesterDN: user DN
    :param str requesterGroup: user group
    :param int numUses: number of uses

    :return: S_OK(tuple)/S_ERROR() -- tuple contain token, number uses
    """
    proxyManager = Client(url="Framework/ProxyManager", timeout=120)
    return proxyManager.generateToken(requesterDN, requesterGroup, numUses)
def main():
    """Ping a local DIRAC service on the port given as first argument;
    exit 0 when it responds, 1 otherwise. All output is suppressed."""
    with open(os.devnull, "w") as devnullOut, open(os.devnull, "w") as devnullErr:
        from DIRAC import gLogger
        from DIRAC.ConfigurationSystem.Client.ConfigurationData import gConfigurationData

        gConfigurationData.setOptionInCFG("/DIRAC/Security/UseServerCertificate", "true")
        gLogger.setLevel("FATAL")
        from DIRAC.Core.Base.Client import Client

        pingResult = Client(url="dips://localhost:%s" % sys.argv[1]).ping()
        time.sleep(0.1)
        sys.exit(0 if pingResult["OK"] else 1)
def getDBContents(self, condDict=None, sorting=None, start=0, limit=0):
    """Get the contents of the db

    :param dict condDict: search condition (defaults to no condition)
    :param list sorting: sort specification, defaults to [["UserDN", "DESC"]]
    :param int start: first record to return
    :param int limit: maximum number of records (0 means no limit)
    :return: S_OK(dict)/S_ERROR() -- dict contain fields, record list, total records
    """
    # BUGFIX: the defaults were mutable objects ({} and a nested list),
    # shared across all calls; use None sentinels instead.
    if condDict is None:
        condDict = {}
    if sorting is None:
        sorting = [["UserDN", "DESC"]]
    rpcClient = Client(url="Framework/ProxyManager", timeout=120)
    return rpcClient.getContents(condDict, sorting, start, limit)
def __sendData(self):
    """Flush the buffered security-log messages to the SecurityLogging
    service in bundles of at most __maxMessagesInBundle; on the first
    failed bundle, re-queue everything not yet acknowledged."""
    gLogger.debug("Sending records to security log service...")
    pending = self.__messagesList
    self.__messagesList = []
    rpcClient = Client(url="Framework/SecurityLogging")
    while pending:
        bundle = pending[:self.__maxMessagesInBundle]
        result = rpcClient.logActionBundle(bundle)
        if not result["OK"]:
            # Re-queue the failed bundle together with everything after it
            self.__messagesList.extend(pending)
            break
        pending = pending[self.__maxMessagesInBundle:]
    gLogger.debug("Data sent to security log service")
def _forceServiceUpdate(url, fromMaster):
    """
    Force updating configuration on a given service
    This should be called by _updateServiceConfiguration

    :param str url: service URL
    :param bool fromMaster: flag to force updating from the master CS
    :return: S_OK/S_ERROR
    """
    gLogger.info("Updating service configuration on", url)
    serviceClient = Client(url=url)
    result = serviceClient.refreshConfiguration(fromMaster)
    # Tag the result with the service URL so the caller can attribute it
    result["URL"] = url
    return result
def putFile(self, path, sourceSize=0):
    """Upload local file(s) to the storage element through the proxy service.

    :param path: path spec accepted by checkArgumentFormat — maps
        destination URL to local source file
    :param int sourceSize: if non-zero, the put is treated as a replication
        and delegated entirely to the proxy service
    :return: S_OK({"Failed": ..., "Successful": ...})/S_ERROR()
    """
    client = Client(url=self.url)
    if sourceSize:
        # A known source size signals a third-party replication: let the
        # proxy service perform the whole transfer.
        gLogger.debug(
            "ProxyStorage.putFile: The client has provided the source file size implying a replication is requested."
        )
        return client.callProxyMethod(self.name, "putFile", [path], {"sourceSize": sourceSize})
    gLogger.debug("ProxyStorage.putFile: No source size was provided therefore a simple put will be performed.")
    res = checkArgumentFormat(path)
    if not res["OK"]:
        return res
    urls = res["Value"]
    failed = {}
    successful = {}
    # make sure transferClient uses the same ProxyStorage instance we uploaded the file to
    transferClient = TransferClient(client.serviceURL)
    for dest_url, src_file in urls.items():
        fileName = os.path.basename(dest_url)
        # Step 1: ship the local file to the proxy server
        res = transferClient.sendFile(src_file, "putFile/%s" % fileName)
        if not res["OK"]:
            gLogger.error("ProxyStorage.putFile: Failed to send file to proxy server.", res["Message"])
            failed[dest_url] = res["Message"]
        else:
            # Step 2: ask the proxy server to push it to the storage element
            res = client.uploadFile(self.name, dest_url)
            if not res["OK"]:
                gLogger.error(
                    "ProxyStorage.putFile: Failed to upload file to storage element from proxy server.",
                    res["Message"],
                )
                failed[dest_url] = res["Message"]
            else:
                # Step 3: verify by reading back the destination file size
                res = self.__executeOperation(dest_url, "getFileSize")
                if not res["OK"]:
                    gLogger.error(
                        "ProxyStorage.putFile: Failed to determine destination file size.", res["Message"]
                    )
                    failed[dest_url] = res["Message"]
                else:
                    successful[dest_url] = res["Value"]
    resDict = {"Failed": failed, "Successful": successful}
    return S_OK(resDict)
def __init__(self, args=None, clients=None):
    """Set up the reports client and its generator backend, preferring
    instances injected through self.apis over freshly created ones."""
    super(RunningJobsBySiteSplittedCommand, self).__init__(args, clients)

    self.rClient = self.apis["ReportsClient"] if "ReportsClient" in self.apis else ReportsClient()
    self.rgClient = (
        self.apis["ReportGenerator"] if "ReportGenerator" in self.apis else Client(url="Accounting/ReportGenerator")
    )
    # Wire the generator into the reports client
    self.rClient.rpcClient = self.rgClient
def getDBOrClient(DB, serverName):
    """Tries to instantiate the DB object and returns it if we manage to connect to the DB,
    otherwise returns a Client of the server
    """
    from DIRAC import gLogger
    from DIRAC.Core.Base.Client import Client

    try:
        # Direct DB access is preferred when this process can reach it
        localDB = DB()
        if localDB._connected:
            return localDB
    except Exception:
        # Any failure to instantiate/connect falls back to the RPC client
        pass
    gLogger.info("Can not connect to DB will use %s" % serverName)
    return Client(url=serverName)
def getOccupancy(self, *parms, **kws):
    """Gets the DIPStorage occupancy info

    :return: S_OK/S_ERROR (free and total space, in MB)
    """
    rpc = Client(url=self.url, timeout=120)
    occupancy = {}
    # Query free space first, then total; fail fast on the first error
    for key, query in (("Free", rpc.getFreeDiskSpace), ("Total", rpc.getTotalDiskSpace)):
        res = query()
        if not res["OK"]:
            return res
        occupancy[key] = res["Value"]
    return S_OK(occupancy)
def setUp(self):
    """Create the VM manager client, verify it has the privileges the
    tests require, and register one throw-away test instance."""
    self.__client = Client(url="WorkloadManagement/VirtualMachineManager")
    result = self.__client.checkVmWebOperation("Any")
    self.assertTrue(result["OK"])
    if result["Value"] != "Auth":
        self.fail("Client has insufficient privs to test VM calls")
    # Create a test instance
    self.__inst_uuid = str(uuid.uuid4())
    self.__inst_image = "ClientImage"
    self.__inst_name = "ClientInst-%s" % self.__inst_uuid
    self.__inst_ep = "UKI-CLOUD::testcloud.cloud"
    self.__inst_pod = "clientvo"
    result = self.__client.insertInstance(
        self.__inst_uuid, self.__inst_image, self.__inst_name, self.__inst_ep, self.__inst_pod
    )
    self.assertTrue(result["OK"])
    self.__id = result["Value"]
def getFile(self, path, localPath=False):
    """Download file(s) from the storage element via the proxy service.

    :param path: path spec accepted by checkArgumentFormat
    :param localPath: destination directory; defaults to the current
        working directory when False
    :return: S_OK({"Failed": ..., "Successful": ...})/S_ERROR() --
        Successful maps source url to the local file size
    """
    res = checkArgumentFormat(path)
    if not res["OK"]:
        return res
    urls = res["Value"]
    failed = {}
    successful = {}
    client = Client(url=self.url)
    # Make sure transferClient uses the same ProxyStorage instance.
    # Only this one holds the file we want to transfer.
    transferClient = TransferClient(client.serviceURL)
    for src_url in urls.keys():
        # Ask the proxy server to stage the file from the storage element
        res = client.prepareFile(self.name, src_url)
        if not res["OK"]:
            gLogger.error("ProxyStorage.getFile: Failed to prepare file on remote server.", res["Message"])
            failed[src_url] = res["Message"]
        else:
            fileName = os.path.basename(src_url)
            if localPath:
                dest_file = "%s/%s" % (localPath, fileName)
            else:
                dest_file = "%s/%s" % (os.getcwd(), fileName)
            res = transferClient.receiveFile(dest_file, "getFile/%s" % fileName)
            if not res["OK"]:
                # BUGFIX: corrected "recieve" typo in the emitted error message
                gLogger.error("ProxyStorage.getFile: Failed to receive file from proxy server.", res["Message"])
                failed[src_url] = res["Message"]
            elif not os.path.exists(dest_file):
                errStr = "ProxyStorage.getFile: The destination local file does not exist."
                gLogger.error(errStr, dest_file)
                failed[src_url] = errStr
            else:
                destSize = getSize(dest_file)
                if destSize == -1:
                    errStr = "ProxyStorage.getFile: Failed to get the local file size."
                    gLogger.error(errStr, dest_file)
                    failed[src_url] = errStr
                else:
                    successful[src_url] = destSize
    resDict = {"Failed": failed, "Successful": successful}
    return S_OK(resDict)
def getDirectorySize(self, path):
    """Get the size of the contents of the directory

    :param path: path spec accepted by checkArgumentFormat
    :return: S_OK({"Failed": ..., "Successful": ...}) -- Successful maps
        url to {"Files": 0, "Size": <bytes>, "SubDirs": 0}
    """
    res = checkArgumentFormat(path)
    if not res["OK"]:
        return res
    urls = res["Value"]
    successful = {}
    failed = {}
    # BUGFIX: the debug message was copy-pasted from isDirectory and
    # reported the wrong method and action.
    gLogger.debug("DIPStorage.getDirectorySize: Attempting to obtain size for %s directories." % len(urls))
    serviceClient = Client(url=self.url)
    for url in urls:
        res = serviceClient.getDirectorySize(url)
        if not res["OK"]:
            failed[url] = res["Message"]
        else:
            # Only the total size is provided; file/subdir counts are not available
            successful[url] = {"Files": 0, "Size": res["Value"], "SubDirs": 0}
    resDict = {"Failed": failed, "Successful": successful}
    return S_OK(resDict)
def exists(self, path):
    """Check if the given path exists. The 'path' variable can be a string or a list of strings."""
    res = checkArgumentFormat(path)
    if not res["OK"]:
        return res
    urls = res["Value"]
    failed = {}
    successful = {}
    serviceClient = Client(url=self.url)
    for url in urls:
        gLogger.debug("DIPStorage.exists: Determining existence of %s." % url)
        res = serviceClient.exists(url)
        if not res["OK"]:
            failed[url] = res["Message"]
        else:
            successful[url] = res["Value"]
    return S_OK({"Failed": failed, "Successful": successful})
def __performCallback(self, status, callback, sourceTask):
    """Invoke a 'method@service' callback with the task id and status,
    logging the outcome either way, and return the call result."""
    # callback has the form "<method>@<serviceURL>"
    method, service = callback.split("@")
    gLogger.debug(
        "RequestFinalization.__performCallback: Attempting to perform call back for %s with %s status"
        % (sourceTask, status)
    )
    client = Client(url=service)
    gLogger.debug("RequestFinalization.__performCallback: Created RPCClient to %s" % service)
    gLogger.debug("RequestFinalization.__performCallback: Attempting to invoke %s service method" % method)
    res = getattr(client, method)(sourceTask, status)
    if res["OK"]:
        gLogger.info(
            "RequestFinalization.__performCallback: Successfully issued callback to %s for %s with %s status"
            % (callback, sourceTask, status)
        )
    else:
        gLogger.error("RequestFinalization.__performCallback: Failed to perform callback", res["Message"])
    return res
def checkService(self, serviceName, options):
    """Ping the service, restart if the ping does not respond.

    :param str serviceName: name of the service to check
    :param dict options: service options; "PID" is read for the restart
    :return: S_OK() on success, S_ERROR() if the restart itself failed
    """
    url = self._getURL(serviceName, options)
    self.log.info("Pinging service", url)
    pingRes = Client().ping(url=url)
    if not pingRes["OK"]:
        self.log.info("Failure pinging service: %s: %s" % (url, pingRes["Message"]))
        res = self.restartInstance(int(options["PID"]), serviceName, self.restartServices)
        if not res["OK"]:
            return res
        if res["Value"] != NO_RESTART:
            # Record the restart in the accounting report
            self.accounting[serviceName]["Treatment"] = "Successfully Restarted"
            self.log.info("Service %s has been successfully restarted" % serviceName)
    # NOTE(review): this is logged even after a restart path — presumably
    # intentional ("the service is now considered fine"); confirm.
    self.log.info("Service responded OK")
    return S_OK()