def prepareTransformationTasks(self, transBody, taskDict, owner='', ownerGroup='', ownerDN='', bulkSubmissionFlag=False):
    """Prepare tasks, given a taskDict, that is created (with some manipulation) by the DB.

    jobClass is by default "DIRAC.Interfaces.API.Job.Job". An extension of it also works.

    :param transBody: transformation job template
    :param taskDict: dictionary of per task parameters
    :param owner: owner of the transformation
    :param ownerGroup: group of the owner of the transformation
    :param ownerDN: DN of the owner of the transformation
    :return: S_OK/S_ERROR with updated taskDict
    """
    # Fall back to the identity of the current proxy when none was given
    if not (owner and ownerGroup):
        proxyRes = getProxyInfo(False, False)
        if not proxyRes['OK']:
            return proxyRes
        owner = proxyRes['Value']['username']
        ownerGroup = proxyRes['Value']['group']

    if not ownerDN:
        dnRes = getDNForUsername(owner)
        if not dnRes['OK']:
            return dnRes
        ownerDN = dnRes['Value'][0]

    # Dispatch to bulk or per-task preparation
    preparer = self.__prepareTasksBulk if bulkSubmissionFlag else self.__prepareTasks
    return preparer(transBody, taskDict, owner, ownerGroup, ownerDN)
def prepareTransformationTasks(self, transBody, taskDict, owner='', ownerGroup='', ownerDN='', bulkSubmissionFlag=False):
    """Prepare tasks, given a taskDict, that is created (with some manipulation) by the DB"""
    # Nothing to prepare for an empty dictionary
    if not taskDict:
        return S_OK({})

    # Resolve missing credentials from the local proxy
    if not owner or not ownerGroup:
        proxyRes = getProxyInfo(False, False)
        if not proxyRes['OK']:
            return proxyRes
        owner = proxyRes['Value']['username']
        ownerGroup = proxyRes['Value']['group']

    if not ownerDN:
        dnRes = getDNForUsername(owner)
        if not dnRes['OK']:
            return dnRes
        ownerDN = dnRes['Value'][0]

    # A JSON body describes multiple operations; any other body is a single one
    try:
        transJson = json.loads(transBody)
        self._multiOperationsBody(transJson, taskDict, ownerDN, ownerGroup)
    except ValueError:  # json couldn't load
        self._singleOperationsBody(transBody, taskDict, ownerDN, ownerGroup)

    return S_OK(taskDict)
def _getUsageHistoryForTimeSpan(self, timeSpan, groupToUse=""):
    """Fetch usage history for a time span; when a group is given, key the data by user DN."""
    histRes = self._getHistoryData(timeSpan, groupToUse)
    if not histRes["OK"]:
        self.log.error("Cannot get history data", histRes["Message"])
        return histRes

    data = histRes["Value"].get("data", [])
    if not data:
        message = "Empty history data"
        self.log.warn(message)
        return S_ERROR(message)

    if groupToUse:
        # Map the usernames to DNs: every DN registered for a user gets that user's data
        byDN = {}
        for userName in data:
            dnRes = getDNForUsername(userName)
            if not dnRes["OK"]:
                self.log.error("User does not have any DN assigned", "%s :%s" % (userName, dnRes["Message"]))
                continue
            for userDN in dnRes["Value"]:
                byDN[userDN] = data[userName]
        data = byDN

    return S_OK(data)
def prepareTransformationTasks(self, transBody, taskDict, owner='', ownerGroup='', ownerDN='', bulkSubmissionFlag=False):
    """Prepare tasks, given a taskDict, that is created (with some manipulation) by the DB.

    jobClass is by default "DIRAC.Interfaces.API.Job.Job". An extension of it also works.

    :param transBody: transformation job template
    :param taskDict: dictionary of per task parameters
    :param owner: owner of the transformation
    :param ownerGroup: group of the owner of the transformation
    :param ownerDN: DN of the owner of the transformation
    :return: S_OK/S_ERROR with updated taskDict
    """
    if not owner or not ownerGroup:
        # Use the identity of the current proxy when no owner was supplied
        res = getProxyInfo(False, False)
        if not res['OK']:
            return res
        owner = res['Value']['username']
        ownerGroup = res['Value']['group']

    if not ownerDN:
        res = getDNForUsername(owner)
        if not res['OK']:
            return res
        ownerDN = res['Value'][0]

    if bulkSubmissionFlag:
        return self.__prepareTransformationTasksBulk(transBody, taskDict, owner, ownerGroup, ownerDN)
    return self.__prepareTransformationTasks(transBody, taskDict, owner, ownerGroup, ownerDN)
def oAuth2():
    """Fill the credential dictionary from the OIDC userinfo endpoint.

    Uses the "AccessToken" secure cookie as a bearer token; on success sets
    username, DN and issuer in self.__credDict. Silently does nothing when
    no cookie is present or the provider reports an error.
    """
    if self.get_secure_cookie("AccessToken"):
        access_token = self.get_secure_cookie("AccessToken")
        url = Conf.getCSValue("TypeAuths/%s/authority" % typeAuth) + '/userinfo'
        heads = {'Authorization': 'Bearer ' + access_token,
                 'Content-Type': 'application/json'}
        # Issue the userinfo request ONCE and reuse the parsed response;
        # the original fired the same HTTP request up to three times.
        # NOTE(review): verify=False disables TLS verification — confirm intended.
        userinfo = requests.get(url, headers=heads, verify=False).json()
        if 'error' in userinfo:
            self.log.error('OIDC request error: %s' % userinfo['error'])
            return
        ID = userinfo['sub']
        result = getUsernameForID(ID)
        if result['OK']:
            self.__credDict['username'] = result['Value']
            result = getDNForUsername(self.__credDict['username'])
            if result['OK']:
                self.__credDict['validDN'] = True
                self.__credDict['DN'] = result['Value'][0]
            result = getCAForUsername(self.__credDict['username'])
            if result['OK']:
                self.__credDict['issuer'] = result['Value'][0]
    return
def prepareTransformationTasks(self, transBody, taskDict, owner='', ownerGroup='', ownerDN='', bulkSubmissionFlag=False):
    """Prepare tasks, given a taskDict, that is created (with some manipulation) by the DB"""
    if not taskDict:
        return S_OK({})

    if (not owner) or (not ownerGroup):
        res = getProxyInfo(False, False)
        if not res['OK']:
            return res
        proxyInfo = res['Value']
        owner, ownerGroup = proxyInfo['username'], proxyInfo['group']

    if not ownerDN:
        res = getDNForUsername(owner)
        if not res['OK']:
            return res
        # a user may have several DNs: use the first one
        ownerDN = res['Value'][0]

    try:
        # JSON bodies carry several operations, anything else is a single one
        self._multiOperationsBody(json.loads(transBody), taskDict, ownerDN, ownerGroup)
    except ValueError:  # json couldn't load
        self._singleOperationsBody(transBody, taskDict, ownerDN, ownerGroup)

    return S_OK(taskDict)
def wrapped_fcn(*args, **kwargs):
    """Run fcn with a downloaded user proxy installed in X509_USER_PROXY.

    Control keywords (popped from kwargs before calling fcn):
      proxyUserName / proxyUserDN: identity to impersonate
      proxyUserGroup: DIRAC group for the proxy
      proxyWithVOMS: add the group VOMS attribute (default True)
      proxyFilePath: where to dump the proxy file
    The previous X509_USER_PROXY value and the UseServerCertificate flag
    are restored after the call.
    """
    userName = kwargs.pop('proxyUserName', '')
    userDN = kwargs.pop('proxyUserDN', '')
    userGroup = kwargs.pop('proxyUserGroup', '')
    vomsFlag = kwargs.pop('proxyWithVOMS', True)
    proxyFilePath = kwargs.pop('proxyFilePath', False)

    if not ((userName or userDN) and userGroup):
        # No proxy substitution requested. The original fell off the end of the
        # function here and returned None without ever calling fcn; call it,
        # as the sibling implementations of this wrapper do.
        return fcn(*args, **kwargs)

    # Setup user proxy
    originalUserProxy = os.environ.get('X509_USER_PROXY')
    if not userDN:
        result = getDNForUsername(userName)
        if not result['OK']:
            return result
        userDN = result['Value'][0]
    vomsAttr = ''
    if vomsFlag:
        vomsAttr = getVOMSAttributeForGroup(userGroup)
    if vomsAttr:
        result = gProxyManager.downloadVOMSProxyToFile(userDN, userGroup,
                                                       requiredVOMSAttribute=vomsAttr,
                                                       filePath=proxyFilePath,
                                                       requiredTimeLeft=3600,
                                                       cacheTime=3600)
    else:
        result = gProxyManager.downloadProxyToFile(userDN, userGroup,
                                                   filePath=proxyFilePath,
                                                   requiredTimeLeft=3600,
                                                   cacheTime=3600)
    if not result['OK']:
        gLogger.warn("Can't download proxy to file", result['Message'])
        return result
    os.environ['X509_USER_PROXY'] = result['Value']

    # Check if the caller is executing with the host certificate
    useServerCertificate = gConfig.useServerCertificate()
    if useServerCertificate:
        gConfigurationData.setOptionInCFG('/DIRAC/Security/UseServerCertificate', 'false')
    try:
        return fcn(*args, **kwargs)
    # "except Exception, x" was Python-2-only syntax; use the portable "as" form
    except Exception as x:  # pylint: disable=broad-except
        return S_ERROR("Exception: %s" % str(x))
    finally:
        # Restore the default host certificate usage and proxy environment,
        # even if fcn raised something that is not an Exception subclass
        if useServerCertificate:
            gConfigurationData.setOptionInCFG('/DIRAC/Security/UseServerCertificate', 'true')
        if originalUserProxy:
            os.environ['X509_USER_PROXY'] = originalUserProxy
        else:
            os.environ.pop('X509_USER_PROXY')
def wrapped_fcn(*args, **kwargs):
    """Run fcn with a downloaded user proxy installed in X509_USER_PROXY.

    Control keywords (popped from kwargs before calling fcn):
      proxyUserName: user to impersonate
      proxyUserGroup: DIRAC group for the proxy
      proxyWithVOMS: add the group VOMS attribute (default True)
      proxyFilePath: where to dump the proxy file
    The previous X509_USER_PROXY value and the UseServerCertificate flag
    are restored after the call.
    """
    userName = kwargs.pop("proxyUserName", "")
    userGroup = kwargs.pop("proxyUserGroup", "")
    vomsFlag = kwargs.pop("proxyWithVOMS", True)
    proxyFilePath = kwargs.pop("proxyFilePath", False)

    if not (userName and userGroup):
        # No proxy substitution requested. The original returned None here
        # without calling fcn at all; call it, as the sibling wrappers do.
        return fcn(*args, **kwargs)

    # Setup user proxy
    originalUserProxy = os.environ.get("X509_USER_PROXY")
    result = getDNForUsername(userName)
    if not result["OK"]:
        return result
    userDN = result["Value"][0]
    vomsAttr = ""
    if vomsFlag:
        vomsAttr = getVOMSAttributeForGroup(userGroup)
    if vomsAttr:
        result = gProxyManager.downloadVOMSProxyToFile(
            userDN,
            userGroup,
            requiredVOMSAttribute=vomsAttr,
            filePath=proxyFilePath,
            requiredTimeLeft=3600,
            cacheTime=3600,
        )
    else:
        result = gProxyManager.downloadProxyToFile(
            userDN, userGroup, filePath=proxyFilePath, requiredTimeLeft=3600, cacheTime=3600
        )
    if not result["OK"]:
        return result
    os.environ["X509_USER_PROXY"] = result["Value"]

    # Check if the caller is executing with the host certificate
    useServerCertificate = gConfig.useServerCertificate()
    if useServerCertificate:
        gConfigurationData.setOptionInCFG("/DIRAC/Security/UseServerCertificate", "false")
    try:
        return fcn(*args, **kwargs)
    # "except Exception, x" was Python-2-only syntax; use the portable "as" form
    except Exception as x:  # pylint: disable=broad-except
        return S_ERROR("Exception: %s" % str(x))
    finally:
        # Restore the default host certificate usage and proxy environment
        if useServerCertificate:
            gConfigurationData.setOptionInCFG("/DIRAC/Security/UseServerCertificate", "true")
        if originalUserProxy:
            os.environ["X509_USER_PROXY"] = originalUserProxy
        else:
            os.environ.pop("X509_USER_PROXY")
def wrapped_fcn(*args, **kwargs):
    """Run fcn with a user proxy installed in X509_USER_PROXY.

    Control keywords (popped from kwargs before calling fcn):
      proxyUserName / proxyUserDN: identity to impersonate
      proxyUserGroup: DIRAC group for the proxy
      proxyWithVOMS: add the group VOMS attribute (default True)
      proxyFilePath: where to dump the proxy file
    Restores the previous environment in the finally clause.
    """
    userName = kwargs.pop('proxyUserName', '')
    userDN = kwargs.pop('proxyUserDN', '')
    userGroup = kwargs.pop('proxyUserGroup', '')
    vomsFlag = kwargs.pop('proxyWithVOMS', True)
    proxyFilePath = kwargs.pop('proxyFilePath', False)
    if (userName or userDN) and userGroup:
        # Setup user proxy
        originalUserProxy = os.environ.get('X509_USER_PROXY')
        if userDN:
            # An explicit DN takes precedence over a username lookup
            userDNs = [userDN]
        else:
            result = getDNForUsername(userName)
            if not result['OK']:
                return result
            userDNs = result['Value']  # a same user may have more than one DN
        vomsAttr = ''
        if vomsFlag:
            vomsAttr = getVOMSAttributeForGroup(userGroup)
        result = getProxy(userDNs, userGroup, vomsAttr, proxyFilePath)
        if not result['OK']:
            return result
        proxyFile = result['Value']
        os.environ['X509_USER_PROXY'] = proxyFile
        # Check if the caller is executing with the host certificate
        useServerCertificate = gConfig.useServerCertificate()
        if useServerCertificate:
            gConfigurationData.setOptionInCFG('/DIRAC/Security/UseServerCertificate', 'false')
        try:
            return fcn(*args, **kwargs)
        except Exception as lException:  # pylint: disable=broad-except
            # Convert any failure of the wrapped call into S_ERROR
            value = ','.join([str(arg) for arg in lException.args])
            exceptType = lException.__class__.__name__
            return S_ERROR("Exception - %s: %s" % (exceptType, value))
        finally:
            # Restore the default host certificate usage if necessary
            if useServerCertificate:
                gConfigurationData.setOptionInCFG('/DIRAC/Security/UseServerCertificate', 'true')
            if originalUserProxy:
                os.environ['X509_USER_PROXY'] = originalUserProxy
            else:
                os.environ.pop('X509_USER_PROXY')
    else:
        # No proxy substitution requested
        return fcn(*args, **kwargs)
def getFTS3Context(self, username, group, ftsServer, threadID):
    """ Returns an fts3 context for a given user, group and fts server

        The context pool is per thread, and there is one context
        per tuple (user, group, server).
        We dump the proxy of a user to a file (shared by all the threads),
        and use it to make the context.
        The proxy needs a lifetime of at least 2h, is cached for 1.5h,
        and the lifetime of the context is 45mn

        :param username: name of the user
        :param group: group of the user
        :param ftsServer: address of the server

        :returns: S_OK with the context object
    """
    log = gLogger.getSubLogger("getFTS3Context", child=True)

    threadCache = self._globalContextCache.setdefault(threadID, DictCache())
    cacheKey = (username, group, ftsServer)
    log.debug("Getting context for %s" % (cacheKey, ))

    # Rebuild the context when absent or older than 45 minutes
    if not threadCache.exists(cacheKey, 2700):
        dnRes = getDNForUsername(username)
        if not dnRes['OK']:
            return dnRes
        # We take the first DN returned
        userDN = dnRes['Value'][0]
        log.debug("UserDN %s" % userDN)

        # The proxy must live at least 2 hours and is cached for 1.5 hours,
        # so it comfortably outlasts the 45mn context lifetime
        proxyRes = gProxyManager.downloadVOMSProxyToFile(userDN, group, requiredTimeLeft=7200, cacheTime=5400)
        if not proxyRes['OK']:
            return proxyRes
        proxyFile = proxyRes['Value']
        log.debug("Proxy file %s" % proxyFile)

        # Build a context from this proxy and cache it for 1h
        ctxRes = FTS3Job.generateContext(ftsServer, proxyFile)
        if not ctxRes['OK']:
            return ctxRes
        threadCache.add(cacheKey, 3600, ctxRes['Value'])

    return S_OK(threadCache.get(cacheKey))
def wrapped_fcn(*args, **kwargs):
    """Run fcn with a downloaded user proxy installed in X509_USER_PROXY.

    Control keywords (popped from kwargs before calling fcn):
      proxyUserName: user to impersonate
      proxyUserGroup: DIRAC group for the proxy
      proxyWithVOMS: add the group VOMS attribute (default True)
      proxyFilePath: where to dump the proxy file
    The previous X509_USER_PROXY value and the UseServerCertificate flag
    are restored after the call.
    """
    userName = kwargs.pop('proxyUserName', '')
    userGroup = kwargs.pop('proxyUserGroup', '')
    vomsFlag = kwargs.pop('proxyWithVOMS', True)
    proxyFilePath = kwargs.pop('proxyFilePath', False)

    if not (userName and userGroup):
        # No proxy substitution requested. The original returned None here
        # without calling fcn at all; call it, as the sibling wrappers do.
        return fcn(*args, **kwargs)

    # Setup user proxy
    originalUserProxy = os.environ.get('X509_USER_PROXY')
    result = getDNForUsername(userName)
    if not result['OK']:
        return result
    userDN = result['Value'][0]
    vomsAttr = ''
    if vomsFlag:
        vomsAttr = getVOMSAttributeForGroup(userGroup)
    if vomsAttr:
        result = gProxyManager.downloadVOMSProxyToFile(userDN, userGroup,
                                                       requiredVOMSAttribute=vomsAttr,
                                                       filePath=proxyFilePath,
                                                       requiredTimeLeft=3600,
                                                       cacheTime=3600)
    else:
        result = gProxyManager.downloadProxyToFile(userDN, userGroup,
                                                   filePath=proxyFilePath,
                                                   requiredTimeLeft=3600,
                                                   cacheTime=3600)
    if not result['OK']:
        return result
    os.environ['X509_USER_PROXY'] = result['Value']

    # Check if the caller is executing with the host certificate
    useServerCertificate = gConfig.useServerCertificate()
    if useServerCertificate:
        gConfigurationData.setOptionInCFG('/DIRAC/Security/UseServerCertificate', 'false')
    try:
        return fcn(*args, **kwargs)
    # "except Exception, x" was Python-2-only syntax; use the portable "as" form
    except Exception as x:  # pylint: disable=broad-except
        return S_ERROR("Exception: %s" % str(x))
    finally:
        # Restore the default host certificate usage and proxy environment
        if useServerCertificate:
            gConfigurationData.setOptionInCFG('/DIRAC/Security/UseServerCertificate', 'true')
        if originalUserProxy:
            os.environ['X509_USER_PROXY'] = originalUserProxy
        else:
            os.environ.pop('X509_USER_PROXY')
def prepareTransformationTasks(self, transBody, taskDict, owner='', ownerGroup='', ownerDN=''):
    """ Prepare tasks, given a taskDict, that is created (with some manipulation) by the DB

    Builds one RMS Request per task that has input data and stores it under
    the task's 'TaskObject' key.

    :param transBody: optional "<requestType>;<requestOperation>" string
    :param taskDict: dictionary of per-task parameters, updated in place
    :param owner: owner of the transformation (defaults to the current proxy user)
    :param ownerGroup: group of the owner (defaults to the current proxy group)
    :param ownerDN: DN of the owner (first DN of owner when empty)
    :return: S_OK(taskDict) or S_ERROR
    """
    # Fall back to the credentials of the local proxy when not given
    if (not owner) or (not ownerGroup):
        res = getProxyInfo(False, False)
        if not res['OK']:
            return res
        proxyInfo = res['Value']
        owner = proxyInfo['username']
        ownerGroup = proxyInfo['group']
    if not ownerDN:
        res = getDNForUsername(owner)
        if not res['OK']:
            return res
        # a user may have several DNs; take the first one
        ownerDN = res['Value'][0]
    requestOperation = 'ReplicateAndRegister'
    if transBody:
        try:
            # the body may encode "<type>;<operation>"; non-string bodies keep the default
            _requestType, requestOperation = transBody.split(';')
        except AttributeError:
            pass
    for taskID in sorted(taskDict):
        paramDict = taskDict[taskID]
        if paramDict['InputData']:
            transID = paramDict['TransformationID']
            oRequest = Request()
            transfer = Operation()
            transfer.Type = requestOperation
            transfer.TargetSE = paramDict['TargetSE']
            # InputData may be a list of LFNs or a ';'-separated string
            if isinstance(paramDict['InputData'], list):
                files = paramDict['InputData']
            elif isinstance(paramDict['InputData'], basestring):
                files = paramDict['InputData'].split(';')
            for lfn in files:
                trFile = File()
                trFile.LFN = lfn
                transfer.addFile(trFile)
            oRequest.addOperation(transfer)
            oRequest.RequestName = _requestName(transID, taskID)
            oRequest.OwnerDN = ownerDN
            oRequest.OwnerGroup = ownerGroup
            # Reject the whole taskDict as soon as one request fails validation
            isValid = self.requestValidator.validate(oRequest)
            if not isValid['OK']:
                return isValid
            taskDict[taskID]['TaskObject'] = oRequest
    return S_OK(taskDict)
def prepareTransformationTasks(self, transBody, taskDict, owner='', ownerGroup='', ownerDN=''):
    """Prepare tasks, given a taskDict, that is created (with some manipulation) by the DB.

    Builds one RMS Request per task with input data, stored as 'TaskObject'.
    """
    if not owner or not ownerGroup:
        res = getProxyInfo(False, False)
        if not res['OK']:
            return res
        proxyInfo = res['Value']
        owner = proxyInfo['username']
        ownerGroup = proxyInfo['group']

    if not ownerDN:
        res = getDNForUsername(owner)
        if not res['OK']:
            return res
        ownerDN = res['Value'][0]

    # Default operation unless the body encodes "<type>;<operation>"
    requestOperation = 'ReplicateAndRegister'
    if transBody:
        try:
            _requestType, requestOperation = transBody.split(';')
        except AttributeError:
            pass

    for taskID in sorted(taskDict):
        paramDict = taskDict[taskID]
        if not paramDict['InputData']:
            continue
        transID = paramDict['TransformationID']

        transfer = Operation()
        transfer.Type = requestOperation
        transfer.TargetSE = paramDict['TargetSE']

        inputData = paramDict['InputData']
        if isinstance(inputData, list):
            files = inputData
        elif isinstance(inputData, basestring):
            files = inputData.split(';')
        for lfn in files:
            trFile = File()
            trFile.LFN = lfn
            transfer.addFile(trFile)

        oRequest = Request()
        oRequest.addOperation(transfer)
        oRequest.RequestName = _requestName(transID, taskID)
        oRequest.OwnerDN = ownerDN
        oRequest.OwnerGroup = ownerGroup

        isValid = self.requestValidator.validate(oRequest)
        if not isValid['OK']:
            return isValid
        taskDict[taskID]['TaskObject'] = oRequest

    return S_OK(taskDict)
def getFTS3Context(self, username, group, ftsServer, threadID):
    """ Returns an fts3 context for a given user, group and fts server

        The context pool is per thread, and there is one context
        per tuple (user, group, server).
        We dump the proxy of a user to a file (shared by all the threads),
        and use it to make the context.
        The proxy needs a lifetime of at least 2h, is cached for 1.5h,
        and the lifetime of the context is 45mn

        :param username: name of the user
        :param group: group of the user
        :param ftsServer: address of the server
        :param threadID: identifier of the calling thread, used as key of the context pool

        :returns: S_OK with the context object
    """
    log = gLogger.getSubLogger("getFTS3Context", child=True)

    contextes = self._globalContextCache.setdefault(threadID, DictCache())

    idTuple = (username, group, ftsServer)
    log.debug("Getting context for %s" % (idTuple, ))

    # 2700s = 45mn: rebuild the context when missing or about to expire
    if not contextes.exists(idTuple, 2700):
        res = getDNForUsername(username)
        if not res['OK']:
            return res
        # We take the first DN returned
        userDN = res['Value'][0]

        log.debug("UserDN %s" % userDN)

        # We dump the proxy to a file.
        # It has to have a lifetime of at least 2 hours
        # and we cache it for 1.5 hours
        res = gProxyManager.downloadVOMSProxyToFile(
            userDN, group, requiredTimeLeft=7200, cacheTime=5400)
        if not res['OK']:
            return res

        proxyFile = res['Value']
        log.debug("Proxy file %s" % proxyFile)

        # We generate the context
        res = FTS3Job.generateContext(ftsServer, proxyFile)
        if not res['OK']:
            return res
        context = res['Value']

        # we add it to the cache for this thread for 1h
        contextes.add(idTuple, 3600, context)

    return S_OK(contextes.get(idTuple))
def wrapped_fcn(*args, **kwargs):
    """Run fcn with a user proxy installed in X509_USER_PROXY.

    Control keywords (popped from kwargs before calling fcn):
      proxyUserName / proxyUserDN: identity to impersonate
      proxyUserGroup: DIRAC group for the proxy
      proxyWithVOMS: add the group VOMS attribute (default True)
      proxyFilePath: where to dump the proxy file
    Restores the previous environment in the finally clause.
    """
    userName = kwargs.pop('proxyUserName', '')
    userDN = kwargs.pop('proxyUserDN', '')
    userGroup = kwargs.pop('proxyUserGroup', '')
    vomsFlag = kwargs.pop('proxyWithVOMS', True)
    proxyFilePath = kwargs.pop('proxyFilePath', False)
    if (userName or userDN) and userGroup:
        # Setup user proxy
        originalUserProxy = os.environ.get('X509_USER_PROXY')
        if userDN:
            # An explicit DN takes precedence over a username lookup
            userDNs = [userDN]
        else:
            result = getDNForUsername(userName)
            if not result['OK']:
                return result
            userDNs = result['Value']  # a same user may have more than one DN
        vomsAttr = ''
        if vomsFlag:
            vomsAttr = getVOMSAttributeForGroup(userGroup)
        result = getProxy(userDNs, userGroup, vomsAttr, proxyFilePath)
        if not result['OK']:
            return result
        proxyFile = result['Value']
        os.environ['X509_USER_PROXY'] = proxyFile
        # Check if the caller is executing with the host certificate
        useServerCertificate = gConfig.useServerCertificate()
        if useServerCertificate:
            gConfigurationData.setOptionInCFG('/DIRAC/Security/UseServerCertificate', 'false')
        try:
            return fcn(*args, **kwargs)
        except Exception as lException:
            # Convert any failure of the wrapped call into S_ERROR
            value = ','.join([str(arg) for arg in lException.args])
            exceptType = lException.__class__.__name__
            return S_ERROR("Exception - %s: %s" % (exceptType, value))
        finally:
            # Restore the default host certificate usage if necessary
            if useServerCertificate:
                gConfigurationData.setOptionInCFG('/DIRAC/Security/UseServerCertificate', 'true')
            if originalUserProxy:
                os.environ['X509_USER_PROXY'] = originalUserProxy
            else:
                os.environ.pop('X509_USER_PROXY')
    else:
        # No proxy substitution requested
        return fcn(*args, **kwargs)
def _getCurrentUser(self):
    """Return the username of the current local proxy.

    Checks that a proxy exists, that it carries a group, and that the
    user has DNs registered in the CS.

    :return: S_OK(username) or an error report
    """
    res = getProxyInfo(False, False)
    if not res['OK']:
        return self._errorReport('No proxy found in local environment', res['Message'])
    proxyInfo = res['Value']
    gLogger.debug(formatProxyInfoAsString(proxyInfo))
    # Fixed: the original tested "'group ' not in proxyInfo" (note the
    # trailing space inside the key), so the check could never trigger
    if 'group' not in proxyInfo:
        return self._errorReport('Proxy information does not contain the group', res['Message'])
    # Verify the user has at least one DN registered (the DNs themselves are unused)
    res = getDNForUsername(proxyInfo['username'])
    if not res['OK']:
        return self._errorReport('Failed to get proxies for user', res['Message'])
    return S_OK(proxyInfo['username'])
def _getCurrentUser(self):
    """Return the username of the current local proxy, after validating group and DN."""
    proxyRes = getProxyInfo(False, False)
    if not proxyRes['OK']:
        return self._errorReport('No proxy found in local environment', proxyRes['Message'])
    proxyInfo = proxyRes['Value']
    gLogger.debug(formatProxyInfoAsString(proxyInfo))
    if 'group' not in proxyInfo:
        return self._errorReport('Proxy information does not contain the group', proxyRes['Message'])
    dnRes = getDNForUsername(proxyInfo['username'])
    if not dnRes['OK']:
        return self._errorReport('Failed to get proxies for user', dnRes['Message'])
    return S_OK(proxyInfo['username'])
def getDNandRole(info):
    """Return {'OwnerDN': ..., 'OwnerRole': ...} for the owner named in info.

    DN lookups are memoised in the module-level dnCache, keyed by username.

    :param info: dict with an 'Owner' username entry
    :return: owner dict, or S_ERROR() when no DN can be found
    """
    global dnCache, roleCache
    owner = {}
    ownerName = info['Owner']
    # Fixed: the original only called getDNForUsername on a cache *hit*,
    # never populated the cache, and returned an empty dict on a miss.
    ownerDN = dnCache.get(ownerName)
    if not ownerDN:
        result = getDNForUsername(ownerName)
        if not result['OK']:
            print("Cannot find DN")
            return S_ERROR()
        ownerDN = result['Value']
        dnCache[ownerName] = ownerDN
    owner['OwnerDN'] = ownerDN
    owner['OwnerRole'] = 'ilc/Role=production'
    return owner
def _getCurrentUser(self):
    """Get current user

    :return: S_OK(dict)/S_ERROR()
    """
    proxyRes = getProxyInfo(False, False)
    if not proxyRes["OK"]:
        return self._errorReport("No proxy found in local environment", proxyRes["Message"])
    proxyInfo = proxyRes["Value"]
    gLogger.debug(formatProxyInfoAsString(proxyInfo))
    # The proxy must carry a group to be usable
    if "group" not in proxyInfo:
        return self._errorReport("Proxy information does not contain the group", proxyRes["Message"])
    dnRes = getDNForUsername(proxyInfo["username"])
    if not dnRes["OK"]:
        return self._errorReport("Failed to get proxies for user", dnRes["Message"])
    return S_OK(proxyInfo["username"])
def export_getPilotStatistics(cls, attribute, selectDict):
    """Get pilot statistics distribution per attribute value with a given selection

    :param attribute: PilotAgentsDB attribute to aggregate on
    :param selectDict: selection conditions; FromDate/LastUpdate/ToDate are
                       extracted as time bounds and Owner is translated to OwnerDN
    :return: S_OK({value: count})
    """
    startDate = selectDict.get("FromDate", None)
    if startDate:
        del selectDict["FromDate"]
    if startDate is None:
        startDate = selectDict.get("LastUpdate", None)
        if startDate:
            del selectDict["LastUpdate"]
    endDate = selectDict.get("ToDate", None)
    if endDate:
        del selectDict["ToDate"]

    # Owner attribute is not part of PilotAgentsDB
    # It has to be converted into a OwnerDN
    owners = selectDict.get("Owner")
    if owners:
        ownerDNs = []
        for owner in owners:
            result = getDNForUsername(owner)
            if not result["OK"]:
                return result
            # getDNForUsername returns a *list* of DNs: flatten it instead of
            # appending the list itself (the original built a list of lists)
            ownerDNs.extend(result["Value"])
        selectDict["OwnerDN"] = ownerDNs
        del selectDict["Owner"]

    result = cls.pilotAgentsDB.getCounters(
        "PilotAgents", [attribute], selectDict, newer=startDate, older=endDate, timeStamp="LastUpdateTime"
    )
    statistics = {}
    if result["OK"]:
        for status, count in result["Value"]:
            if "OwnerDN" in status:
                # Present DNs back as usernames when the mapping exists
                userName = getUsernameForDN(status["OwnerDN"])
                if userName["OK"]:
                    status["OwnerDN"] = userName["Value"]
                statistics[status["OwnerDN"]] = count
            else:
                statistics[status[attribute]] = count
    return S_OK(statistics)
def __getCredentials(self):
    """Get the credentials to use if ShifterCredentials are set, otherwise do nothing.

    This function fills the self.credTuple tuple.

    :return: S_OK() or S_ERROR when the shifter or its DN cannot be resolved
    """
    if not self.credentials:
        return S_OK()
    resCred = Operations().getOptionsDict("/Shifter/%s" % self.credentials)
    if not resCred['OK']:
        self.log.error("Cred: Failed to find shifter credentials", self.credentials)
        return resCred
    owner = resCred['Value']['User']
    ownerGroup = resCred['Value']['Group']
    resDN = getDNForUsername(owner)
    if not resDN['OK']:
        # the original indexed ['Value'] without checking 'OK', raising a
        # KeyError when the shifter user had no DN registered
        self.log.error("Cred: Failed to find a DN for the shifter user", owner)
        return resDN
    # getDNForUsername returns a list; take the first DN
    ownerDN = resDN['Value'][0]
    self.credTuple = (owner, ownerGroup, ownerDN)
    self.log.info("Cred: Tasks will be submitted with the credentials %s:%s" % (owner, ownerGroup))
    return S_OK()
def getDirectoryMetadata(self, lfns, timeout=120):
    ''' Get standard directory metadata '''
    rpcClient = self._getRPC(timeout=timeout)
    result = rpcClient.getDirectoryMetadata(lfns)
    if not result['OK']:
        return result
    # Decorate each successful entry with the owner DN and VOMS role
    for path, metadata in result['Value']['Successful'].items():
        res = getDNForUsername(metadata['Owner'])
        metadata['OwnerDN'] = res['Value'][0] if res['OK'] else ''
        metadata['OwnerRole'] = getVOMSAttributeForGroup(metadata['OwnerGroup'])
    return result
def getDirectoryMetadata(self, lfns, timeout=120):
    ''' Get standard directory metadata

    :param lfns: directory paths to query
    :param timeout: RPC timeout in seconds
    :return: standard result structure; successful entries are enriched with
             'OwnerDN' (first registered DN of the owner, '' if none) and
             'OwnerRole' (VOMS attribute of the owner group)
    '''
    rpcClient = self._getRPC(timeout=timeout)
    result = rpcClient.getDirectoryMetadata(lfns)
    if not result['OK']:
        return result
    # Add some useful fields
    for path in result['Value']['Successful']:
        owner = result['Value']['Successful'][path]['Owner']
        group = result['Value']['Successful'][path]['OwnerGroup']
        res = getDNForUsername(owner)
        if res['OK']:
            # a user may have several DNs; expose the first one
            result['Value']['Successful'][path]['OwnerDN'] = res['Value'][0]
        else:
            result['Value']['Successful'][path]['OwnerDN'] = ''
        result['Value']['Successful'][path]['OwnerRole'] = getVOMSAttributeForGroup(group)
    return result
def __getCredentials(self):
    """Get the credentials to use if ShifterCredentials are set, otherwise do nothing.

    This function fills the self.credTuple tuple.

    :return: S_OK() or S_ERROR when the shifter or its DN cannot be resolved
    """
    if not self.credentials:
        return S_OK()
    resCred = Operations().getOptionsDict("/Shifter/%s" % self.credentials)
    if not resCred["OK"]:
        self.log.error("Cred: Failed to find shifter credentials", self.credentials)
        return resCred
    owner = resCred["Value"]["User"]
    ownerGroup = resCred["Value"]["Group"]
    resDN = getDNForUsername(owner)
    if not resDN["OK"]:
        # the original indexed ["Value"] without checking "OK", raising a
        # KeyError when the shifter user had no DN registered
        self.log.error("Cred: Failed to find a DN for the shifter user", owner)
        return resDN
    # getDNForUsername returns a list; take the first DN
    ownerDN = resDN["Value"][0]
    self.credTuple = (owner, ownerGroup, ownerDN)
    self.log.info("Cred: Tasks will be submitted with the credentials %s:%s" % (owner, ownerGroup))
    return S_OK()
def _putProxy(userDN=None, userName=None, userGroup=None, vomsFlag=None, proxyFilePath=None, executionLockFlag=False): """Download proxy, place in a file and populate X509_USER_PROXY environment variable. Parameters like `userProxy` or `executeWithUserProxy`. :returns: Tuple of originalUserProxy, useServerCertificate, executionLock """ # Setup user proxy if userDN: userDNs = [userDN] else: result = getDNForUsername(userName) if not result["OK"]: return result userDNs = result["Value"] # a same user may have more than one DN vomsAttr = "" if vomsFlag: vomsAttr = getVOMSAttributeForGroup(userGroup) result = getProxy(userDNs, userGroup, vomsAttr, proxyFilePath) if not result["OK"]: return result executionLock = LockRing().getLock( "_UseUserProxy_", recursive=True) if executionLockFlag else None if executionLockFlag: executionLock.acquire() os.environ["X509_USER_PROXY"], originalUserProxy = result[ "Value"], os.environ.get("X509_USER_PROXY") # Check if the caller is executing with the host certificate useServerCertificate = gConfig.useServerCertificate() if useServerCertificate: gConfigurationData.setOptionInCFG( "/DIRAC/Security/UseServerCertificate", "false") return S_OK((originalUserProxy, useServerCertificate, executionLock))
def getDirectoryMetadata(self, lfns, timeout=120):
    """Get standard directory metadata"""
    rpcClient = self._getRPC(timeout=timeout)
    result = rpcClient.getDirectoryMetadata(lfns)
    if not result["OK"]:
        return result
    successful = result["Value"]["Successful"]
    # Add some useful fields: owner DN and VOMS role per directory
    for path in successful:
        entry = successful[path]
        res = getDNForUsername(entry["Owner"])
        if res["OK"]:
            entry["OwnerDN"] = res["Value"][0]
        else:
            entry["OwnerDN"] = ""
        entry["OwnerRole"] = getVOMSAttributeForGroup(entry["OwnerGroup"])
    return result
def oAuth2():
    """Fill the credential dictionary from the OIDC userinfo endpoint.

    Uses the "AccessToken" secure cookie as a bearer token; on success sets
    username, DN and issuer in self.__credDict. Silently does nothing when
    no cookie is present or the provider reports an error.
    """
    if self.get_secure_cookie("AccessToken"):
        access_token = self.get_secure_cookie("AccessToken")
        url = Conf.getCSValue("TypeAuths/%s/authority" % typeAuth) + '/userinfo'
        heads = {'Authorization': 'Bearer ' + access_token, 'Content-Type': 'application/json'}
        # Issue the userinfo request ONCE and reuse the parsed response;
        # the original fired the same HTTP request up to three times.
        # NOTE(review): verify=False disables TLS verification — confirm intended.
        userinfo = requests.get(url, headers=heads, verify=False).json()
        if 'error' in userinfo:
            self.log.error('OIDC request error: %s' % userinfo['error'])
            return
        ID = userinfo['sub']
        result = getUsernameForID(ID)
        if result['OK']:
            self.__credDict['username'] = result['Value']
            result = getDNForUsername(self.__credDict['username'])
            if result['OK']:
                self.__credDict['validDN'] = True
                self.__credDict['DN'] = result['Value'][0]
            result = getCAForUsername(self.__credDict['username'])
            if result['OK']:
                self.__credDict['issuer'] = result['Value'][0]
    return
def prepareTransformationTasks(self, transBody, taskDict, owner="", ownerGroup="", ownerDN="", bulkSubmissionFlag=False):
    """Prepare tasks, given a taskDict, that is created (with some manipulation) by the DB

    jobClass is by default "DIRAC.Interfaces.API.Job.Job". An extension of it also works.

    :param str transBody: transformation job template
    :param dict taskDict: dictionary of per task parameters
    :param str owner: owner of the transformation
    :param str ownerGroup: group of the owner of the transformation
    :param str ownerDN: DN of the owner of the transformation
    :param bool bulkSubmissionFlag: flag for using bulk submission or not

    :return: S_OK/S_ERROR with updated taskDict
    """
    # Take the identity of the current proxy when no owner was supplied
    if not (owner and ownerGroup):
        proxyRes = getProxyInfo(False, False)
        if not proxyRes["OK"]:
            return proxyRes
        owner = proxyRes["Value"]["username"]
        ownerGroup = proxyRes["Value"]["group"]

    if not ownerDN:
        dnRes = getDNForUsername(owner)
        if not dnRes["OK"]:
            return dnRes
        ownerDN = dnRes["Value"][0]

    # Dispatch to the bulk or per-task preparation path
    preparer = self.__prepareTasksBulk if bulkSubmissionFlag else self.__prepareTasks
    return preparer(transBody, taskDict, owner, ownerGroup, ownerDN)
def _isAllowed(opObj, remoteCredentials):
    """ Make sure the client is allowed to persist an operation
        (FULL_DELEGATION or LIMITED_DELEGATION). This is the case of pilots,
        the RequestExecutingAgent or the FTS3Agent

        :param opObj: the FTS3Operation object
        :param remoteCredentials: credentials from the clients

        :returns: True if everything is fine, False otherwise
    """
    credDN = remoteCredentials["DN"]
    credGroup = remoteCredentials["group"]
    credProperties = remoteCredentials["properties"]

    # First, get the DN matching the username
    res = getDNForUsername(opObj.username)
    if not res["OK"]:
        # if we have an error, do not allow
        gLogger.error("Error retrieving DN for username", res)
        return False

    # If the credentials in the Request match those from the client, it's OK
    if opObj.userGroup == credGroup and credDN in res["Value"]:
        return True

    # From here, something/someone is putting a request on behalf of someone else:
    # only allow it when the credentials carry a delegation property
    return FULL_DELEGATION in credProperties or LIMITED_DELEGATION in credProperties
def prepareTransformationTasks(self, transBody, taskDict, owner="", ownerGroup="", ownerDN="", bulkSubmissionFlag=False):
    """Prepare tasks, given a taskDict, that is created (with some manipulation) by the DB"""
    if not taskDict:
        return S_OK({})

    # Resolve missing credentials from the local proxy
    if not (owner and ownerGroup):
        proxyRes = getProxyInfo(False, False)
        if not proxyRes["OK"]:
            return proxyRes
        owner = proxyRes["Value"]["username"]
        ownerGroup = proxyRes["Value"]["group"]

    if not ownerDN:
        dnRes = getDNForUsername(owner)
        if not dnRes["OK"]:
            return dnRes
        ownerDN = dnRes["Value"][0]

    try:
        transJson, _decLen = decode(transBody)
        # BaseBody objects go through the plugin machinery; anything else
        # decodable is a multi-operation description
        if isinstance(transJson, BaseBody):
            self._bodyPlugins(transJson, taskDict, ownerDN, ownerGroup)
        else:
            self._multiOperationsBody(transJson, taskDict, ownerDN, ownerGroup)
    except ValueError:  # the body could not be decoded: plain single operation
        self._singleOperationsBody(transBody, taskDict, ownerDN, ownerGroup)

    return S_OK(taskDict)
def _putProxy(userDN=None, userName=None, userGroup=None, vomsFlag=None, proxyFilePath=None, executionLockFlag=False):
    """Download proxy, place in a file and populate X509_USER_PROXY environment variable.

    Parameters like `userProxy` or `executeWithUserProxy`.
    :returns: Tuple of originalUserProxy, useServerCertificate, executionLock
    """
    # Resolve the DN(s): an explicit DN wins, otherwise look the user up
    if userDN:
        dnList = [userDN]
    else:
        lookup = getDNForUsername(userName)
        if not lookup['OK']:
            return lookup
        dnList = lookup['Value']  # a same user may have more than one DN

    vomsAttr = getVOMSAttributeForGroup(userGroup) if vomsFlag else ''

    proxyRes = getProxy(dnList, userGroup, vomsAttr, proxyFilePath)
    if not proxyRes['OK']:
        return proxyRes

    executionLock = None
    if executionLockFlag:
        executionLock = LockRing().getLock('_UseUserProxy_', recursive=True)
        executionLock.acquire()

    # Remember the previous proxy before pointing the environment at the new one
    originalUserProxy = os.environ.get('X509_USER_PROXY')
    os.environ['X509_USER_PROXY'] = proxyRes['Value']

    # Check if the caller is executing with the host certificate
    useServerCertificate = gConfig.useServerCertificate()
    if useServerCertificate:
        gConfigurationData.setOptionInCFG('/DIRAC/Security/UseServerCertificate', 'false')

    return S_OK((originalUserProxy, useServerCertificate, executionLock))
def generateProxyOrToken( self, client, grant_type, user=None, scope=None, expires_in=None, include_refresh_token=True ): """Generate proxy or tokens after authorization :param client: instance of the IdP client :param grant_type: authorization grant type (unused) :param str user: user identificator :param str scope: requested scope :param expires_in: when the token should expire (unused) :param bool include_refresh_token: to include refresh token (unused) :return: dict or str -- will return tokens as dict or proxy as string """ # Read requested scopes group = self._getScope(scope, "g") lifetime = self._getScope(scope, "lifetime") # Found provider name for group provider = getIdPForGroup(group) # Search DIRAC username by user ID result = getUsernameForDN(wrapIDAsDN(user)) if not result["OK"]: raise OAuth2Error(result["Message"]) userName = result["Value"] # User request a proxy if "proxy" in scope_to_list(scope): # Try to return user proxy if proxy scope present in the authorization request if not isDownloadProxyAllowed(): raise OAuth2Error("You can't get proxy, configuration(allowProxyDownload) not allow to do that.") sLog.debug( "Try to query %s@%s proxy%s" % (user, group, ("with lifetime:%s" % lifetime) if lifetime else "") ) # Get user DNs result = getDNForUsername(userName) if not result["OK"]: raise OAuth2Error(result["Message"]) userDNs = result["Value"] err = [] # Try every DN to generate a proxy for dn in userDNs: sLog.debug("Try to get proxy for %s" % dn) params = {} if lifetime: params["requiredTimeLeft"] = int(lifetime) # if the configuration describes adding a VOMS extension, we will do so if getGroupOption(group, "AutoAddVOMS", False): result = self.proxyCli.downloadVOMSProxy(dn, group, **params) else: # otherwise we will return the usual proxy result = self.proxyCli.downloadProxy(dn, group, **params) if not result["OK"]: err.append(result["Message"]) else: sLog.info("Proxy was created.") result = result["Value"].dumpAllToString() if not result["OK"]: 
raise OAuth2Error(result["Message"]) # Proxy generated return { "proxy": result["Value"].decode() if isinstance(result["Value"], bytes) else result["Value"] } # Proxy cannot be generated or not found raise OAuth2Error("; ".join(err)) # User request a tokens else: # Ask TokenManager to generate new tokens for user result = self.tokenCli.getToken(userName, group) if not result["OK"]: raise OAuth2Error(result["Message"]) token = result["Value"] # Wrap the refresh token and register it to protect against reuse result = self.registerRefreshToken( dict(sub=user, scope=scope, provider=provider, azp=client.get_client_id()), token ) if not result["OK"]: raise OAuth2Error(result["Message"]) # Return tokens as dictionary return result["Value"]
def getPilotMonitorWeb(self, selectDict, sortList, startItem, maxItems):
    """Get summary of the pilot job information in a standard structure

    :param dict selectDict: selection criteria; special keys Owner/FromDate/ToDate/
        LastUpdateTime are consumed here, the rest is passed to selectPilots
    :param list sortList: list of (field, direction) pairs; only the first is used
    :param int startItem: index of the first record to return
    :param int maxItems: maximum number of records to return
    :return: S_OK with TotalRecords, ParameterNames and Records / S_ERROR
    """
    resultDict = {}
    if "LastUpdateTime" in selectDict:
        del selectDict["LastUpdateTime"]
    if "Owner" in selectDict:
        # Translate usernames into the OwnerDN selection the DB understands
        userList = selectDict["Owner"]
        if not isinstance(userList, list):
            userList = [userList]
        dnList = []
        for uName in userList:
            result = getDNForUsername(uName)
            # BUGFIX: the result used to be dereferenced unchecked, raising
            # KeyError('Value') for users without a registered DN.
            if not result["OK"]:
                continue
            dnList += result["Value"]
        selectDict["OwnerDN"] = dnList
        del selectDict["Owner"]
    startDate = selectDict.get("FromDate", None)
    if startDate:
        del selectDict["FromDate"]
    # For backward compatibility
    if startDate is None:
        startDate = selectDict.get("LastUpdateTime", None)
        if startDate:
            del selectDict["LastUpdateTime"]
    endDate = selectDict.get("ToDate", None)
    if endDate:
        del selectDict["ToDate"]
    # Sorting instructions. Only one for the moment.
    if sortList:
        orderAttribute = sortList[0][0] + ":" + sortList[0][1]
    else:
        orderAttribute = None
    # Select pilots for the summary
    result = self.selectPilots(
        selectDict, orderAttribute=orderAttribute, newer=startDate, older=endDate, timeStamp="LastUpdateTime"
    )
    if not result["OK"]:
        return S_ERROR("Failed to select pilots: " + result["Message"])
    pList = result["Value"]
    nPilots = len(pList)
    resultDict["TotalRecords"] = nPilots
    if nPilots == 0:
        return S_OK(resultDict)
    # Paginate the selected pilot list
    ini = startItem
    last = ini + maxItems
    if ini >= nPilots:
        return S_ERROR("Item number out of range")
    if last > nPilots:
        last = nPilots
    pilotList = pList[ini:last]
    paramNames = [
        "PilotJobReference",
        "OwnerDN",
        "OwnerGroup",
        "GridType",
        "Broker",
        "Status",
        "DestinationSite",
        "BenchMark",
        "ParentID",
        "SubmissionTime",
        "PilotID",
        "LastUpdateTime",
        "CurrentJobID",
        "TaskQueueID",
        "GridSite",
    ]
    result = self.getPilotInfo(pilotList, paramNames=paramNames)
    if not result["OK"]:
        return S_ERROR("Failed to get pilot info: " + result["Message"])
    pilotDict = result["Value"]
    records = []
    for pilot in pilotList:
        parList = []
        for parameter in paramNames:
            # Stringify everything except integer values
            if not isinstance(pilotDict[pilot][parameter], six.integer_types):
                parList.append(str(pilotDict[pilot][parameter]))
            else:
                parList.append(pilotDict[pilot][parameter])
            if parameter == "GridSite":
                gridSite = pilotDict[pilot][parameter]
                # If the Grid Site is unknown try to recover it in the last moment
                if gridSite == "Unknown":
                    ce = pilotDict[pilot]["DestinationSite"]
                    result = getCESiteMapping(ce)
                    if result["OK"]:
                        gridSite = result["Value"].get(ce)
                        # Replace the just-appended value with the recovered site
                        del parList[-1]
                        parList.append(gridSite)
        records.append(parList)
    resultDict["ParameterNames"] = paramNames
    resultDict["Records"] = records
    return S_OK(resultDict)
def getFTS3Context(self, username, group, ftsServer, threadID):
    """ Returns an fts3 context for a given user, group and fts server

        The context pool is per thread, and there is one context
        per tuple (user, group, server).
        We dump the proxy of a user to a file (shared by all the threads),
        and use it to make the context.
        The proxy needs a lifetime of self.proxyLifetime, is cached for cacheTime = (2*lifeTime/3) - 10mn,
        and the lifetime of the context is 45mn
        The reason for cacheTime to be what it is is because the FTS3 server will ask for a new proxy
        after 2/3rd of the existing proxy has expired, so we renew it just before

        :param str username: name of the user
        :param str group: group of the user
        :param str ftsServer: address of the server
        :param str threadID: thread ID

        :returns: S_OK with the context object
    """
    log = gLogger.getSubLogger("getFTS3Context", child=True)

    # One DictCache of contexts per thread
    contextes = self._globalContextCache.setdefault(threadID, DictCache())

    idTuple = (username, group, ftsServer)
    log.debug("Getting context for %s" % (idTuple, ))

    # We keep a context in the cache for 45 minutes
    # (so it needs to be valid at least 15 since we add it for one hour)
    if not contextes.exists(idTuple, 15 * 60):
        res = getDNForUsername(username)
        if not res['OK']:
            return res
        # We take the first DN returned
        userDN = res['Value'][0]
        log.debug("UserDN %s" % userDN)

        # We dump the proxy to a file.
        # It has to have a lifetime of self.proxyLifetime
        # Because the FTS3 servers cache it for 2/3rd of the lifetime
        # we should make our cache a bit less than 2/3rd of the lifetime
        cacheTime = int(2 * self.proxyLifetime / 3) - 600
        res = gProxyManager.downloadVOMSProxyToFile(
            userDN, group, requiredTimeLeft=self.proxyLifetime, cacheTime=cacheTime)
        if not res['OK']:
            return res

        proxyFile = res['Value']
        log.debug("Proxy file %s" % proxyFile)

        # We generate the context
        # In practice, the lifetime will be less than proxyLifetime
        # because we reuse a cached proxy. However, the cached proxy will
        # never forced a redelegation, because it is recent enough for FTS3 servers.
        # The delegation is forced when 2/3 rd of the lifetime are left, and we get a fresh
        # one just before. So no problem
        res = FTS3Job.generateContext(ftsServer, proxyFile, lifetime=self.proxyLifetime)
        if not res['OK']:
            return res
        context = res['Value']

        # we add it to the cache for this thread for 1h
        contextes.add(idTuple, 3600, context)

    return S_OK(contextes.get(idTuple))
def prepareTransformationTasks(self, transBody, taskDict, owner='', ownerGroup='', ownerDN=''):
    """ Prepare tasks, given a taskDict, that is created (with some manipulation) by the DB
        jobClass is by default "DIRAC.Interfaces.API.Job.Job". An extension of it also works.

    :param transBody: transformation job template (workflow XML)
    :param dict taskDict: dictionary of per-task parameters, updated in place
    :param str owner: owner of the transformation (defaults to the current proxy user)
    :param str ownerGroup: group of the owner (defaults to the current proxy group)
    :param str ownerDN: DN of the owner (defaults to the first DN of the owner)
    :return: S_OK(taskDict) / S_ERROR
    """
    # Fall back to the identity of the current proxy when owner/group not supplied
    if (not owner) or (not ownerGroup):
        res = getProxyInfo(False, False)
        if not res['OK']:
            return res
        proxyInfo = res['Value']
        owner = proxyInfo['username']
        ownerGroup = proxyInfo['group']

    if not ownerDN:
        res = getDNForUsername(owner)
        if not res['OK']:
            return res
        # A user may have several DNs; the first one is used
        ownerDN = res['Value'][0]

    for taskNumber in sorted(taskDict):
        oJob = self.jobClass(transBody)
        paramsDict = taskDict[taskNumber]
        site = oJob.workflow.findParameter('Site').getValue()
        paramsDict['Site'] = site
        jobType = oJob.workflow.findParameter('JobType').getValue()
        paramsDict['JobType'] = jobType
        transID = paramsDict['TransformationID']
        self._logVerbose('Setting job owner:group to %s:%s' % (owner, ownerGroup))
        oJob.setOwner(owner)
        oJob.setOwnerGroup(ownerGroup)
        oJob.setOwnerDN(ownerDN)
        transGroup = str(transID).zfill(8)
        self._logVerbose('Adding default transformation group of %s' % (transGroup))
        oJob.setJobGroup(transGroup)
        constructedName = str(transID).zfill(8) + '_' + str(taskNumber).zfill(8)
        self._logVerbose('Setting task name to %s' % constructedName)
        oJob.setName(constructedName)
        oJob._setParamValue('PRODUCTION_ID', str(transID).zfill(8))
        oJob._setParamValue('JOB_ID', str(taskNumber).zfill(8))
        inputData = None

        self._logDebug('TransID: %s, TaskID: %s, paramsDict: %s' % (transID, taskNumber, str(paramsDict)))

        # These helper functions do the real job
        sites = self._handleDestination(paramsDict)
        if not sites:
            # BUGFIX: message used to read "a list a sites"
            self._logError('Could not get a list of sites')
            taskDict[taskNumber]['TaskObject'] = ''
            continue
        self._logVerbose('Setting Site: ', str(sites))
        res = oJob.setDestination(sites)
        if not res['OK']:
            self._logError('Could not set the site: %s' % res['Message'])
            # BUGFIX: mark the task like every other failure path does, so
            # downstream consumers do not hit a missing 'TaskObject' key
            taskDict[taskNumber]['TaskObject'] = ''
            continue

        self._handleInputs(oJob, paramsDict)
        self._handleRest(oJob, paramsDict)

        hospitalTrans = [int(x) for x in self.opsH.getValue("Hospital/Transformations", [])]
        if int(transID) in hospitalTrans:
            self._handleHospital(oJob)

        taskDict[taskNumber]['TaskObject'] = ''
        if self.outputDataModule:
            res = self.getOutputData(
                {'Job': oJob._toXML(), 'TransformationID': transID,
                 'TaskID': taskNumber, 'InputData': inputData},
                moduleLocation=self.outputDataModule)
            if not res['OK']:
                self._logError("Failed to generate output data", res['Message'])
                continue
            for name, output in res['Value'].items():
                oJob._addJDLParameter(name, ';'.join(output))
        taskDict[taskNumber]['TaskObject'] = self.jobClass(oJob._toXML())
    return S_OK(taskDict)
def prepareTransformationTasks( self, transBody, taskDict, owner = '', ownerGroup = '', ownerDN = '' ):
  """ Prepare tasks, given a taskDict, that is created (with some manipulation) by the DB

  Builds one Request per task, with a single transfer-type Operation holding the
  task's input files, and validates it before attaching it to the taskDict.

  :param transBody: optional "RequestType;Operation" string selecting the operation type
  :param dict taskDict: dictionary of per-task parameters; invalid tasks are popped
  :param str owner: owner of the transformation (defaults to the current proxy user)
  :param str ownerGroup: group of the owner (defaults to the current proxy group)
  :param str ownerDN: DN of the owner (defaults to the first DN of the owner)
  :return: S_OK(taskDict) / S_ERROR
  """
  if not taskDict:
    return S_OK( {} )

  # Fall back to the identity of the current proxy when owner/group not supplied
  if ( not owner ) or ( not ownerGroup ):
    res = getProxyInfo( False, False )
    if not res['OK']:
      return res
    proxyInfo = res['Value']
    owner = proxyInfo['username']
    ownerGroup = proxyInfo['group']

  if not ownerDN:
    res = getDNForUsername( owner )
    if not res['OK']:
      return res
    # A user may have several DNs; the first one is used
    ownerDN = res['Value'][0]

  requestOperation = 'ReplicateAndRegister'
  if transBody:
    try:
      _requestType, requestOperation = transBody.split( ';' )
    except AttributeError:
      # transBody is not a string: keep the default operation
      pass
    except ValueError:
      # BUGFIX: a string without exactly one ';' used to raise an uncaught
      # ValueError from tuple unpacking; keep the default operation instead
      pass

  # Do not remove sorted, we might pop elements in the loop
  for taskID in sorted( taskDict ):
    paramDict = taskDict[taskID]

    transID = paramDict['TransformationID']

    oRequest = Request()
    transfer = Operation()
    transfer.Type = requestOperation
    transfer.TargetSE = paramDict['TargetSE']

    # If there are input files
    if paramDict['InputData']:
      if isinstance( paramDict['InputData'], list ):
        files = paramDict['InputData']
      elif isinstance( paramDict['InputData'], basestring ):
        files = paramDict['InputData'].split( ';' )
      else:
        # BUGFIX: 'files' used to be left unbound here, raising NameError
        files = []
      for lfn in files:
        trFile = File()
        trFile.LFN = lfn
        transfer.addFile( trFile )

    oRequest.addOperation( transfer )
    oRequest.RequestName = _requestName( transID, taskID )
    oRequest.OwnerDN = ownerDN
    oRequest.OwnerGroup = ownerGroup

    isValid = self.requestValidator.validate( oRequest )
    if not isValid['OK']:
      self.log.error( "Error creating request for task", "%s %s" % ( taskID, isValid ) )
      # This works because we loop over a copy of the keys !
      taskDict.pop( taskID )
      continue

    taskDict[taskID]['TaskObject'] = oRequest

  return S_OK( taskDict )
def prepareTransformationTasks(self, transBody, taskDict, owner='', ownerGroup='', ownerDN=''):
    """ Prepare tasks, given a taskDict, that is created (with some manipulation) by the DB

    Builds one Request per task, with a single transfer-type Operation holding the
    task's input files, and validates it before attaching it to the taskDict.

    :param transBody: optional "RequestType;Operation" string selecting the operation type
    :param dict taskDict: dictionary of per-task parameters; invalid tasks are popped
    :param str owner: owner of the transformation (defaults to the current proxy user)
    :param str ownerGroup: group of the owner (defaults to the current proxy group)
    :param str ownerDN: DN of the owner (defaults to the first DN of the owner)
    :return: S_OK(taskDict) / S_ERROR
    """
    if not taskDict:
        return S_OK({})

    # Fall back to the identity of the current proxy when owner/group not supplied
    if (not owner) or (not ownerGroup):
        res = getProxyInfo(False, False)
        if not res['OK']:
            return res
        proxyInfo = res['Value']
        owner = proxyInfo['username']
        ownerGroup = proxyInfo['group']

    if not ownerDN:
        res = getDNForUsername(owner)
        if not res['OK']:
            return res
        # A user may have several DNs; the first one is used
        ownerDN = res['Value'][0]

    requestOperation = 'ReplicateAndRegister'
    if transBody:
        try:
            _requestType, requestOperation = transBody.split(';')
        except AttributeError:
            # transBody is not a string: keep the default operation
            pass
        except ValueError:
            # BUGFIX: a string without exactly one ';' used to raise an uncaught
            # ValueError from tuple unpacking; keep the default operation instead
            pass

    # Do not remove sorted, we might pop elements in the loop
    for taskID in sorted(taskDict):
        paramDict = taskDict[taskID]

        transID = paramDict['TransformationID']

        oRequest = Request()
        transfer = Operation()
        transfer.Type = requestOperation
        transfer.TargetSE = paramDict['TargetSE']

        # If there are input files
        if paramDict['InputData']:
            if isinstance(paramDict['InputData'], list):
                files = paramDict['InputData']
            elif isinstance(paramDict['InputData'], basestring):
                files = paramDict['InputData'].split(';')
            else:
                # BUGFIX: 'files' used to be left unbound here, raising NameError
                files = []
            for lfn in files:
                trFile = File()
                trFile.LFN = lfn
                transfer.addFile(trFile)

        oRequest.addOperation(transfer)
        oRequest.RequestName = _requestName(transID, taskID)
        oRequest.OwnerDN = ownerDN
        oRequest.OwnerGroup = ownerGroup

        isValid = self.requestValidator.validate(oRequest)
        if not isValid['OK']:
            self.log.error("Error creating request for task", "%s %s" % (taskID, isValid))
            # This works because we loop over a copy of the keys !
            taskDict.pop(taskID)
            continue

        taskDict[taskID]['TaskObject'] = oRequest

    return S_OK(taskDict)
def wrapped_fcn( *args, **kwargs ):
  """Execute the wrapped function with a substituted user proxy.

  Pops the proxy-control keyword arguments, downloads the requested user
  proxy into a file, points X509_USER_PROXY at it (temporarily disabling
  server-certificate mode if active), calls the wrapped function, and
  restores the original environment afterwards.

  :param str proxyUserName: DIRAC username to get the proxy for
  :param str proxyUserDN: DN to get the proxy for (takes precedence over the username)
  :param str proxyUserGroup: DIRAC group of the proxy (required for substitution)
  :param bool proxyWithVOMS: if True (default), request the group's VOMS attribute
  :param proxyFilePath: optional target path for the downloaded proxy file
  """
  userName = kwargs.pop( 'proxyUserName', '' )
  userDN = kwargs.pop( 'proxyUserDN', '' )
  userGroup = kwargs.pop( 'proxyUserGroup', '' )
  vomsFlag = kwargs.pop( 'proxyWithVOMS', True )
  proxyFilePath = kwargs.pop( 'proxyFilePath', False )
  # Substitution happens only when both an identity and a group are given
  if ( userName or userDN ) and userGroup:
    # Setup user proxy
    originalUserProxy = os.environ.get( 'X509_USER_PROXY' )
    if not userDN:
      result = getDNForUsername( userName )
      if not result[ 'OK' ]:
        return result
      # A user may have several DNs; the first one is used
      userDN = result[ 'Value' ][0]
    vomsAttr = ''
    if vomsFlag:
      vomsAttr = getVOMSAttributeForGroup( userGroup )
    if vomsAttr:
      result = gProxyManager.downloadVOMSProxyToFile( userDN, userGroup,
                                                      requiredVOMSAttribute = vomsAttr,
                                                      filePath = proxyFilePath,
                                                      requiredTimeLeft = 3600,
                                                      cacheTime = 3600 )
    else:
      result = gProxyManager.downloadProxyToFile( userDN, userGroup,
                                                  filePath = proxyFilePath,
                                                  requiredTimeLeft = 3600,
                                                  cacheTime = 3600 )
    if not result['OK']:
      gLogger.warn( "Can't download proxy to file", result['Message'] )
      return result
    proxyFile = result['Value']
    os.environ['X509_USER_PROXY'] = proxyFile
    # Check if the caller is executing with the host certificate
    useServerCertificate = gConfig.useServerCertificate()
    if useServerCertificate:
      # Temporarily switch to user-proxy mode for the duration of the call
      gConfigurationData.setOptionInCFG( '/DIRAC/Security/UseServerCertificate', 'false' )
    try:
      return fcn( *args, **kwargs )
    except Exception as lException:
      # Wrap any exception from the target function into an S_ERROR
      value = ','.join( [str( arg ) for arg in lException.args] )
      exceptType = lException.__class__.__name__
      return S_ERROR( "Exception - %s: %s" % ( exceptType, value ) )
    finally:
      # Restore the default host certificate usage if necessary
      if useServerCertificate:
        gConfigurationData.setOptionInCFG( '/DIRAC/Security/UseServerCertificate', 'true' )
      # Restore the previous X509_USER_PROXY value (or remove it if there was none)
      if originalUserProxy:
        os.environ['X509_USER_PROXY'] = originalUserProxy
      else:
        os.environ.pop( 'X509_USER_PROXY' )
  else:
    # No proxy substitution requested
    return fcn( *args, **kwargs )
def prepareTransformationTasks( self, transBody, taskDict, owner = '', ownerGroup = '', ownerDN = '' ):
  """ Prepare tasks, given a taskDict, that is created (with some manipulation) by the DB
      jobClass is by default "DIRAC.Interfaces.API.Job.Job". An extension of it also works.

  :param transBody: transformation job template (workflow XML)
  :param dict taskDict: dictionary of per-task parameters, updated in place
  :param str owner: owner of the transformation (defaults to the current proxy user)
  :param str ownerGroup: group of the owner (defaults to the current proxy group)
  :param str ownerDN: DN of the owner (defaults to the first DN of the owner)
  :return: S_OK(taskDict) / S_ERROR
  """
  # Fall back to the identity of the current proxy when owner/group not supplied
  if ( not owner ) or ( not ownerGroup ):
    res = getProxyInfo( False, False )
    if not res['OK']:
      return res
    proxyInfo = res['Value']
    owner = proxyInfo['username']
    ownerGroup = proxyInfo['group']

  if not ownerDN:
    res = getDNForUsername( owner )
    if not res['OK']:
      return res
    # A user may have several DNs; the first one is used
    ownerDN = res['Value'][0]

  for taskNumber in sorted( taskDict ):
    oJob = self.jobClass( transBody )
    paramsDict = taskDict[taskNumber]
    site = oJob.workflow.findParameter( 'Site' ).getValue()
    paramsDict['Site'] = site
    jobType = oJob.workflow.findParameter( 'JobType' ).getValue()
    paramsDict['JobType'] = jobType
    transID = paramsDict['TransformationID']
    self._logVerbose( 'Setting job owner:group to %s:%s' % ( owner, ownerGroup ) )
    oJob.setOwner( owner )
    oJob.setOwnerGroup( ownerGroup )
    oJob.setOwnerDN( ownerDN )
    transGroup = str( transID ).zfill( 8 )
    self._logVerbose( 'Adding default transformation group of %s' % ( transGroup ) )
    oJob.setJobGroup( transGroup )
    constructedName = str( transID ).zfill( 8 ) + '_' + str( taskNumber ).zfill( 8 )
    self._logVerbose( 'Setting task name to %s' % constructedName )
    oJob.setName( constructedName )
    oJob._setParamValue( 'PRODUCTION_ID', str( transID ).zfill( 8 ) )
    oJob._setParamValue( 'JOB_ID', str( taskNumber ).zfill( 8 ) )
    inputData = None

    self._logDebug( 'TransID: %s, TaskID: %s, paramsDict: %s' % ( transID, taskNumber, str( paramsDict ) ) )

    # These helper functions do the real job
    sites = self._handleDestination( paramsDict )
    if not sites:
      # BUGFIX: message used to read "a list a sites"
      self._logError( 'Could not get a list of sites' )
      taskDict[taskNumber]['TaskObject'] = ''
      continue
    self._logVerbose( 'Setting Site: ', str( sites ) )
    res = oJob.setDestination( sites )
    if not res['OK']:
      self._logError( 'Could not set the site: %s' % res['Message'] )
      # BUGFIX: mark the task like every other failure path does, so
      # downstream consumers do not hit a missing 'TaskObject' key
      taskDict[taskNumber]['TaskObject'] = ''
      continue

    self._handleInputs( oJob, paramsDict )
    self._handleRest( oJob, paramsDict )

    hospitalTrans = [int( x ) for x in self.opsH.getValue( "Hospital/Transformations", [] )]
    if int( transID ) in hospitalTrans:
      self._handleHospital( oJob )

    taskDict[taskNumber]['TaskObject'] = ''
    if self.outputDataModule:
      res = self.getOutputData( {'Job':oJob._toXML(), 'TransformationID':transID,
                                 'TaskID':taskNumber, 'InputData':inputData},
                                moduleLocation = self.outputDataModule )
      if not res ['OK']:
        self._logError( "Failed to generate output data", res['Message'] )
        continue
      for name, output in res['Value'].items():
        oJob._addJDLParameter( name, ';'.join( output ) )
    taskDict[taskNumber]['TaskObject'] = self.jobClass( oJob._toXML() )
  return S_OK( taskDict )
# Identify the user currently running the script
admin = DiracAdmin()
userName = admin._getCurrentUser()
if not userName["OK"]:
  gLogger.error( userName["Message"] )
  sys.exit( -1 )
userName = userName["Value"]
gLogger.always( "current user is '%s'" % userName )
# Verify that the requested group is among the user's registered groups
userGroups = getGroupsForUser( userName )
if not userGroups["OK"]:
  gLogger.error( userGroups["Message"] )
  sys.exit( -1 )
userGroups = userGroups["Value"]
if userGroup not in userGroups:
  gLogger.error( "'%s' is not a member of the '%s' group" % ( userName, userGroup ) )
  sys.exit( -1 )
# Resolve the user's DN (a user may have several; the first one is used)
userDN = getDNForUsername( userName )
if not userDN["OK"]:
  gLogger.error( userDN["Message"] )
  sys.exit( -1 )
userDN = userDN["Value"][0]
gLogger.always( "userDN is %s" % userDN )
# Submit the full-chain test request on behalf of the resolved identity
fct = FullChainTest()
put = fct.putRequest( userName, userDN, userGroup, sourceSE, targetSE1, targetSE2 )
def getPilotMonitorWeb( self, selectDict, sortList, startItem, maxItems ):
  """ Get summary of the pilot job information in a standard structure

  :param dict selectDict: selection criteria; special keys Owner/FromDate/ToDate/
      LastUpdateTime are consumed here, the rest is passed to selectPilots
  :param list sortList: list of (field, direction) pairs; only the first is used
  :param int startItem: index of the first record to return
  :param int maxItems: maximum number of records to return
  :return: S_OK with TotalRecords, ParameterNames and Records / S_ERROR
  """
  resultDict = {}
  # 'in' replaces the deprecated dict.has_key()
  if 'LastUpdateTime' in selectDict:
    del selectDict['LastUpdateTime']
  if 'Owner' in selectDict:
    # Translate usernames into the OwnerDN selection the DB understands
    userList = selectDict['Owner']
    if not isinstance( userList, list ):
      userList = [userList]
    dnList = []
    for uName in userList:
      result = getDNForUsername( uName )
      # BUGFIX: the result used to be dereferenced unchecked, raising
      # KeyError('Value') for users without a registered DN.
      if result['OK']:
        dnList += result['Value']
    selectDict['OwnerDN'] = dnList
    del selectDict['Owner']
  startDate = selectDict.get( 'FromDate', None )
  if startDate:
    del selectDict['FromDate']
  # For backward compatibility
  if startDate is None:
    startDate = selectDict.get( 'LastUpdateTime', None )
    if startDate:
      del selectDict['LastUpdateTime']
  endDate = selectDict.get( 'ToDate', None )
  if endDate:
    del selectDict['ToDate']

  # Sorting instructions. Only one for the moment.
  if sortList:
    orderAttribute = sortList[0][0] + ":" + sortList[0][1]
  else:
    orderAttribute = None

  # Select pilots for the summary
  result = self.selectPilots( selectDict, orderAttribute = orderAttribute,
                              newer = startDate, older = endDate, timeStamp = 'LastUpdateTime' )
  if not result['OK']:
    return S_ERROR( 'Failed to select pilots: ' + result['Message'] )
  pList = result['Value']
  nPilots = len( pList )
  resultDict['TotalRecords'] = nPilots
  if nPilots == 0:
    return S_OK( resultDict )

  # Paginate the selected pilot list
  ini = startItem
  last = ini + maxItems
  if ini >= nPilots:
    return S_ERROR( 'Item number out of range' )
  if last > nPilots:
    last = nPilots
  pilotList = pList[ini:last]

  paramNames = ['PilotJobReference', 'OwnerDN', 'OwnerGroup', 'GridType', 'Broker',
                'Status', 'DestinationSite', 'BenchMark', 'ParentID',
                'SubmissionTime', 'PilotID', 'LastUpdateTime', 'CurrentJobID', 'TaskQueueID', 'GridSite']

  result = self.getPilotInfo( pilotList, paramNames = paramNames )
  if not result['OK']:
    return S_ERROR( 'Failed to get pilot info: ' + result['Message'] )
  pilotDict = result['Value']
  records = []
  for pilot in pilotList:
    parList = []
    for parameter in paramNames:
      # Stringify everything except integer values
      if type( pilotDict[pilot][parameter] ) not in [IntType, LongType]:
        parList.append( str( pilotDict[pilot][parameter] ) )
      else:
        parList.append( pilotDict[pilot][parameter] )
      if parameter == 'GridSite':
        gridSite = pilotDict[pilot][parameter]
        # If the Grid Site is unknown try to recover it in the last moment
        if gridSite == "Unknown":
          ce = pilotDict[pilot]['DestinationSite']
          result = getSiteForCE( ce )
          if result['OK']:
            gridSite = result['Value']
            # Replace the just-appended value with the recovered site
            del parList[-1]
            parList.append( gridSite )
    records.append( parList )

  resultDict['ParameterNames'] = paramNames
  resultDict['Records'] = records

  return S_OK( resultDict )
def getPilotMonitorWeb(self, selectDict, sortList, startItem, maxItems):
    """ Get summary of the pilot job information in a standard structure

    :param dict selectDict: selection criteria; special keys Owner/FromDate/ToDate/
        LastUpdateTime are consumed here, the rest is passed to selectPilots
    :param list sortList: list of (field, direction) pairs; only the first is used
    :param int startItem: index of the first record to return
    :param int maxItems: maximum number of records to return
    :return: S_OK with TotalRecords, ParameterNames and Records / S_ERROR
    """
    resultDict = {}
    if 'LastUpdateTime' in selectDict:
        del selectDict['LastUpdateTime']
    if 'Owner' in selectDict:
        # Translate usernames into the OwnerDN selection the DB understands
        userList = selectDict['Owner']
        if not isinstance(userList, list):
            userList = [userList]
        dnList = []
        for uName in userList:
            result = getDNForUsername(uName)
            # BUGFIX: the result used to be dereferenced unchecked, raising
            # KeyError('Value') for users without a registered DN.
            if result['OK']:
                dnList += result['Value']
        selectDict['OwnerDN'] = dnList
        del selectDict['Owner']
    startDate = selectDict.get('FromDate', None)
    if startDate:
        del selectDict['FromDate']
    # For backward compatibility
    if startDate is None:
        startDate = selectDict.get('LastUpdateTime', None)
        if startDate:
            del selectDict['LastUpdateTime']
    endDate = selectDict.get('ToDate', None)
    if endDate:
        del selectDict['ToDate']

    # Sorting instructions. Only one for the moment.
    if sortList:
        orderAttribute = sortList[0][0] + ":" + sortList[0][1]
    else:
        orderAttribute = None

    # Select pilots for the summary
    result = self.selectPilots(selectDict, orderAttribute=orderAttribute,
                               newer=startDate, older=endDate, timeStamp='LastUpdateTime')
    if not result['OK']:
        return S_ERROR('Failed to select pilots: ' + result['Message'])
    pList = result['Value']
    nPilots = len(pList)
    resultDict['TotalRecords'] = nPilots
    if nPilots == 0:
        return S_OK(resultDict)

    # Paginate the selected pilot list
    ini = startItem
    last = ini + maxItems
    if ini >= nPilots:
        return S_ERROR('Item number out of range')
    if last > nPilots:
        last = nPilots
    pilotList = pList[ini:last]

    paramNames = [
        'PilotJobReference', 'OwnerDN', 'OwnerGroup', 'GridType', 'Broker',
        'Status', 'DestinationSite', 'BenchMark', 'ParentID', 'SubmissionTime',
        'PilotID', 'LastUpdateTime', 'CurrentJobID', 'TaskQueueID', 'GridSite'
    ]

    result = self.getPilotInfo(pilotList, paramNames=paramNames)
    if not result['OK']:
        return S_ERROR('Failed to get pilot info: ' + result['Message'])
    pilotDict = result['Value']
    records = []
    for pilot in pilotList:
        parList = []
        for parameter in paramNames:
            # Stringify everything except integer values
            if not isinstance(pilotDict[pilot][parameter], six.integer_types):
                parList.append(str(pilotDict[pilot][parameter]))
            else:
                parList.append(pilotDict[pilot][parameter])
            if parameter == 'GridSite':
                gridSite = pilotDict[pilot][parameter]
                # If the Grid Site is unknown try to recover it in the last moment
                if gridSite == "Unknown":
                    ce = pilotDict[pilot]['DestinationSite']
                    result = getCESiteMapping(ce)
                    if result['OK']:
                        gridSite = result['Value'].get(ce)
                        # Replace the just-appended value with the recovered site
                        del parList[-1]
                        parList.append(gridSite)
        records.append(parList)

    resultDict['ParameterNames'] = paramNames
    resultDict['Records'] = records

    return S_OK(resultDict)
# Identify the user currently running the script
admin = DiracAdmin()
userName = admin._getCurrentUser()
if not userName["OK"]:
    gLogger.error(userName["Message"])
    sys.exit(-1)
userName = userName["Value"]
gLogger.always("current user is '%s'" % userName)
# Verify that the requested group is among the user's registered groups
userGroups = getGroupsForUser(userName)
if not userGroups["OK"]:
    gLogger.error(userGroups["Message"])
    sys.exit(-1)
userGroups = userGroups["Value"]
if userGroup not in userGroups:
    gLogger.error("'%s' is not a member of the '%s' group" % (userName, userGroup))
    sys.exit(-1)
# Resolve the user's DN (a user may have several; the first one is used)
userDN = getDNForUsername(userName)
if not userDN["OK"]:
    gLogger.error(userDN["Message"])
    sys.exit(-1)
userDN = userDN["Value"][0]
gLogger.always("userDN is %s" % userDN)
# Submit the full-chain test request on behalf of the resolved identity
fct = FullChainTest()
put = fct.putRequest(userName, userDN, userGroup, sourceSE, targetSE1, targetSE2)