Code example #1
File: DataCache.py Project: TaykYoku/DIRAC
 def __init__(self, dirName="accountingPlots"):
     self.graphsLocation = os.path.join(rootPath, "data", dirName)
     self.cachedGraphs = {}
     self.alive = True
     self.purgeThread = threading.Thread(target=self.purgeExpired)
     self.purgeThread.setDaemon(1)
     self.purgeThread.start()
     self.__dataCache = DictCache()
     self.__graphCache = DictCache(deleteFunction=self._deleteGraph)
     self.__dataLifeTime = 600
     self.__graphLifeTime = 3600
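
Note: the examples on this page exercise only a small DictCache surface. The following sketch summarizes that surface as inferred from the call sites shown here (the import path is the usual DIRAC one; argument order is taken from the snippets, not from the DictCache source):

from DIRAC.Core.Utilities.DictCache import DictCache

cache = DictCache()                                  # plain cache
files = DictCache(deleteFunction=lambda v: None)     # cleanup callback fired on eviction (illustrative)

cache.add("someKey", 600, value={"a": 1})  # key, lifetime in seconds, optional payload
if cache.exists("someKey"):                # optionally exists(key, validSeconds)
    data = cache.get("someKey")            # payload, or None once expired
cache.purgeExpired()                       # evict expired entries, firing deleteFunction
cache.delete("someKey")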
Code example #2
 def __init__(self):
     self.graphsLocation = os.path.join(
         gConfig.getValue('/LocalSite/InstancePath', rootPath), 'data',
         'accountingPlots')
     self.cachedGraphs = {}
     self.alive = True
     self.purgeThread = threading.Thread(target=self.purgeExpired)
     self.purgeThread.setDaemon(1)
     self.purgeThread.start()
     self.__dataCache = DictCache()
     self.__graphCache = DictCache(deleteFunction=self._deleteGraph)
     self.__dataLifeTime = 600
     self.__graphLifeTime = 3600
Code example #3
File: GridPilotDirector.py Project: acasajus/DIRAC
    def __init__(self, submitPool):
        """
     Define some defaults and call parent __init__
    """
        self.gridEnv = GRIDENV

        self.cpuPowerRef = CPU_POWER_REF
        self.requirements = REQUIREMENTS
        self.rank = RANK
        self.fuzzyRank = FUZZY_RANK

        self.__failingWMSCache = DictCache()
        self.__ticketsWMSCache = DictCache()
        self.__listMatchWMSCache = DictCache()

        PilotDirector.__init__(self, submitPool)
Code example #4
 def __init__(self, plotsLocation=False):
     self.plotsLocation = plotsLocation
     self.alive = True
     self.__graphCache = DictCache(deleteFunction=_deleteGraph)
     self.__graphLifeTime = 600
     self.purgeThread = threading.Thread(target=self.purgeExpired)
     self.purgeThread.start()
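
The purgeExpired method that this thread (and the one in code example #1) runs is not shown in the snippet. A minimal sketch of what such a loop could look like inside the same class; the sleep period is an assumption and the real implementation may differ:

import time

    def purgeExpired(self):
        # Hypothetical body: evict expired graphs periodically
        # until the owner sets self.alive = False.
        while self.alive:
            time.sleep(600)  # assumed purge period
            self.__graphCache.purgeExpired()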
Code example #5
 def updateDelayCounters(self, siteName, jid):
     # Get the info from the CS
     siteSection = "%s/%s" % (self.__matchingDelaySection, siteName)
     result = self.__extractCSData(siteSection)
     if not result["OK"]:
         return result
     delayDict = result["Value"]
     # delayDict is something like { 'JobType' : { 'Merge' : 20, 'MCGen' : 1000 } }
     if not delayDict:
         return S_OK()
     attNames = []
     for attName in delayDict:
         if attName not in self.jobDB.jobAttributeNames:
             self.log.error("Attribute does not exist in the JobDB. Please fix it!", "(%s)" % attName)
         else:
             attNames.append(attName)
     result = self.jobDB.getJobAttributes(jid, attNames)
     if not result["OK"]:
         self.log.error("Error while retrieving attributes", "coming from %s: %s" % (siteSection, result["Message"]))
         return result
     atts = result["Value"]
     # Create the DictCache if not there
     if siteName not in self.delayMem:
         self.delayMem[siteName] = DictCache()
     # Update the counters
     delayCounter = self.delayMem[siteName]
     for attName in atts:
         attValue = atts[attName]
         if attValue in delayDict[attName]:
             delayTime = delayDict[attName][attValue]
             self.log.notice("Adding delay for %s/%s=%s of %s secs" % (siteName, attName, attValue, delayTime))
             delayCounter.add((attName, attValue), delayTime)
     return S_OK()
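
Note that delayCounter.add((attName, attValue), delayTime) passes only a key and a lifetime: no payload is needed, so the cache acts as a set of timed flags that __getDelayCondition (code example #24) later reads back with getKeys(). The pattern in isolation:

from DIRAC.Core.Utilities.DictCache import DictCache

delays = DictCache()
delays.add(("JobType", "Merge"), 20)  # flag expires after 20 seconds; value left unset
active = delays.getKeys()             # [("JobType", "Merge")] while the delay lasts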
Code example #6
    def __init__(self, lifeTime, updateFunc):
        """
    Constructor
    
    :Parameters:
      **lifeTime** - `int`
        Lifetime of the elements in the cache ( seconds ! )
      **updateFunc** - `function`
        This function MUST return a S_OK | S_ERROR object. In the case of the first,
        its value must be a dictionary.
    
    """

        # We add a random bias of up to 20% of the lifetime, so that if thousands of jobs
        # start at the same time, the caches will not all expire at the same time.
        randomLifeTimeBias = 0.2 * random.random()

        self.log = gLogger.getSubLogger(self.__class__.__name__)

        self.__lifeTime = int(lifeTime * (1 + randomLifeTimeBias))
        self.__updateFunc = updateFunc
        # The records returned from the cache must be valid at least 30 seconds.
        self.__validSeconds = 30

        # Cache
        self.__cache = DictCache()
        self.__cacheLock = LockRing()
        self.__cacheLock.getLock(self.__class__.__name__)
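
The effect of the random bias is easiest to see with numbers: with lifeTime = 600 the effective lifetime lands anywhere in [600, 720), so caches created in the same instant expire spread over a two-minute window.

import random

lifeTime = 600
randomLifeTimeBias = 0.2 * random.random()            # uniform in [0, 0.2)
effective = int(lifeTime * (1 + randomLifeTimeBias))  # 600..719 seconds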
Code example #7
File: NotificationHandler.py Project: TaykYoku/DIRAC
 def initializeHandler(cls, serviceInfo):
     """Handler initialization"""
     cls.notDB = NotificationDB()
     cls.mailCache = DictCache()
     gThreadScheduler.addPeriodicTask(3600,
                                      cls.notDB.purgeExpiredNotifications)
     gThreadScheduler.addPeriodicTask(3600, cls.mailCache.purgeExpired)  # pass the callable, not its result
     return S_OK()
Code example #8
File: FTS3Agent.py Project: thom991/DIRAC
    def getFTS3Context(self, username, group, ftsServer, threadID):
        """ Returns an fts3 context for a given user, group and fts server

        The context pool is per thread, and there is one context
        per tuple (user, group, server).
        We dump the proxy of a user to a file (shared by all the threads),
        and use it to make the context.
        The proxy needs a lifetime of at least 2h, is cached for 1.5h, and
        the lifetime of the context is 45mn

        :param username: name of the user
        :param group: group of the user
        :param ftsServer: address of the server

        :returns: S_OK with the context object

    """

        log = gLogger.getSubLogger("getFTS3Context", child=True)

        contextes = self._globalContextCache.setdefault(threadID, DictCache())

        idTuple = (username, group, ftsServer)
        log.debug("Getting context for %s" % (idTuple, ))

        if not contextes.exists(idTuple, 2700):
            res = getDNForUsername(username)
            if not res['OK']:
                return res
            # We take the first DN returned
            userDN = res['Value'][0]

            log.debug("UserDN %s" % userDN)

            # We dump the proxy to a file.
            # It has to have a lifetime of at least 2 hours
            # and we cache it for 1.5 hours
            res = gProxyManager.downloadVOMSProxyToFile(userDN,
                                                        group,
                                                        requiredTimeLeft=7200,
                                                        cacheTime=5400)
            if not res['OK']:
                return res

            proxyFile = res['Value']
            log.debug("Proxy file %s" % proxyFile)

            # We generate the context
            res = FTS3Job.generateContext(ftsServer, proxyFile)
            if not res['OK']:
                return res
            context = res['Value']

            # we add it to the cache for this thread for 1h
            contextes.add(idTuple, 3600, context)

        return S_OK(contextes.get(idTuple))
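
A hypothetical call site for the method above; the user name, group, and server URL are made up, and only the per-thread caching behaviour shown in the snippet is assumed:

import threading

res = agent.getFTS3Context("jdoe", "dirac_user",
                           "https://fts3.example.org:8446",
                           threadID=threading.current_thread().name)
if res["OK"]:
    context = res["Value"]  # served from the thread's DictCache for up to 45 minutes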
Code example #9
File: DIRACPilotDirector.py Project: mesmith75/DIRAC
  def __init__( self, submitPool ):
    """
     Define some defaults and call parent __init__
    """
    self.gridMiddleware    = 'DIRAC'

    PilotDirector.__init__( self, submitPool )

    self.computingElementList = COMPUTING_ELEMENTS
    self.computingElementDict = {}
    self.addComputingElement( self.computingElementList )

    self.siteName          = gConfig.getValue('/LocalSite/Site','')
    if not self.siteName:
      self.log.error( 'Can not run a Director if Site Name is not defined' )
      sys.exit()

    self.__failingCECache  = DictCache()
    self.__ticketsCECache  = DictCache()
Code example #10
class HttpStorageAccessHandler(BaseHTTPServer.BaseHTTPRequestHandler):

    register = DictCache()
    basePath = ''

    def do_GET(self):
        """Serve a GET request."""

        # Strip off leading slash
        key = self.path[1:]
        if not self.register.exists(key):
            self.send_error(401, "Invalid key provided, access denied")
            return None

        cache_path = self.register.get(key)
        fileList = os.listdir(cache_path)
        if len(fileList) == 1:
            path = os.path.join(cache_path, fileList[0])
        else:
            # multiple files, make archive
            unique = str(random.getrandbits(24))
            fileString = ' '.join(fileList)
            os.system('tar -cf %s/dirac_data_%s.tar --remove-files -C %s %s' %
                      (cache_path, unique, cache_path, fileString))
            path = os.path.join(cache_path, 'dirac_data_%s.tar' % unique)

        f = self.send_head(path)
        if f:
            shutil.copyfileobj(f, self.wfile)
            f.close()
            self.register.delete(key)

    def send_head(self, path):
        """ Prepare headers for the file download
    """
        #path = self.translate_path(self.path)
        f = None
        try:
            # Always read in binary mode. Opening files in text mode may cause
            # newline translations, making the actual size of the content
            # transmitted *less* than the content-length!
            f = open(path, 'rb')
        except IOError:
            self.send_error(404, "File not found")
            return None
        self.send_response(200)
        self.send_header("Content-type", 'application/octet-stream')
        fs = os.fstat(f.fileno())
        self.send_header("Content-Length", str(fs[6]))
        self.send_header("Last-Modified", self.date_time_string(fs.st_mtime))
        fname = os.path.basename(path)
        self.send_header("Last-Modified", self.date_time_string(fs.st_mtime))
        self.send_header("Content-Disposition", "filename=%s" % fname)
        self.end_headers()
        return f
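
How entries get into the class-level register cache is outside this snippet. A hedged sketch of the producer side, in which a service publishes a staged directory under a random one-time key (the key generation, lifetime, and path are all assumptions):

import random

cache_path = "/tmp/dirac_http_cache/XYZ"  # hypothetical staging directory
key = str(random.getrandbits(128))        # hypothetical one-time access token
HttpStorageAccessHandler.register.add(key, 1800, cache_path)  # valid for 30 minutes
# A client then fetches http://<host>/<key>; do_GET resolves the key to the
# directory, streams the file (or a tar of it), and deletes the key.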
Code example #11
File: Limiter.py Project: Eo300/DIRAC
  def __init__(self, jobDB=None, opsHelper=None):
    """ Constructor
    """
    self.__runningLimitSection = "JobScheduling/RunningLimit"
    self.__matchingDelaySection = "JobScheduling/MatchingDelay"
    self.csDictCache = DictCache()
    self.condCache = DictCache()
    self.delayMem = {}

    if jobDB:
      self.jobDB = jobDB
    else:
      self.jobDB = JobDB()

    self.log = gLogger.getSubLogger("Limiter")

    if opsHelper:
      self.__opsHelper = opsHelper
    else:
      self.__opsHelper = Operations()
Code example #12
File: GatewayService.py Project: sparsh35/DIRAC
 def __init__(self):
   """ Initialize like a real service
   """
   super(GatewayService, self).__init__(
       {'modName': GatewayService.GATEWAY_NAME,
        'loadName': GatewayService.GATEWAY_NAME,
        'standalone': True,
        'moduleObj': sys.modules[DIRAC.Core.DISET.private.GatewayService.GatewayService.__module__],
        'classObj': self.__class__})
   self.__delegatedCredentials = DictCache()
   self.__transferBytesLimit = 1024 * 1024 * 100
   # to be resolved
   self._url = None
   self._handler = None
   self._threadPool = None
   self._msgBroker = None
   self._msgForwarder = None
Code example #13
File: RSSCache.py Project: thom991/DIRAC
  def __init__( self, lifeTime, updateFunc = None, cacheHistoryLifeTime = None ):
    '''
    Constructor
    '''

    self.__lifeTime             = lifeTime
    # lifetime of the history in hours
    self.__cacheHistoryLifeTime = cacheHistoryLifeTime or 24
    self.__updateFunc           = updateFunc

    # RSSCache
    self.__rssCache       = DictCache()
    self.__rssCacheStatus = [] # ( updateTime, message )
    self.__rssCacheLock   = threading.Lock()

    # Create purgeThread
    self.__refreshStop    = False
    self.__refreshThread  = threading.Thread( target = self.__refreshCacheThreadRun )
    self.__refreshThread.setDaemon( True )
Code example #14
    def initializeHandler(cls, *args):
        """Initialization

        :return: S_OK()/S_ERROR()
        """
        # Let's try to connect to the database
        try:
            cls.__tokenDB = TokenDB(parentLogger=cls.log)
        except Exception as e:
            cls.log.exception(e)
            return S_ERROR(f"Could not connect to the database {repr(e)}")

        # Cache containing tokens for the scopes requested by the client
        cls.__tokensCache = DictCache()

        # The service plays an important OAuth 2.0 role, namely that of an Identity Provider client.
        # This allows tokens to be managed without the involvement of their owners.
        cls.idps = IdProviderFactory()
        return S_OK()
Code example #15
  def __init__( self, submitPool ):
    """
     Define the logger and some defaults
    """

    if submitPool == self.gridMiddleware:
      self.log = gLogger.getSubLogger( '%sPilotDirector' % self.gridMiddleware )
    else:
      self.log = gLogger.getSubLogger( '%sPilotDirector/%s' % ( self.gridMiddleware, submitPool ) )

    self.pilot = DIRAC_PILOT
    self.submitPoolOption = '-o /Resources/Computing/CEDefaults/SubmitPool=%s' % submitPool
    self.extraPilotOptions = []
    self.installVersion = DIRAC_VERSION
    self.installProject = DIRAC_PROJECT
    self.installation = DIRAC_INSTALLATION
    self.pilotExtensionsList = []

    self.virtualOrganization = VIRTUAL_ORGANIZATION
    self.install = DIRAC_INSTALL
    self.extraModules = DIRAC_MODULES
    self.maxJobsInFillMode = MAX_JOBS_IN_FILLMODE
    self.targetGrids = [ self.gridMiddleware ]


    self.enableListMatch = ENABLE_LISTMATCH
    self.listMatchDelay = LISTMATCH_DELAY
    self.listMatchCache = DictCache()

    self.privatePilotFraction = PRIVATE_PILOT_FRACTION

    self.errorClearTime = ERROR_CLEAR_TIME
    self.errorTicketTime = ERROR_TICKET_TIME
    self.errorMailAddress = DIRAC.errorMail
    self.alarmMailAddress = DIRAC.alarmMail
    self.mailFromAddress = FROM_MAIL

    self.siteClient = SiteStatus()

    if 'log' not in self.__dict__:
      self.log = gLogger.getSubLogger( 'PilotDirector' )
    self.log.info( 'Initialized' )
Code example #16
File: StorageUsageAgent.py Project: antolu/LHCbDIRAC
    def __init__(self, *args, **kwargs):
        ''' c'tor
        '''
        AgentModule.__init__(self, *args, **kwargs)

        self.__baseDir = '/lhcb'
        self.__baseDirLabel = "_".join(List.fromChar(self.__baseDir, "/"))
        self.__ignoreDirsList = []
        self.__keepDirLevels = 4

        self.__startExecutionTime = long(time.time())
        self.__dirExplorer = DirectoryExplorer(reverse=True)
        self.__processedDirs = 0
        self.__directoryOwners = {}
        self.catalog = FileCatalog()
        self.__maxToPublish = self.am_getOption('MaxDirectories', 5000)
        if self.am_getOption('DirectDB', False):
            self.storageUsage = StorageUsageDB()
        else:
            # Set a timeout of 0.1 seconds per directory (factor 5 margin)
            self.storageUsage = RPCClient('DataManagement/StorageUsage',
                                          timeout=self.am_getOption(
                                              'Timeout',
                                              int(self.__maxToPublish * 0.1)))
        self.activePeriod = self.am_getOption('ActivePeriod',
                                              self.activePeriod)
        self.dataLock = threading.Lock()
        self.replicaListLock = threading.Lock()
        self.proxyCache = DictCache(removeProxy)
        self.__noProxy = set()
        self.__catalogType = None
        self.__recalculateUsage = Operations().getValue(
            'DataManagement/RecalculateDirSize', False)
        self.enableStartupSleep = self.am_getOption('EnableStartupSleep',
                                                    self.enableStartupSleep)
        self.__publishDirQueue = {}
        self.__dirsToPublish = {}
        self.__replicaFilesUsed = set()
        self.__replicaListFilesDir = ""
Code example #17
from DIRAC.Core.Utilities.ReturnValues import returnSingleResult
from DIRAC.ConfigurationSystem.Client.Helpers import Registry

## globals
BASE_PATH = ""
HTTP_FLAG = False
HTTP_PORT = 9180
HTTP_PATH = ""


def purgeCacheDirectory(path):
    """ del recursively :path: """
    shutil.rmtree(path)


gRegister = DictCache(purgeCacheDirectory)


def initializeStorageElementProxyHandler(serviceInfo):
    """ handler initialisation """

    global BASE_PATH, HTTP_FLAG, HTTP_PORT, HTTP_PATH
    cfgPath = serviceInfo['serviceSectionPath']

    BASE_PATH = gConfig.getValue("%s/BasePath" % cfgPath, BASE_PATH)
    if not BASE_PATH:
        gLogger.error('Failed to get the base path')
        return S_ERROR('Failed to get the base path')

    BASE_PATH = os.path.abspath(BASE_PATH)
    gLogger.info('The base path obtained is %s. Checking its existence...' %
Code example #18
File: ProxyManagerClient.py Project: pmusset/DIRAC
 def __init__(self):
   self.__usersCache = DictCache()
   self.__proxiesCache = DictCache()
   self.__vomsProxiesCache = DictCache()
   self.__pilotProxiesCache = DictCache()
   self.__filesCache = DictCache(self.__deleteTemporalFile)
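
Code examples #1, #16, #17, and this one all pass a deleteFunction so that evicted entries release an external resource; code example #17 suggests the callback receives the cached value (there, a directory path). The __deleteTemporalFile body is not shown; a minimal sketch of the pattern with a hypothetical file-removing callback:

import os
from DIRAC.Core.Utilities.DictCache import DictCache

def _removeFile(path):
    # Invoked by DictCache when the entry expires or is purged
    if os.path.exists(path):
        os.unlink(path)

filesCache = DictCache(deleteFunction=_removeFile)
filesCache.add(("userDN", "group"), 5400, "/tmp/proxy-XYZ")  # value is the file to clean up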
Code example #19
File: IdProviderFactory.py Project: TaykYoku/DIRAC
 def __init__(self):
     """Standard constructor"""
     self.log = gLogger.getSubLogger(self.__class__.__name__)
     self.cacheMetadata = DictCache()
Code example #20
File: JobMonitor.py Project: nikolalazovski/DIRACWeb
class JobmonitorController(BaseController):
  __imgCache = DictCache()
################################################################################
  def display(self):
    pagestart = time()
    group = credentials.getSelectedGroup()
    if group == "visitor" and credentials.getUserDN == "":
      return render("/login.mako")
    c.select = self.__getSelectionData()
    if not c.select.has_key("extra"):
      groupProperty = credentials.getProperties(group)
      if ( "JobAdministrator" or "JobSharing" ) not in groupProperty: #len(groupProperty) == 1 and groupProperty[0] == "NormalUser":
        c.select["extra"] = {"owner":credentials.getUsername()}
    return render("jobs/JobMonitor.mako")
################################################################################
  def __getJobSummary(self,jobs,head):
    valueList = []
    for i in jobs:
      valueList.append({"id":str(i[2]),"status":str(i[6]),"minorStatus":str(i[10]),"applicationStatus":str(i[11]),"site":str(i[26]),"jobname":str(i[22]),"lastUpdate":str(i[25]),"owner":str(i[31]),"submissionTime":str(i[12]),"signTime":str(i[3])})
    return valueList
################################################################################
  @jsonify
  def submit(self):
    pagestart = time()
    RPC = getRPCClient("WorkloadManagement/JobMonitoring")
    user = str(credentials.getUsername())
    result = RPC.getOwners()
    if result["OK"]:
      defaultGroup = gConfig.getValue("/Registry/DefaultGroup","")
      if defaultGroup == "":
        return {"success":"false","error":"Option /Registry/DefaultGroup is undefined, please set the default group in the CS"}
      group = str(credentials.getSelectedGroup())
      groupProperty = credentials.getProperties(group)
      if user not in result["Value"] and ( "JobAdministrator" or "JobSharing" ) not in groupProperty:
        c.result = {"success":"false","error":"You don't have any jobs in the DIRAC system"}
        return c.result
    else:
      c.result = {"success":"false","error":result["Message"]}
      return c.result
    req = self.__request()
    gLogger.always("getJobPageSummaryWeb(%s,%s,%s,%s)" % (req,globalSort,pageNumber,numberOfJobs))
    result = RPC.getJobPageSummaryWeb(req,globalSort,pageNumber,numberOfJobs)
    gLogger.always(" - REZ: " %result)
    if result["OK"]:
      result = result["Value"]
      gLogger.info("ReS",result)
      if result.has_key("TotalRecords"):
        if  result["TotalRecords"] > 0:
          if result.has_key("ParameterNames") and result.has_key("Records"):
            if len(result["ParameterNames"]) > 0:
              if len(result["Records"]) > 0:
                c.result = []
                jobs = result["Records"]
                head = result["ParameterNames"]
                headLength = len(head)
                for i in jobs:
                  tmp = {}
                  for j in range(0,headLength):
                    tmp[head[j]] = i[j]
                  c.result.append(tmp)
                total = result["TotalRecords"]
                timestamp = Time.dateTime().strftime("%Y-%m-%d %H:%M [UTC]")
                if result.has_key("Extras"):
                  st = self.__dict2string(req)
                  extra = result["Extras"]
                  c.result = {"success":"true","result":c.result,"total":total,"extra":extra,"request":st,"date":timestamp}
                else:
                  c.result = {"success":"true","result":c.result,"total":total,"date":timestamp}
              else:
                c.result = {"success":"false","result":"","error":"There are no data to display"}
            else:
              c.result = {"success":"false","result":"","error":"ParameterNames field is missing"}
          else:
            c.result = {"success":"false","result":"","error":"Data structure is corrupted"}
        else:
          c.result = {"success":"false","result":"","error":"There were no data matching your selection"}
      else:
        c.result = {"success":"false","result":"","error":"Data structure is corrupted"}
    else:
      c.result = {"success":"false","error":result["Message"]}
    gLogger.info("\033[0;31mJOB SUBMIT REQUEST:\033[0m %s" % (time() - pagestart))
    return c.result
################################################################################
  def __dict2string(self,req):
    result = ""
    try:
      for key,value in req.iteritems():
        result = result + str(key) + ": " + ", ".join(value) + "; "
    except Exception, x:
      gLogger.info("\033[0;31m Exception: \033[0m %s" % x)
    result = result.strip()
    result = result[:-1]
    return result
Code example #21
File: SiteSummary.py Project: acasajus/DIRACWeb
########
from DIRAC.FrameworkSystem.Client.UserProfileClient import UserProfileClient
########

log = logging.getLogger(__name__)

global numberOfJobs
global pageNumber
global globalSort
numberOfJobs = 25
pageNumber = 0
globalSort = []

global imgCache
imgCache = DictCache()
#globalSort = [["SiteName","DESC"]]


class SitesummaryController(BaseController):
    ################################################################################
    #  def profile(self):
    #    upc = UserProfileClient( profileName, getRPCClient( "Framework/UserProfileManager" ) )
    #    upc.storeWebData( varNameInProfile, data )
    #    upc.retrieveWebData( varInProfile )
    ################################################################################
    def display(self):
        pagestart = time()
        c.select = self.__getSelectionData()
        gLogger.info("SELECTION RESULTS:", c.select)
        gLogger.info("\033[0;31mSITESUMMARY INDEX REQUEST:\033[0m %s" %
Code example #22
 def __init__( self ):
   Service.__init__( self, GatewayService.GATEWAY_NAME )
   self.__delegatedCredentials = DictCache()
   self.__transferBytesLimit = 1024 * 1024 * 100
Code example #23
class OAuthManagerClient(Client):
    """ Authentication manager

      Contains the IdPsCache cache, with the following structure:
      {
        <ID1>: {
          Providers: [ <identity providers> ],
          <identity provider>: [
            {
              <sessions number>: { <tokens> }
            },
            { ... }
          ],
          DNs: [
            <DN1>: {
              ProxyProvider: [ <proxy providers> ],
              VOMSRoles: [ <VOMSRoles> ],
              ...
            },
            <DN2>: { ... },
          ]
        },
        <ID2>: { ... },
      }
  """
    __metaclass__ = DIRACSingleton.DIRACSingleton

    IdPsCache = DictCache()

    def __init__(self, **kwargs):
        """ Constructor
    """
        super(OAuthManagerClient, self).__init__(**kwargs)
        self.setServer('Framework/OAuthManager')
        self.refreshIdPs()

    def refreshIdPs(self, IDs=None, sessionIDDict=None):
        """ Update cache from OAuthDB or dictionary

        :param list IDs: list of IDs
        :param dict sessionIDDict: session ID dictionary

        :return: S_OK()/S_ERROR()
    """
        # Update cache from dictionary
        if sessionIDDict:
            for ID, infoDict in sessionIDDict.items():
                self.IdPsCache.add(ID, 3600 * 24, value=infoDict)
            return S_OK()

        # Update cache from DB
        self.IdPsCache.add('Fresh', 60 * 15, value=True)
        result = self._getRPC().getIdPsIDs()
        if result['OK']:
            for ID, infoDict in result['Value'].items():
                if len(infoDict['Providers']) > 1:
                    gLogger.warn(
                        '%s user ID used by more than one provider:' % ID,
                        ', '.join(infoDict['Providers']))
                self.IdPsCache.add(ID, 3600 * 24, infoDict)
        return S_OK() if result['OK'] else result

    def getIdPsCache(self, IDs=None):
        """ Return IdPs cache

        :param list IDs: IDs

        :return: S_OK(dict)/S_ERROR() -- dictionary with the ID as key and the information collected from the IdP
        """
        # Update cache if not actual
        if not self.IdPsCache.get('Fresh'):
            result = self.refreshIdPs()
            if not result['OK']:
                return result
        __IdPsCache = self.IdPsCache.getDict()

        # Return cache without Fresh key
        __IdPsCache.pop('Fresh', None)
        if not IDs:
            return S_OK(__IdPsCache)
        resDict = {}
        for ID, idDict in __IdPsCache.items():
            if ID in IDs:
                resDict[ID] = idDict
        return S_OK(resDict)

    def getIDForSession(self, session):
        """ Find ID for session
    
        :param basestring session: session number
        
        :return: S_OK()/S_ERROR()
    """
        __IdPsCache = self.IdPsCache.getDict()
        __IdPsCache.pop('Fresh', None)
        for ID, infoDict in __IdPsCache.items():
            for prov in infoDict['Providers']:
                if session in infoDict[prov]:
                    return S_OK(ID)
        result = self.refreshIdPs()
        if not result['OK']:
            return result
        __IdPsCache = self.IdPsCache.getDict()
        __IdPsCache.pop('Fresh', None)
        for ID, infoDict in __IdPsCache.items():
            for prov in infoDict['Providers']:
                if session in infoDict[prov]:
                    return S_OK(ID)
        return S_ERROR('No ID found for session %s' % session)

    def parseAuthResponse(self, response, state):
        """ Fill session by user profile, tokens, comment, OIDC authorize status, etc.
        Prepare dict with user parameters, if DN is absent there try to get it.
        Create new or modify existend DIRAC user and store the session

        :param dict response: authorization response
        :param basestring state: session number

        :return: S_OK(dict)/S_ERROR()
    """
        result = self._getRPC().parseAuthResponse(response, state)
        if not result['OK']:
            return result
        if result['Value']['Status'] in ['authed', 'redirect']:
            refresh = self.refreshIdPs(
                sessionIDDict=result['Value']['sessionIDDict'])
            if not refresh['OK']:
                return refresh
        return result
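
refreshIdPs and getIdPsCache together implement a freshness sentinel: a dedicated Fresh key with a 15-minute lifetime marks the cache as up to date, and its silent expiry is what triggers the next refresh. The pattern in isolation (a sketch, not OAuthManagerClient code):

from DIRAC.Core.Utilities.DictCache import DictCache

cache = DictCache()

def getAll(repopulate):
    if not cache.get("Fresh"):            # sentinel expired or never set
        cache.add("Fresh", 15 * 60, value=True)
        repopulate(cache)                 # caller-supplied refill function
    data = cache.getDict()
    data.pop("Fresh", None)               # never expose the sentinel itself
    return data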
Code example #24
class Limiter(object):

  # static variables shared between all instances of this class
  csDictCache = DictCache()
  condCache = DictCache()
  delayMem = {}

  def __init__(self, jobDB=None, opsHelper=None):
    """ Constructor
    """
    self.__runningLimitSection = "JobScheduling/RunningLimit"
    self.__matchingDelaySection = "JobScheduling/MatchingDelay"

    if jobDB:
      self.jobDB = jobDB
    else:
      self.jobDB = JobDB()

    self.log = gLogger.getSubLogger("Limiter")

    if opsHelper:
      self.__opsHelper = opsHelper
    else:
      self.__opsHelper = Operations()

  def getNegativeCond(self):
    """ Get negative condition for ALL sites
    """
    orCond = self.condCache.get("GLOBAL")
    if orCond:
      return orCond
    negCond = {}
    # Run Limit
    result = self.__opsHelper.getSections(self.__runningLimitSection)
    sites = []
    if result['OK']:
      sites = result['Value']
    for siteName in sites:
      result = self.__getRunningCondition(siteName)
      if not result['OK']:
        continue
      data = result['Value']
      if data:
        negCond[siteName] = data
    # Delay limit
    result = self.__opsHelper.getSections(self.__matchingDelaySection)
    sites = []
    if result['OK']:
      sites = result['Value']
    for siteName in sites:
      result = self.__getDelayCondition(siteName)
      if not result['OK']:
        continue
      data = result['Value']
      if not data:
        continue
      if siteName in negCond:
        negCond[siteName] = self.__mergeCond(negCond[siteName], data)
      else:
        negCond[siteName] = data
    orCond = []
    for siteName in negCond:
      negCond[siteName]['Site'] = siteName
      orCond.append(negCond[siteName])
    self.condCache.add("GLOBAL", 10, orCond)
    return orCond

  def getNegativeCondForSite(self, siteName):
    """ Generate a negative query based on the limits set on the site
    """
    # Check if Limits are imposed onto the site
    negativeCond = {}
    if self.__opsHelper.getValue("JobScheduling/CheckJobLimits", True):
      result = self.__getRunningCondition(siteName)
      if result['OK']:
        negativeCond = result['Value']
      self.log.verbose('Negative conditions for site',
                       '%s after checking limits are: %s' % (siteName, str(negativeCond)))

    if self.__opsHelper.getValue("JobScheduling/CheckMatchingDelay", True):
      result = self.__getDelayCondition(siteName)
      if result['OK']:
        delayCond = result['Value']
        self.log.verbose('Negative conditions for site',
                         '%s after delay checking are: %s' % (siteName, str(delayCond)))
        negativeCond = self.__mergeCond(negativeCond, delayCond)

    if negativeCond:
      self.log.info('Negative conditions for site',
                    '%s are: %s' % (siteName, str(negativeCond)))

    return negativeCond

  def __mergeCond(self, negCond, addCond):
    """ Merge two negative dicts
    """
    # Merge both negative dicts
    for attr in addCond:
      if attr not in negCond:
        negCond[attr] = []
      for value in addCond[attr]:
        if value not in negCond[attr]:
          negCond[attr].append(value)
    return negCond

  def __extractCSData(self, section):
    """ Extract limiting information from the CS in the form:
        { 'JobType' : { 'Merge' : 20, 'MCGen' : 1000 } }
    """
    stuffDict = self.csDictCache.get(section)
    if stuffDict:
      return S_OK(stuffDict)

    result = self.__opsHelper.getSections(section)
    if not result['OK']:
      return result
    attribs = result['Value']
    stuffDict = {}
    for attName in attribs:
      result = self.__opsHelper.getOptionsDict("%s/%s" % (section, attName))
      if not result['OK']:
        return result
      attLimits = result['Value']
      try:
        attLimits = dict([(k, int(attLimits[k])) for k in attLimits])
      except Exception as excp:
        errMsg = "%s/%s has to contain numbers: %s" % (section, attName, str(excp))
        self.log.error(errMsg)
        return S_ERROR(errMsg)
      stuffDict[attName] = attLimits

    self.csDictCache.add(section, 300, stuffDict)
    return S_OK(stuffDict)

  def __getRunningCondition(self, siteName):
    """ Get extra conditions allowing site throttling
    """
    siteSection = "%s/%s" % (self.__runningLimitSection, siteName)
    result = self.__extractCSData(siteSection)
    if not result['OK']:
      return result
    limitsDict = result['Value']
    # limitsDict is something like { 'JobType' : { 'Merge' : 20, 'MCGen' : 1000 } }
    if not limitsDict:
      return S_OK({})
    # Check if the site is exceeding the given limits
    negCond = {}
    for attName in limitsDict:
      if attName not in self.jobDB.jobAttributeNames:
        self.log.error("Attribute does not exist",
                       "(%s). Check the job limits" % attName)
        continue
      cK = "Running:%s:%s" % (siteName, attName)
      data = self.condCache.get(cK)
      if not data:
        result = self.jobDB.getCounters(
            'Jobs', [attName], {
                'Site': siteName, 'Status': [
                    'Running', 'Matched', 'Stalled']})
        if not result['OK']:
          return result
        data = result['Value']
        data = dict([(k[0][attName], k[1]) for k in data])
        self.condCache.add(cK, 10, data)
      for attValue in limitsDict[attName]:
        limit = limitsDict[attName][attValue]
        running = data.get(attValue, 0)
        if running >= limit:
          self.log.verbose('Job Limit imposed',
                           'at %s on %s/%s=%d, %d jobs already deployed' % (siteName,
                                                                            attName, attValue, limit, running))
          if attName not in negCond:
            negCond[attName] = []
          negCond[attName].append(attValue)
    # negCond is something like : {'JobType': ['Merge']}
    return S_OK(negCond)

  def updateDelayCounters(self, siteName, jid):
    # Get the info from the CS
    siteSection = "%s/%s" % (self.__matchingDelaySection, siteName)
    result = self.__extractCSData(siteSection)
    if not result['OK']:
      return result
    delayDict = result['Value']
    # delayDict is something like { 'JobType' : { 'Merge' : 20, 'MCGen' : 1000 } }
    if not delayDict:
      return S_OK()
    attNames = []
    for attName in delayDict:
      if attName not in self.jobDB.jobAttributeNames:
        self.log.error("Attribute does not exist in the JobDB. Please fix it!",
                       "(%s)" % attName)
      else:
        attNames.append(attName)
    result = self.jobDB.getJobAttributes(jid, attNames)
    if not result['OK']:
      self.log.error("Error while retrieving attributes",
                     "coming from %s: %s" % (siteSection, result['Message']))
      return result
    atts = result['Value']
    # Create the DictCache if not there
    if siteName not in self.delayMem:
      self.delayMem[siteName] = DictCache()
    # Update the counters
    delayCounter = self.delayMem[siteName]
    for attName in atts:
      attValue = atts[attName]
      if attValue in delayDict[attName]:
        delayTime = delayDict[attName][attValue]
        self.log.notice("Adding delay for %s/%s=%s of %s secs" % (siteName, attName,
                                                                  attValue, delayTime))
        delayCounter.add((attName, attValue), delayTime)
    return S_OK()

  def __getDelayCondition(self, siteName):
    """ Get extra conditions allowing matching delay
    """
    if siteName not in self.delayMem:
      return S_OK({})
    lastRun = self.delayMem[siteName].getKeys()
    negCond = {}
    for attName, attValue in lastRun:
      if attName not in negCond:
        negCond[attName] = []
      negCond[attName].append(attValue)
    return S_OK(negCond)
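
A hedged sketch of how a matcher might drive this class: fetch the global negative conditions before matching, then report each dispatched job so the matching delays start counting (the site name and job ID are made up):

limiter = Limiter()
orCond = limiter.getNegativeCond()  # e.g. [{'Site': 'LCG.CERN.ch', 'JobType': ['Merge']}]
# ... exclude candidate jobs matching any of those conditions, then,
# once job 1234 has been dispatched to the site:
limiter.updateDelayCounters("LCG.CERN.ch", 1234)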
Code example #25
File: StorageElement.py Project: ptakha/DIRAC-1
 def __init__(self):
     self.seCache = DictCache()
Code example #26
File: FTS3Agent.py Project: rob-c/DIRAC
    def getFTS3Context(self, username, group, ftsServer, threadID):
        """ Returns an fts3 context for a given user, group and fts server

        The context pool is per thread, and there is one context
        per tuple (user, group, server).
        We dump the proxy of a user to a file (shared by all the threads),
        and use it to make the context.
        The proxy needs a lifetime of self.proxyLifetime, is cached for
        cacheTime = (2 * proxyLifetime / 3) - 10mn, and the lifetime of the context is 45mn.
        cacheTime is set this way because the FTS3 server will ask for a new proxy
        after 2/3 of the existing proxy's lifetime has elapsed, so we renew it just before.

        :param str username: name of the user
        :param str group: group of the user
        :param str ftsServer: address of the server
        :param str threadID: thread ID

        :returns: S_OK with the context object

    """

        log = gLogger.getSubLogger("getFTS3Context", child=True)

        contextes = self._globalContextCache.setdefault(threadID, DictCache())

        idTuple = (username, group, ftsServer)
        log.debug("Getting context for %s" % (idTuple, ))

        # We keep a context in the cache for 45 minutes
        # (so it needs to be valid at least 15 since we add it for one hour)
        if not contextes.exists(idTuple, 15 * 60):
            res = getDNForUsername(username)
            if not res['OK']:
                return res
            # We take the first DN returned
            userDN = res['Value'][0]

            log.debug("UserDN %s" % userDN)

            # We dump the proxy to a file.
            # It has to have a lifetime of self.proxyLifetime
            # Because the FTS3 servers cache it for 2/3rd of the lifetime
            # we should make our cache a bit less than 2/3rd of the lifetime
            cacheTime = int(2 * self.proxyLifetime / 3) - 600
            res = gProxyManager.downloadVOMSProxyToFile(
                userDN,
                group,
                requiredTimeLeft=self.proxyLifetime,
                cacheTime=cacheTime)
            if not res['OK']:
                return res

            proxyFile = res['Value']
            log.debug("Proxy file %s" % proxyFile)

            # We generate the context
            # In practice, the lifetime will be less than proxyLifetime
            # because we reuse a cached proxy. However, the cached proxy will
            # never force a redelegation, because it is recent enough for the FTS3 servers.
            # The delegation is forced once 2/3 of the lifetime has elapsed, and we fetch
            # a fresh proxy just before that. So no problem.
            res = FTS3Job.generateContext(ftsServer,
                                          proxyFile,
                                          lifetime=self.proxyLifetime)

            if not res['OK']:
                return res
            context = res['Value']

            # we add it to the cache for this thread for 1h
            contextes.add(idTuple, 3600, context)

        return S_OK(contextes.get(idTuple))
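
The relationship between the three lifetimes is easiest to check with numbers; assuming, purely for illustration, proxyLifetime = 43200 (12 hours):

proxyLifetime = 43200                          # assumed value: 12 h
cacheTime = int(2 * proxyLifetime / 3) - 600   # 28200 s = 7 h 50 mn
# The FTS3 server redelegates once 2/3 of the proxy lifetime (8 h) has elapsed;
# the local copy is dropped 10 minutes earlier, so a renewed proxy is already
# in place whenever redelegation is requested.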
Code example #27
class AccountingplotsController(BaseController):

    __keysCache = DictCache()

    def __getUniqueKeyValues(self, typeName):
        userGroup = getSelectedGroup()
        if 'NormalUser' in CS.getPropertiesForGroup(userGroup):
            cacheKey = (getUsername(), userGroup, getSelectedSetup(), typeName)
        else:
            cacheKey = (userGroup, getSelectedSetup(), typeName)
        data = AccountingplotsController.__keysCache.get(cacheKey)
        if not data:
            rpcClient = getRPCClient("Accounting/ReportGenerator")
            retVal = rpcClient.listUniqueKeyValues(typeName)
            if 'rpcStub' in retVal:
                del (retVal['rpcStub'])
            if not retVal['OK']:
                return retVal

            #Site ordering based on TierLevel / alpha
            if 'Site' in retVal['Value']:
                siteLevel = {}
                for siteName in retVal['Value']['Site']:
                    sitePrefix = siteName.split(".")[0].strip()
                    level = gConfig.getValue(
                        "/Resources/Sites/%s/%s/MoUTierLevel" %
                        (sitePrefix, siteName), 10)
                    if level not in siteLevel:
                        siteLevel[level] = []
                    siteLevel[level].append(siteName)
                orderedSites = []
                for level in sorted(siteLevel):
                    orderedSites.extend(sorted(siteLevel[level]))
                retVal['Value']['Site'] = orderedSites
            data = retVal
            AccountingplotsController.__keysCache.add(cacheKey, 300, data)
        return data

    def index(self):
        # Return a rendered template
        #   return render('/some/template.mako')
        # or, Return a response
        return defaultRedirect()

    def dataOperation(self):
        return self.__showPlotPage("DataOperation",
                                   "/systems/accounting/dataOperation.mako")

    def job(self):
        return self.__showPlotPage("Job", "/systems/accounting/job.mako")

    def WMSHistory(self):
        return self.__showPlotPage("WMSHistory",
                                   "/systems/accounting/WMSHistory.mako")

    def pilot(self):
        return self.__showPlotPage("Pilot", "/systems/accounting/Pilot.mako")

    def SRMSpaceTokenDeployment(self):
        return self.__showPlotPage(
            "SRMSpaceTokenDeployment",
            "/systems/accounting/SRMSpaceTokenDeployment.mako")

    def plotPage(self):
        try:
            typeName = str(request.params['typeName'])
        except:
            c.errorMessage = "Oops. missing type"
            return render("/error.mako")

        return self.__showPlotPage(typeName,
                                   "/systems/accounting/%s.mako" % typeName)

    def __showPlotPage(self, typeName, templateFile):
        #Get unique key values
        retVal = self.__getUniqueKeyValues(typeName)
        if not retVal['OK']:
            c.error = retVal['Message']
            return render("/error.mako")
        c.selectionValues = simplejson.dumps(retVal['Value'])
        #Cache for plotsList?
        data = AccountingplotsController.__keysCache.get("reportsList:%s" %
                                                         typeName)
        if not data:
            repClient = ReportsClient(
                rpcClient=getRPCClient("Accounting/ReportGenerator"))
            retVal = repClient.listReports(typeName)
            if not retVal['OK']:
                c.error = retVal['Message']
                return render("/error.mako")
            data = simplejson.dumps(retVal['Value'])
            AccountingplotsController.__keysCache.add(
                "reportsList:%s" % typeName, 300, data)
        c.plotsList = data
        return render(templateFile)

    @jsonify
    def getKeyValuesForType(self):
        try:
            typeName = str(request.params['typeName'])
        except:
            return S_ERROR("Missing or invalid type name!")
        retVal = self.__getUniqueKeyValues(typeName)
        if not retVal['OK'] and 'rpcStub' in retVal:
            del (retVal['rpcStub'])
        return retVal

    def __parseFormParams(self):
        params = request.params
        return parseFormParams(params)

    def __translateToExpectedExtResult(self, retVal):
        if retVal['OK']:
            return {'success': True, 'data': retVal['Value']['plot']}
        else:
            return {'success': False, 'errors': retVal['Message']}

    def __queryForPlot(self):
        retVal = self.__parseFormParams()
        if not retVal['OK']:
            return retVal
        params = retVal['Value']
        repClient = ReportsClient(
            rpcClient=getRPCClient("Accounting/ReportGenerator"))
        retVal = repClient.generateDelayedPlot(*params)
        return retVal

    def getPlotData(self):
        retVal = self.__parseFormParams()
        if not retVal['OK']:
            c.error = retVal['Message']
            return render("/error.mako")
        params = retVal['Value']
        repClient = ReportsClient(
            rpcClient=getRPCClient("Accounting/ReportGenerator"))
        retVal = repClient.getReport(*params)
        if not retVal['OK']:
            c.error = retVal['Message']
            return render("/error.mako")
        rawData = retVal['Value']
        groupKeys = rawData['data'].keys()
        groupKeys.sort()
        if 'granularity' in rawData:
            granularity = rawData['granularity']
            data = rawData['data']
            tS = int(Time.toEpoch(params[2]))
            timeStart = tS - tS % granularity
            strData = "epoch,%s\n" % ",".join(groupKeys)
            for timeSlot in range(timeStart, int(Time.toEpoch(params[3])),
                                  granularity):
                lineData = [str(timeSlot)]
                for key in groupKeys:
                    if timeSlot in data[key]:
                        lineData.append(str(data[key][timeSlot]))
                    else:
                        lineData.append("")
                strData += "%s\n" % ",".join(lineData)
        else:
            strData = "%s\n" % ",".join(groupKeys)
            strData += ",".join([str(rawData['data'][k]) for k in groupKeys])
        response.headers['Content-type'] = 'text/csv'
        response.headers[
            'Content-Disposition'] = 'attachment; filename="%s.csv"' % md5(
                str(params)).hexdigest()
        response.headers['Content-Length'] = len(strData)
        return strData

    @jsonify
    def generatePlot(self):
        return self.__translateToExpectedExtResult(self.__queryForPlot())

    def generatePlotAndGetHTML(self):
        retVal = self.__queryForPlot()
        if not retVal['OK']:
            return "<h2>Can't regenerate plot: %s</h2>" % retVal['Message']
        return "<img src='getPlotImg?file=%s'/>" % retVal['Value']['plot']

    def getPlotImg(self):
        """
    Get plot image
    """
        if 'file' not in request.params:
            c.error = "Maybe you forgot the file?"
            return render("/error.mako")
        plotImageFile = str(request.params['file'])
        if plotImageFile.find(".png") < -1:
            c.error = "Not a valid image!"
            return render("/error.mako")
        transferClient = getTransferClient("Accounting/ReportGenerator")
        tempFile = tempfile.TemporaryFile()
        retVal = transferClient.receiveFile(tempFile, plotImageFile)
        if not retVal['OK']:
            c.error = retVal['Message']
            return render("/error.mako")
        tempFile.seek(0)
        data = tempFile.read()
        response.headers['Content-type'] = 'image/png'
        response.headers[
            'Content-Disposition'] = 'attachment; filename="%s.png"' % md5(
                plotImageFile).hexdigest()
        response.headers['Content-Length'] = len(data)
        response.headers['Content-Transfer-Encoding'] = 'Binary'
        response.headers[
            'Cache-Control'] = "no-cache, no-store, must-revalidate, max-age=0"
        response.headers['Pragma'] = "no-cache"
        response.headers['Expires'] = (
            datetime.datetime.utcnow() +
            datetime.timedelta(minutes=10)).strftime("%d %b %Y %H:%M:%S GMT")
        return data

    @jsonify
    def getPlotListAndSelectionValues(self):
        result = {}
        try:
            typeName = str(request.params['typeName'])
        except:
            return S_ERROR("Missing or invalid type name!")
        retVal = self.__getUniqueKeyValues(typeName)
        if not retVal['OK'] and 'rpcStub' in retVal:
            del (retVal['rpcStub'])
            return retVal
        selectionValues = retVal['Value']
        data = AccountingplotsController.__keysCache.get("reportsList:%s" %
                                                         typeName)
        if not data:
            repClient = ReportsClient(
                rpcClient=getRPCClient("Accounting/ReportGenerator"))
            retVal = repClient.listReports(typeName)
            if not retVal['OK']:
                return retVal
            data = simplejson.dumps(retVal['Value'])
            AccountingplotsController.__keysCache.add(
                "reportsList:%s" % typeName, 300, data)
        try:
            plotsList = eval(data)
        except:
            return S_ERROR('Failed to convert a string to a list!')
        return S_OK({'SelectionData': selectionValues, 'PlotList': plotsList})
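
The cacheKey construction in __getUniqueKeyValues is worth noting: NormalUser results are filtered per user and therefore keyed per user, while privileged groups share one entry per group and setup, so a 300-second DictCache entry can serve many users without leaking user-filtered selections between them. The idea distilled into a sketch:

def selectionCacheKey(username, group, setup, typeName, properties):
    # Per-user entry for filtered results, shared entry otherwise
    if "NormalUser" in properties:
        return (username, group, setup, typeName)
    return (group, setup, typeName)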
Code example #28
class MetacatalogueController(BaseController):
    __imgCache = DictCache()

    ################################################################################
    def display(self):
        pagestart = time()
        group = credentials.getSelectedGroup()
        if group == "visitor" and credentials.getUserDN == "":
            return render("/login.mako")
#    c.select = self.__getSelectionData()
#    if not c.select.has_key("extra"):
#      groupProperty = credentials.getProperties(group)
#      if ( "JobAdministrator" or "JobSharing" ) not in groupProperty: #len(groupProperty) == 1 and groupProperty[0] == "NormalUser":
#        c.select["extra"] = {"owner":credentials.getUsername()}
        return render("data/MetaCatalogue.mako")
################################################################################

    @jsonify
    def submit(self):
        RPC = getRPCClient("DataManagement/FileCatalog")
        req = self.__request()
        gLogger.debug("submit: incoming request %s" % req)
        result = RPC.findFilesByMetadata(req["selection"], req["path"])
        gLogger.debug("submit: result of findFilesByMetadata %s" % result)
        if not result["OK"]:
            gLogger.error("submit: %s" % result["Message"])
            return {"success": "false", "error": result["Message"]}
        result = result["Value"]
        if not len(result) > 0:
            return {"success": "true", "result": {}, "total": 0}
        callback = list()
        for key, value in result.items():
            if len(value) > 0:
                for j in value:
                    callback.append({"filename": key + "/" + j})
        return {"success": "true", "result": callback, "total": len(callback)}
################################################################################

    def __request(self):
        req = {"selection": {}, "path": "/"}
        global R_NUMBER
        global P_NUMBER
        R_NUMBER = 25
        if request.params.has_key("limit") and len(
                request.params["limit"]) > 0:
            R_NUMBER = int(request.params["limit"])
        P_NUMBER = 0
        if request.params.has_key("start") and len(
                request.params["start"]) > 0:
            P_NUMBER = int(request.params["start"])
        result = gConfig.getOption("/Website/ListSeparator")
        if result["OK"]:
            separator = result["Value"]
        else:
            separator = ":::"
        RPC = getRPCClient("DataManagement/FileCatalog")
        result = RPC.getMetadataFields()
        gLogger.debug("request: %s" % result)
        if not result["OK"]:
            gLogger.error("request: %s" % result["Message"])
            return req
        result = result["Value"]
        if not result.has_key("FileMetaFields"):
            error = "Service response has no FileMetaFields key. Return empty dict"
            gLogger.error("request: %s" % error)
            return req
        if not result.has_key("DirectoryMetaFields"):
            error = "Service response has no DirectoryMetaFields key. Return empty dict"
            gLogger.error("request: %s" % error)
            return req
        filemeta = result["FileMetaFields"]
        dirmeta = result["DirectoryMetaFields"]
        meta = []
        for key, value in dirmeta.items():
            meta.append(key)
        gLogger.always("request: metafields: %s " % meta)
        for i in request.params:
            tmp = str(i).split('.')
            if len(tmp) < 3:
                continue
            logic = tmp[1]
            if not logic in ["=", "!=", ">=", "<=", ">", "<"]:
                gLogger.always("Operand '%s' is not supported " % logic)
                continue
            name = ''.join(tmp[2:])
            if name in meta:
                if not req["selection"].has_key(name):
                    req["selection"][name] = dict()
                value = str(request.params[i]).split(separator)
                gLogger.always("Value for metafield %s: %s " % (name, value))
                if not logic in ["=", "!="]:
                    if len(value) > 1:
                        gLogger.always(
                            "List of values is not supported for %s " % logic)
                        continue
                    value = value[0]
                    req["selection"][name][logic] = value
                else:
                    if not req["selection"][name].has_key(logic):
                        req["selection"][name][logic] = value
                        continue
                    for j in value:
                        req["selection"][name][logic].append(j)
        if request.params.has_key("path"):
            req["path"] = request.params["path"]
        gLogger.always(" REQ: ", req)
        return req
################################################################################

    @jsonify
    def action(self):
        pagestart = time()
        if request.params.has_key("getSelector") and len(
                request.params["getSelector"]) > 0:
            return self.__getSelector(str(request.params["getSelector"]))
        if request.params.has_key("getSelectorGrid"):
            return self.__getSelectorGrid()
        elif request.params.has_key("getCache"):
            return self.__getMetaCache(str(request.params["getCache"]))
        elif request.params.has_key("getMeta") and len(
                request.params["getMeta"]) > 0:
            return self.__getMetadata(str(request.params["getMeta"]))
        elif request.params.has_key("getFile") and len(
                request.params["getFile"]) > 0:
            return self.__prepareURL(str(request.params["getFile"]))
        else:
            return {
                "success":
                "false",
                "error":
                "The request parameters can not be recognized or they are not defined"
            }
################################################################################

    def __getMetaCache(self, param):
        result = {
            "EvtType": [{
                "Name": "aa_e1e1e3e3_o"
            }, {
                "Name": "Z_uds"
            }],
            "NumberOfEvents": [{
                "Name": 10
            }, {
                "Name": 1500000
            }],
            "BXoverlayed": [{
                "Name": 60
            }],
            "Polarisation": [{
                "Name": "m80p20"
            }, {
                "Name": "p80m20"
            }],
            "Datatype": [{
                "Name": "DST"
            }, {
                "Name": "gen"
            }],
            "Luminosity": [{
                "Name": 98.76
            }, {
                "Name": 294.4
            }],
            "Energy": [{
                "Name": "1.4tev"
            }, {
                "Name": "1000"
            }],
            "MachineParams": [{
                "Name": "B1b_ws"
            }],
            "DetectorType": [{
                "Name": "ILD"
            }, {
                "Name": "SIM"
            }],
            "Machine": [{
                "Name": "3tev"
            }, {
                "Name": "ilc"
            }],
            "Owner": [{
                "Name": "alucacit"
            }, {
                "Name": "yimingli"
            }],
            "DetectorModel": [{
                "Name": "clic_sid_cdr"
            }, {
                "Name": "clic_sid_cdr3"
            }],
            "JobType": [{
                "Name": "gen"
            }]
        }
        return {"success": "true", "result": result}
################################################################################

    def __prepareURL(self, files):

        files = files.split(",")

        if not len(files) > 0:
            return {"success": "false", "error": "No LFN given"}
        se = getRPCClient("DataManagement/StorageElementProxy")
        result = se.prepareFileForHTTP(files)
        gLogger.always(" *** ", result)
        if not result["OK"]:
            return {"success": "false", "error": result["Message"]}
        httpURLs = result['HttpURL']
        httpKey = result['HttpKey']
        return {
            "success": "true",
            "result": {
                "url": httpURLs,
                "cookie": httpKey
            }
        }
################################################################################

    def __getMetadata(self, key=False):
        if not key:
            return {"success": "false", "error": ""}
        RPC = getRPCClient("DataManagement/FileCatalog")
        result = RPC.getCompatibleMetadata({})
        if not result["OK"]:
            return {"success": "false", "error": result["Message"]}
        result = result["Value"]
        if result.has_key(key):
            result = result[key]
        callback = []
        for i in result:
            callback.append({"name": i})
        return {"success": "true", "result": callback}
################################################################################

    def __getSelector(self, select="All"):
        RPC = getRPCClient("DataManagement/FileCatalog")
        result = RPC.getMetadataFields()
        if not result["OK"]:
            return {"success": "false", "error": result["Message"]}
        result = result["Value"]
        gLogger.always(" * * * ", result)
        for key, value in result.items():
            result[key] = value.lower()
        gLogger.always(" * * * ", result)
        return {"success": "true", "result": result}


################################################################################

    def __getSelectorGrid(self):
        """
    Get the metadata tags and prepare them to be used by the ExtJS AJAX store
    """
        RPC = getRPCClient("DataManagement/FileCatalog")
        result = RPC.getMetadataFields()
        gLogger.debug("request: %s" % result)
        if not result["OK"]:
            gLogger.error("getSelectorGrid: %s" % result["Message"])
            return {"success": "false", "error": result["Message"]}
        result = result["Value"]
        callback = list()
        if "FileMetaFields" not in result:
            error = "Service response has no FileMetaFields key"
            gLogger.error("getSelectorGrid: %s" % error)
            return {"success": "false", "error": error}
        if "DirectoryMetaFields" not in result:
            error = "Service response has no DirectoryMetaFields key"
            gLogger.error("getSelectorGrid: %s" % error)
            return {"success": "false", "error": error}
        # File metadata fields are always rendered as plain labels
        for key in result["FileMetaFields"]:
            callback.append({"Name": key, "Type": "label"})
        gLogger.debug("getSelectorGrid: FileMetaFields callback %s" % callback)
        # Directory metadata fields keep their declared type, lower-cased
        for key, value in result["DirectoryMetaFields"].items():
            callback.append({"Name": key, "Type": value.lower()})
        gLogger.debug("getSelectorGrid: Resulting callback %s" % callback)
        return {"success": "true", "result": callback, "total": len(callback)}
Code Example #29
0
File: MatcherHandler.py Project: ptakha/DIRAC-1
class Limiter:

    __csDictCache = DictCache()
    __condCache = DictCache()
    __delayMem = {}

    def __init__(self, opsHelper):
        """ Constructor
    """
        self.__runningLimitSection = "JobScheduling/RunningLimit"
        self.__matchingDelaySection = "JobScheduling/MatchingDelay"
        self.__opsHelper = opsHelper

    def checkJobLimit(self):
        return self.__opsHelper.getValue("JobScheduling/CheckJobLimits", True)

    def checkMatchingDelay(self):
        return self.__opsHelper.getValue("JobScheduling/CheckMatchingDelay",
                                         True)

    def getNegativeCond(self):
        """ Get negative condition for ALL sites
    """
        orCond = Limiter.__condCache.get("GLOBAL")
        if orCond:
            return orCond
        negCond = {}
        # Run Limit
        result = self.__opsHelper.getSections(self.__runningLimitSection)
        sites = []
        if result['OK']:
            sites = result['Value']
        for siteName in sites:
            result = self.__getRunningCondition(siteName)
            if not result['OK']:
                continue
            data = result['Value']
            if data:
                negCond[siteName] = data
        # Delay limit
        result = self.__opsHelper.getSections(self.__matchingDelaySection)
        sites = []
        if result['OK']:
            sites = result['Value']
        for siteName in sites:
            result = self.__getDelayCondition(siteName)
            if not result['OK']:
                continue
            data = result['Value']
            if not data:
                continue
            if siteName in negCond:
                negCond[siteName] = self.__mergeCond(negCond[siteName], data)
            else:
                negCond[siteName] = data
        orCond = []
        for siteName in negCond:
            negCond[siteName]['Site'] = siteName
            orCond.append(negCond[siteName])
        Limiter.__condCache.add("GLOBAL", 10, orCond)
        return orCond
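    # Hedged worked example (site and attribute names invented): the cached
    # "GLOBAL" entry is a list of per-site negative conditions such as
    #   [{'Site': 'LCG.CERN.ch', 'JobType': ['Merge', 'MCGen']},
    #    {'Site': 'LCG.IN2P3.fr', 'Owner': ['someuser']}]
    # and is recomputed at most every 10 seconds, the DictCache lifetime
    # passed to add() above.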

    def getNegativeCondForSite(self, siteName):
        """ Generate a negative query based on the limits set on the site
    """
        # Check if Limits are imposed onto the site
        negativeCond = {}
        if self.checkJobLimit():
            result = self.__getRunningCondition(siteName)
            if result['OK']:
                negativeCond = result['Value']
            gLogger.verbose(
                'Negative conditions for site %s after checking limits are: %s'
                % (siteName, str(negativeCond)))

        if self.checkMatchingDelay():
            result = self.__getDelayCondition(siteName)
            if result['OK']:
                delayCond = result['Value']
                gLogger.verbose(
                    'Negative conditions for site %s after delay checking are: %s'
                    % (siteName, str(delayCond)))
                negativeCond = self.__mergeCond(negativeCond, delayCond)

        if negativeCond:
            gLogger.info('Negative conditions for site %s are: %s' %
                         (siteName, str(negativeCond)))

        return negativeCond

    def __mergeCond(self, negCond, addCond):
        """ Merge two negative condition dicts, unioning the per-attribute value lists
    """
        for attr in addCond:
            if attr not in negCond:
                negCond[attr] = []
            for value in addCond[attr]:
                if value not in negCond[attr]:
                    negCond[attr].append(value)
        return negCond
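    # Worked example (hedged, values invented):
    #   __mergeCond({'JobType': ['Merge']},
    #               {'JobType': ['MCGen'], 'Owner': ['someuser']})
    #   => {'JobType': ['Merge', 'MCGen'], 'Owner': ['someuser']}
    # Duplicate values are skipped, so repeated merges are idempotent.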

    def __extractCSData(self, section):
        """ Extract limiting information from the CS in the form:
        { 'JobType' : { 'Merge' : 20, 'MCGen' : 1000 } }
    """
        stuffDict = Limiter.__csDictCache.get(section)
        if stuffDict:
            return S_OK(stuffDict)

        result = self.__opsHelper.getSections(section)
        if not result['OK']:
            return result
        attribs = result['Value']
        stuffDict = {}
        for attName in attribs:
            result = self.__opsHelper.getOptionsDict("%s/%s" %
                                                     (section, attName))
            if not result['OK']:
                return result
            attLimits = result['Value']
            try:
                attLimits = dict((k, int(attLimits[k])) for k in attLimits)
            except Exception as excp:
                errMsg = "%s/%s has to contain numbers: %s" % (
                    section, attName, str(excp))
                gLogger.error(errMsg)
                return S_ERROR(errMsg)
            stuffDict[attName] = attLimits

        Limiter.__csDictCache.add(section, 300, stuffDict)
        return S_OK(stuffDict)
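    # Hedged example (site and numbers invented) of the CS layout this parses:
    #   JobScheduling/RunningLimit/LCG.CERN.ch/JobType/Merge = 20
    #   JobScheduling/RunningLimit/LCG.CERN.ch/JobType/MCGen = 1000
    # __extractCSData("JobScheduling/RunningLimit/LCG.CERN.ch") then returns
    #   S_OK({'JobType': {'Merge': 20, 'MCGen': 1000}}), cached for 300 s.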
Code Example #30
0
File: NotificationHandler.py Project: sparsh35/DIRAC
import six
from DIRAC import gConfig, gLogger, S_OK, S_ERROR

from DIRAC.Core.DISET.RequestHandler import RequestHandler
from DIRAC.Core.Utilities.Mail import Mail
from DIRAC.Core.Utilities.ThreadScheduler import gThreadScheduler
from DIRAC.Core.Security import Properties
from DIRAC.ConfigurationSystem.Client import PathFinder
from DIRAC.FrameworkSystem.DB.NotificationDB import NotificationDB
from DIRAC.Core.Utilities.DictCache import DictCache

__RCSID__ = "$Id$"

gNotDB = None
gMailSet = set()
gMailCache = DictCache()


def purgeDelayedEMails():
    """ Purges the emails accumulated in gMailSet
  """
    gMailSet.clear()
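# Hedged sketch, not shown in this excerpt: the handler would typically
# register the purge as a periodic task via the imported gThreadScheduler;
# the one-hour period below is an assumption, not taken from this file.
# gThreadScheduler.addPeriodicTask(3600, purgeDelayedEMails)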


class NotificationHandler(RequestHandler):
    @classmethod
    def initializeHandler(cls, serviceInfo):
        """ Handler initialization
    """
        global gNotDB
        gNotDB = NotificationDB()