Example #1
    def web_getSelectionData(self):

        sData = self.getSessionData()

        callback = {}

        user = sData["user"]["username"]
        if user == "Anonymous":
            self.finish({
                "success": "false",
                "error": "You are not authorize to access these data"
            })

        if len(self.request.arguments) > 0:
            tmp = {}
            for i in self.request.arguments:
                tmp[i] = str(self.request.arguments[i])
            callback["extra"] = tmp
        rpcClient = RPCClient("Framework/ProxyManager")
        retVal = yield self.threadTask(rpcClient.getContents, {}, [], 0, 0)
        if not retVal["OK"]:
            self.finish({"success": "false", "error": retVal["Message"]})
        data = retVal["Value"]
        users = []
        groups = []
        for record in data["Records"]:
            users.append(str(record[0]))
            groups.append(str(record[2]))
        users = uniqueElements(users)
        groups = uniqueElements(groups)
        users.sort()
        groups.sort()
        users = map(lambda x: [x], users)
        groups = map(lambda x: [x], groups)

        callback["username"] = users
        callback["usergroup"] = groups
        result = gConfig.getOption(
            "/WebApp/ProxyManagementMonitoring/TimeSpan",
            "86400,432000,604800,2592000")
        if result["OK"]:
            tmp = result["Value"]
            tmp = tmp.split(", ")
            if len(tmp) > 0:
                timespan = []
                for i in tmp:
                    human_readable = self.__humanize_time(i)
                    timespan.append([i, human_readable])
            else:
                timespan = [["Nothing to display"]]
        else:
            timespan = [["Error during RPC call"]]
        callback["expiredBefore"] = timespan
        callback["expiredAfter"] = timespan
        self.finish(callback)
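
The handler above reduces to a small data-shaping step: collect usernames and groups from the proxy records, deduplicate them, sort them, and wrap each value in a one-element list for the selection widget. A minimal standalone sketch of that step, using a hypothetical order-preserving dedup helper in place of DIRAC's uniqueElements and invented sample records:

def unique_elements(seq):
    # hypothetical stand-in for DIRAC's uniqueElements: keep first occurrence, drop duplicates
    seen = set()
    return [x for x in seq if not (x in seen or seen.add(x))]

# invented sample proxy records: (username, DN, group)
records = [("user_a", "/DC=org/CN=a", "dirac_user"),
           ("user_a", "/DC=org/CN=a", "dirac_admin"),
           ("user_b", "/DC=org/CN=b", "dirac_user")]

users = sorted(unique_elements(str(r[0]) for r in records))
groups = sorted(unique_elements(str(r[2]) for r in records))

# each value wrapped in a one-element list, as the selection widget expects
callback = {"username": [[u] for u in users], "usergroup": [[g] for g in groups]}
print(callback)
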
Example #2
  def web_getSelectionData( self ):

    sData = self.getSessionData()

    callback = {}

    user = sData["user"]["username"]
    if user == "Anonymous":
      self.finish( {"success":"false", "error":"You are not authorize to access these data"} )

    if len( self.request.arguments ) > 0:
      tmp = {}
      for i in self.request.arguments:
        tmp[i] = str( self.request.arguments[i] )
      callback["extra"] = tmp
    rpcClient = RPCClient( "Framework/ProxyManager" )
    retVal = yield self.threadTask( rpcClient.getContents, {}, [], 0, 0 )
    if not retVal[ "OK" ]:
      self.finish( {"success":"false", "error":retVal["Message"]} )
    data = retVal[ "Value" ]
    users = []
    groups = []
    for record in data[ "Records" ]:
      users.append( str( record[0] ) )
      groups.append( str( record[2] ) )
    users = uniqueElements( users )
    groups = uniqueElements( groups )
    users.sort()
    groups.sort()
    users = map( lambda x: [x], users )
    groups = map( lambda x: [x], groups )

    callback["username"] = users
    callback["usergroup"] = groups
    result = gConfig.getOption( "/WebApp/ProxyManagementMonitoring/TimeSpan", "86400,432000,604800,2592000" )
    if result["OK"]:
      tmp = result["Value"]
      tmp = tmp.split( ", " )
      if len( tmp ) > 0:
        timespan = []
        for i in tmp:
          human_readable = self.__humanize_time( i )
          timespan.append( [i, human_readable] )
      else:
        timespan = [["Nothing to display"]]
    else:
      timespan = [["Error during RPC call"]]
    callback["expiredBefore"] = timespan
    callback["expiredAfter"] = timespan
    self.finish( callback )
Example #3
 def _prepareRemoteHost(self, host=None ):
   """ Prepare remote directories and upload control script 
   """
   
   ssh = SSH( host = host, parameters = self.ceParameters )
   
   # Make remote directories
   dirTuple = tuple ( uniqueElements( [self.sharedArea, 
                                       self.executableArea, 
                                       self.infoArea, 
                                       self.batchOutput, 
                                       self.batchError,
                                       self.workArea] ) )
   nDirs = len( dirTuple )
   cmd = 'mkdir -p %s; '*nDirs % dirTuple
   self.log.verbose( 'Creating working directories on %s' % self.ceParameters['SSHHost'] )
   result = ssh.sshCall( 30, cmd )
   if not result['OK']:
     self.log.warn( 'Failed creating working directories: %s' % result['Message'][1] )
     return result
   status,output,error = result['Value']
   if status == -1:
     self.log.warn( 'Timeout while creating directories' )
     return S_ERROR( 'Timeout while creating directories' )
   if "cannot" in output:
     self.log.warn( 'Failed to create directories: %s' % output )
     return S_ERROR( 'Failed to create directories: %s' % output )
   
   # Upload the control script now
   sshScript = os.path.join( rootPath, "DIRAC", "Resources", "Computing", "remote_scripts", self.controlScript )
   self.log.verbose( 'Uploading %s script to %s' % ( self.controlScript, self.ceParameters['SSHHost'] ) )
   result = ssh.scpCall( 30, sshScript, self.sharedArea )
   if not result['OK']:
     self.log.warn( 'Failed uploading control script: %s' % result['Message'][1] )
     return result
   status,output,error = result['Value']
   if status != 0:
     if status == -1:
       self.log.warn( 'Timeout while uploading control script' )
       return S_ERROR( 'Timeout while uploading control script' )
     else:  
       self.log.warn( 'Failed uploading control script: %s' % output )
       return S_ERROR( 'Failed uploading control script' )
     
   # Chmod the control scripts
   self.log.verbose( 'Chmod +x control script' )
   result = ssh.sshCall( 10, "chmod +x %s/%s" % ( self.sharedArea, self.controlScript ) )
   if not result['OK']:
     self.log.warn( 'Failed chmod control script: %s' % result['Message'][1] )
     return result
   status,output,error = result['Value']
   if status != 0:
     if status == -1:
       self.log.warn( 'Timeout while chmod control script' )
       return S_ERROR( 'Timeout while chmod control script' )
     else:  
       self.log.warn( 'Failed chmod of control script: %s' % output )
       return S_ERROR( 'Failed chmod of control script' )
   
   return S_OK()
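
The directory-creation step above builds a single shell command by repeating a 'mkdir -p %s; ' template once per unique directory and filling it from a tuple. A small sketch of just that command construction, with a local order-preserving dedup in place of uniqueElements and invented paths:

def build_mkdir_command(dirs):
    # drop duplicate paths while keeping order (the role uniqueElements plays above)
    unique_dirs = []
    for d in dirs:
        if d not in unique_dirs:
            unique_dirs.append(d)
    # repeat the template once per directory and fill it from a tuple, as in _prepareRemoteHost
    return 'mkdir -p %s; ' * len(unique_dirs) % tuple(unique_dirs)

print(build_mkdir_command(["/home/dirac/shared", "/home/dirac/exec", "/home/dirac/shared"]))
# -> mkdir -p /home/dirac/shared; mkdir -p /home/dirac/exec;
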
Example #4
def getCompatiblePlatforms( originalPlatforms ):
  """ Get a list of platforms compatible with the given list
  """
  if type( originalPlatforms ) == type( ' ' ):
    platforms = [originalPlatforms]
  else:
    platforms = list( originalPlatforms )

  platforms = list( platform.replace( ' ', '' ) for platform in platforms )

  result = gConfig.getOptionsDict( '/Resources/Computing/OSCompatibility' )
  if not ( result['OK'] and result['Value'] ):
    return S_ERROR( "OS compatibility info not found" )

  platformsDict = dict( [( k, v.replace( ' ', '' ).split( ',' ) ) for k, v in result['Value'].iteritems()] )
  for k, v in platformsDict.iteritems():
    if k not in v:
      v.append( k )

  resultList = list( platforms )
  for p in platforms:
    tmpList = platformsDict.get( p, [] )
    for pp in platformsDict:
      if p in platformsDict[pp]:
        tmpList.append( pp )
        tmpList += platformsDict[pp]
    if tmpList:
      resultList += tmpList

  return S_OK( uniqueElements( resultList ) )
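
getCompatiblePlatforms expands the requested platforms in both directions through the OS compatibility map: everything a requested tag is declared compatible with, plus every tag that declares the requested one. A self-contained sketch of that expansion with a hard-coded dictionary standing in for /Resources/Computing/OSCompatibility (the tag names are invented):

def compatible_platforms(requested, compatibility):
    platforms = [requested] if isinstance(requested, str) else list(requested)
    result = list(platforms)
    for p in platforms:
        # tags the requested platform is declared compatible with
        result += compatibility.get(p, [])
        # tags that list the requested platform among their compatible ones
        for tag, runs in compatibility.items():
            if p in runs:
                result.append(tag)
                result += runs
    # order-preserving dedup, the role uniqueElements plays in the original
    unique = []
    for p in result:
        if p not in unique:
            unique.append(p)
    return unique

compat = {"os_v2": ["os_v1"], "os_v3": ["os_v2", "os_v1"]}
print(compatible_platforms("os_v2", compat))   # ['os_v2', 'os_v1', 'os_v3']
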
Example #5
    def _prepareHost(self):
        """ Prepare directories and copy control script 
    """

        # Make remote directories
        dirTuple = uniqueElements([
            self.sharedArea, self.executableArea, self.infoArea,
            self.batchOutput, self.batchError, self.workArea
        ])
        nDirs = len(dirTuple)
        cmdTuple = ['mkdir', '-p'] + dirTuple
        self.log.verbose('Creating working directories')
        result = systemCall(30, cmdTuple)
        if not result['OK']:
            self.log.warn('Failed creating working directories: %s' %
                          result['Message'][1])
            return result
        status, output, error = result['Value']
        if status != 0:
            self.log.warn('Failed to create directories: %s' % output)
            return S_ERROR('Failed to create directories: %s' % output)

        # copy the control script now
        localScript = os.path.join(rootPath, "DIRAC", "Resources", "Computing",
                                   "remote_scripts", self.controlScript)
        self.log.verbose('Copying %s script' % self.controlScript)
        try:
            shutil.copy(localScript, self.sharedArea)
            # Chmod the control scripts
            self.finalScript = os.path.join(self.sharedArea,
                                            self.controlScript)
            os.chmod(self.finalScript, 0o755)
        except Exception as x:
            self.log.warn('Failed copying control script', x)
            return S_ERROR(x)
Example #6
  def getUserByProperty( self , prop = "NormalUser" ):

    """
    Get usernames based on group property
    Argument is a string. Return value is a list
    """

    groupList = list()
    result = gConfig.getSections( "/Registry/Groups" )
    gLogger.debug( "Group response: %s" % result )
    if not result[ "OK" ]:
      return groupList

    groups = result[ "Value" ]
    for j in groups:
      props = getProperties( j )
      gLogger.debug( "%s properties: %s" % ( j , props ) )
      if prop in props:
        groupList.append( j )

    if not len( groupList ) > 0:
      return groupList
    groupList = uniqueElements( groupList )
    gLogger.debug( "Chosen group(s): %s" % groupList )

    userList = list()
    for i in groupList:
      users = gConfig.getValue( "/Registry/Groups/%s/Users" % i , [] )
      gLogger.debug( "%s users: %s" % ( i , users ) )
      if len( users ) > 0:
        userList.extend( users )

    return userList
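
getUserByProperty walks /Registry/Groups, keeps the groups whose properties contain the requested one, and then merges the Users lists of those groups. The same logic against an in-memory stand-in (a hypothetical dict replacing the gConfig calls):

def users_by_property(registry_groups, prop="NormalUser"):
    # registry_groups: {group_name: {"Properties": [...], "Users": [...]}}
    matching = [g for g, cfg in registry_groups.items() if prop in cfg.get("Properties", [])]
    users = []
    for g in matching:
        users.extend(registry_groups[g].get("Users", []))
    return users

groups = {"dirac_user": {"Properties": ["NormalUser"], "Users": ["user_a", "user_b"]},
          "dirac_admin": {"Properties": ["ServiceAdministrator"], "Users": ["user_a"]}}
print(users_by_property(groups))   # ['user_a', 'user_b']
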
Example #8
    def getUserByProperty(self, prop="NormalUser"):
        """
    Get usernames based on group property
    Argument is a string. Return value is a list
    """

        groupList = list()
        result = gConfig.getSections("/Registry/Groups")
        gLogger.debug("Group response: %s" % result)
        if not result["OK"]:
            return groupList

        groups = result["Value"]
        for j in groups:
            props = getProperties(j)
            gLogger.debug("%s properties: %s" % (j, props))
            if prop in props:
                groupList.append(j)

        if not len(groupList) > 0:
            return groupList
        groupList = uniqueElements(groupList)
        gLogger.debug("Chosen group(s): %s" % groupList)

        userList = list()
        for i in groupList:
            users = gConfig.getValue("/Registry/Groups/%s/Users" % i, [])
            gLogger.debug("%s users: %s" % (i, users))
            if len(users) > 0:
                userList.extend(users)

        return userList
Example #9
def getCompatiblePlatforms( originalPlatforms ):
  """ Get a list of platforms compatible with the given list
  """
  if type( originalPlatforms ) == type( ' ' ):
    platforms = [originalPlatforms]
  else:
    platforms = list( originalPlatforms )

  platformDict = {}
  result = gConfig.getOptionsDict( '/Resources/Computing/OSCompatibility' )
  if result['OK'] and result['Value']:
    platformDict = result['Value']
    for platform in platformDict:
      platformDict[platform] = [ x.strip() for x in platformDict[platform].split( ',' ) ]
  else:
    return S_ERROR( 'OS compatibility info not found' )

  resultList = list( platforms )
  for p in platforms:
    tmpList = platformDict.get( p, [] )
    for pp in platformDict:
      if p in platformDict[pp]:
        tmpList.append( pp )
        tmpList += platformDict[pp]
    if tmpList:
      resultList += tmpList

  return S_OK( uniqueElements( resultList ) )
Example #10
def getCompatiblePlatforms(originalPlatforms):
    """ Get a list of platforms compatible with the given list 
  """
    if type(originalPlatforms) == type(' '):
        platforms = [originalPlatforms]
    else:
        platforms = list(originalPlatforms)

    platformDict = {}
    result = gConfig.getOptionsDict('/Resources/Computing/OSCompatibility')
    if result['OK'] and result['Value']:
        platformDict = result['Value']
        for platform in platformDict:
            platformDict[platform] = [
                x.strip() for x in platformDict[platform].split(',')
            ]
    else:
        return S_ERROR('OS compatibility info not found')

    resultList = list(platforms)
    for p in platforms:
        tmpList = platformDict.get(p, [])
        for pp in platformDict:
            if p in platformDict[pp]:
                tmpList.append(pp)
                tmpList += platformDict[pp]
        if tmpList:
            resultList += tmpList

    return S_OK(uniqueElements(resultList))
Example #11
  def _prepareHost( self ):
    """ Prepare directories and copy control script 
    """

    # Make remote directories
    dirTuple = uniqueElements( [ self.sharedArea,
                                 self.executableArea,
                                 self.infoArea,
                                 self.batchOutput,
                                 self.batchError,
                                 self.workArea] )
    nDirs = len( dirTuple )
    cmdTuple = [ 'mkdir', '-p' ] + dirTuple
    self.log.verbose( 'Creating working directories' )
    result = systemCall( 30, cmdTuple )
    if not result['OK']:
      self.log.warn( 'Failed creating working directories: %s' % result['Message'][1] )
      return result
    status, output, error = result['Value']
    if status != 0:
      self.log.warn( 'Failed to create directories: %s' % output )
      return S_ERROR( 'Failed to create directories: %s' % output )

    # copy the control script now
    localScript = os.path.join( rootPath, "DIRAC", "Resources", "Computing", "remote_scripts", self.controlScript )
    self.log.verbose( 'Copying %s script' % self.controlScript )
    try:
      shutil.copy( localScript, self.sharedArea )
      # Chmod the control scripts
      self.finalScript = os.path.join( self.sharedArea, self.controlScript )
      os.chmod( self.finalScript, 0o755 )
    except Exception as x:
      self.log.warn( 'Failed copying control script', x )
      return S_ERROR( x )
Example #12
def getCompatiblePlatforms(originalPlatforms):
    """ Get a list of platforms compatible with the given list
  """
    if isinstance(originalPlatforms, six.string_types):
        platforms = [originalPlatforms]
    else:
        platforms = list(originalPlatforms)

    platforms = list(platform.replace(' ', '') for platform in platforms)

    result = gConfig.getOptionsDict('/Resources/Computing/OSCompatibility')
    if not (result['OK'] and result['Value']):
        return S_ERROR("OS compatibility info not found")

    platformsDict = dict(
        (k, v.replace(' ', '').split(','))
        for k, v in result['Value'].items())  # can be an iterator
    for k, v in platformsDict.items():  # can be an iterator
        if k not in v:
            v.append(k)

    resultList = list(platforms)
    for p in platforms:
        tmpList = platformsDict.get(p, [])
        for pp in platformsDict:
            if p in platformsDict[pp]:
                tmpList.append(pp)
                tmpList += platformsDict[pp]
        if tmpList:
            resultList += tmpList

    return S_OK(uniqueElements(resultList))
Example #13
 def __getSelectionData(self):
   callback = {}
   if not authorizeAction():
     return {"success":"false","error":"You are not authorize to access these data"}
   if len(request.params) > 0:
     tmp = {}
     for i in request.params:
       tmp[i] = str(request.params[i])
     callback["extra"] = tmp
   rpcClient = getRPCClient( "Framework/ProxyManager" )
   retVal = rpcClient.getContents( {}, [], 0, 0 )
   if not retVal[ "OK" ]:
     return {"success":"false","error":retVal["Message"]}
   data = retVal[ "Value" ]
   users = []
   groups = []
   for record in data[ "Records" ]:
     users.append( str(record[0]) )
     groups.append( str(record[2]) )
   users = uniqueElements(users)
   groups = uniqueElements(groups)
   users.sort()
   groups.sort()
   users = map(lambda x: [x], users)
   groups = map(lambda x: [x], groups)
   if len(users) > 1:
     users.insert(0,["All"])
   if len(groups) > 1:
     groups.insert(0,["All"])
   callback["username"] = users
   callback["usergroup"] = groups
   result = gConfig.getOption("/Website/ProxyManagementMonitoring/TimeSpan")
   if result["OK"]:
     tmp = result["Value"]
     tmp = tmp.split(", ")
     if len(tmp)>0:
       timespan = []
       for i in tmp:
         human_readable = self.__humanize_time(i)
         timespan.append([i, human_readable])
     else:
       timespan = [["Nothing to display"]]
   else:
     timespan = [["Error during RPC call"]]
   callback["expiredBefore"] = timespan
   callback["expiredAfter"] = timespan
   return callback
Example #14
 def _getFileGroups( self, fileReplicas ):
   fileGroups = {}
   for lfn, replicas in fileReplicas.items():
     replicaSEs = str.join( ',', sortList( uniqueElements( replicas.keys() ) ) )
     if not fileGroups.has_key( replicaSEs ):
       fileGroups[replicaSEs] = []
     fileGroups[replicaSEs].append( lfn )
   return fileGroups
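
_getFileGroups inverts the replica mapping: each LFN is keyed by the comma-joined, sorted, deduplicated set of storage elements that hold it. A minimal sketch of that grouping, where sorted(set(...)) plays the role of sortList + uniqueElements and the LFNs and SE names are invented:

def get_file_groups(file_replicas):
    # file_replicas: { lfn: {SE_name: replica_info, ...}, ... }
    file_groups = {}
    for lfn, replicas in file_replicas.items():
        key = ",".join(sorted(set(replicas)))
        file_groups.setdefault(key, []).append(lfn)
    return file_groups

replicas = {"/vo/data/file1": {"SITE-A_DST": {}, "SITE-B_DST": {}},
            "/vo/data/file2": {"SITE-B_DST": {}, "SITE-A_DST": {}},
            "/vo/data/file3": {"SITE-A_DST": {}}}
print(get_file_groups(replicas))
# {'SITE-A_DST,SITE-B_DST': ['/vo/data/file1', '/vo/data/file2'], 'SITE-A_DST': ['/vo/data/file3']}
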
Example #15
  def web_sendMessage(self):

    """
    Send message(not implemented yet) or email getting parameters from request
    """

    email = self.getUserEmail()
    
    if not "subject" in self.request.arguments:
      result = "subject parameter is not in request... aborting"
      gLogger.debug(result)
      self.write({ "success" : "false" , "error" : result })
      return
    
    subject = self.checkUnicode(self.request.arguments[ "subject" ][0])
    if not len(subject) > 0:
      subject = "Message from %s" % email

    if not "message" in self.request.arguments:
      result = "msg parameter is not in request... aborting"
      gLogger.debug(result)
      self.write({ "success" : "false" , "error" : result })
      return
    
    body = self.checkUnicode(self.request.arguments[ "message" ][0])
    if not len(body) > 0:
      result = "Message body has zero length... aborting"
      gLogger.debug(result)
      self.write({ "success" : "false" , "error" : result })
      return

    users = self.request.arguments[ "users" ][0].split(",")

    groups = self.request.arguments[ "groups" ][0].split(",")
    
    gLogger.info("List of groups from request: %s" % groups)
    if groups:
      for g in groups:
        userList = self.getUsersFromGroup(g)
        gLogger.info("Get users: %s from group %s" % (userList , g))
        if userList:
          users.extend(userList)
          
    gLogger.info("Merged list of users from users and group %s" % users)

    if not len(users) > 0:
      error = "Length of list of recipients is zero size"
      gLogger.info(error)
      self.write({ "success" : "false" , "error" : error })
      return
      
    users = uniqueElements(users)
    gLogger.info("Final list of users to send message/mail: %s" % users)
    
    sendDict = self.getMailDict(users)
    self.write(self.sendMail(sendDict , subject , body , email))
Example #16
  def __sendMessage( self ):

    """
    Send message(not implemented yet) or email getting parameters from request
    """

    getEmail = self.__returnEmail()
    if not "result" in getEmail:
      return getEmail
    email = getEmail[ "result" ]

    if not "subject" in request.params:
      result = "subject parameter is not in request... aborting"
      gLogger.debug( result )
      return { "success" : "false" , "error" : result }
    subject = GeneralController().checkUnicode( request.params[ "subject" ] )
    if not len( subject ) > 0:
      subject = "Message from %s" % email

    if not "msg" in request.params:
      result = "msg parameter is not in request... aborting"
      gLogger.debug( result )
      return { "success" : "false" , "error" : result }
    body = GeneralController().checkUnicode( request.params[ "msg" ] )
    if not len( body ) > 0:
      result = "Message body has zero length... aborting"
      gLogger.debug( result )
      return { "success" : "false" , "error" : result }

    users = list()
    userList = GeneralController().userlistFromRequest()
    gLogger.info( "List of users from request: %s" % userList )
    if userList:
      users.extend( userList )

    groupList = GeneralController().grouplistFromRequest()
    gLogger.info( "List of groups from request: %s" % groupList )
    if groupList:
      for i in groupList:
        userList = GeneralController().userlistFromGroup( i )
        gLogger.info( "Get users: %s from group %s" % ( userList , i ) )
        if userList:
          users.extend( userList )
    gLogger.info( "Merged list of users from users and group %s" % users )

    if not len( users ) > 0:
      error = "Length of list of recipients is zero size"
      gLogger.info( error )
      return { "success" : "false" , "error" : error }
    users = uniqueElements( users )
    gLogger.info( "Final list of users to send message/mail: %s" % users )
    
    if "email" in request.params:
      sendDict = GeneralController().getMailDict( users )
      return GeneralController().sendMail( sendDict , subject , body , email )
    return { "success" : "false" , "error" : result }
Example #17
    def web_sendMessage(self):
        """
    Send message(not implemented yet) or email getting parameters from request
    """

        email = self.getUserEmail()

        if not "subject" in self.request.arguments:
            result = "subject parameter is not in request... aborting"
            gLogger.debug(result)
            self.write({"success": "false", "error": result})
            return

        subject = self.checkUnicode(self.request.arguments["subject"][0])
        if not subject:
            subject = "Message from %s" % email

        if not "message" in self.request.arguments:
            result = "msg parameter is not in request... aborting"
            gLogger.debug(result)
            self.write({"success": "false", "error": result})
            return

        body = self.checkUnicode(self.request.arguments["message"][0])
        if not len(body) > 0:
            result = "Message body has zero length... aborting"
            gLogger.debug(result)
            self.write({"success": "false", "error": result})
            return

        users = self.request.arguments["users"][0].split(",")

        groups = self.request.arguments["groups"][0].split(",")

        gLogger.info("List of groups from request: %s" % groups)
        if groups:
            for g in groups:
                userList = self.getUsersFromGroup(g)
                gLogger.info("Get users: %s from group %s" % (userList, g))
                if userList:
                    users.extend(userList)

        gLogger.info("Merged list of users from users and group %s" % users)

        if not len(users) > 0:
            error = "Length of list of recipients is zero size"
            gLogger.info(error)
            self.write({"success": "false", "error": error})
            return

        users = uniqueElements(users)
        gLogger.info("Final list of users to send message/mail: %s" % users)

        sendDict = self.getMailDict(users)
        self.write(self.sendMail(sendDict, subject, body, email))
Example #18
 def _getFileGroups(cls, fileReplicas):
     """ get file groups dictionary { "SE1,SE2,SE3" : [ lfn1, lfn2 ], ... }
 
 :param dict fileReplicas: { lfn : [SE1, SE2, SE3], ... }
 """
     fileGroups = {}
     for lfn, replicas in fileReplicas.items():
         replicaSEs = ",".join(sortList(uniqueElements(replicas)))
         if replicaSEs not in fileGroups:
             fileGroups[replicaSEs] = []
         fileGroups[replicaSEs].append(lfn)
     return fileGroups
Example #19
 def _getFileGroups( cls, fileReplicas ):
   """ get file groups dictionary { "SE1,SE2,SE3" : [ lfn1, lfn2 ], ... }
   
   :param dict fileReplicas: { lfn : [SE1, SE2, SE3], ... }
   """
   fileGroups = {}
   for lfn, replicas in fileReplicas.items():
     replicaSEs = ",".join( sortList( uniqueElements( replicas ) ) )
     if replicaSEs not in fileGroups:
       fileGroups[replicaSEs] = []
     fileGroups[replicaSEs].append( lfn )
   return fileGroups
Example #20
 def __getLaunchpadOpts(self):
   gLogger.info( "start __getLaunchpadOpts" )
   delimiter = gConfig.getValue( "/Website/Launchpad/ListSeparator" , ',' )
   options = self.__getOptionsFromCS( delimiter = delimiter)
   platform = self.__getPlatform()
   if platform and options:
     if not options.has_key( "Platform" ):
       options[ "Platform" ] = platform
     else:
       csPlatform = list( options[ "Platform" ] )
       allPlatforms = csPlatform + platform
       platform = uniqueElements( allPlatforms )
       options[ "Platform" ] = platform
   gLogger.debug( "Combined options from CS: %s" % options )
   override = gConfig.getValue( "/Website/Launchpad/OptionsOverride" , False)
   gLogger.info( "end __getLaunchpadOpts" )
   return {"success":"true","result":options,"override":override,"separator":delimiter}
Example #21
 def __getLayout( self ) :
   gLogger.info( "Running getLayout()" )
   msg = "getLayout() for %s@%s" % ( getUsername() , getSelectedGroup() )
   upc = UserProfileClient( USER_PROFILE_NAME, getRPCClient )
   result = upc.listAvailableVars()
   gLogger.debug( result )
   if not result[ "OK" ]:
     gLogger.error( "Result %s: %s" % ( msg , result[ "Message" ] ) )
     return { "success" : "false" , "error" : result[ "Message" ] }
   result = result[ "Value" ]
   gLogger.always( "array2obj" )
   available = map( self.__array2obj , result )
   gLogger.always( available )
   users = list()
   for i in result :
     if len( i ) > 1 :
       users.append( { "user" : i[ 0 ] } )
   users = uniqueElements( users )
   gLogger.info( "Result %s: %s AND %s" % ( msg , availble , users ) )
   return { "success" : "true" , "result" : availble , "users" : users }
Example #22
 def __getLayout(self):
     gLogger.info("Running getLayout()")
     msg = "getLayout() for %s@%s" % (getUsername(), getSelectedGroup())
     upc = UserProfileClient(USER_PROFILE_NAME, getRPCClient)
     result = upc.listAvailableVars()
     gLogger.debug(result)
     if not result["OK"]:
         gLogger.error("Result %s: %s" % (msg, result["Message"]))
         return {"success": "false", "error": result["Message"]}
     result = result["Value"]
     gLogger.always("array2obj")
     available = map(self.__array2obj, result)
     gLogger.always(available)
     users = list()
     for i in result:
         if len(i) > 1:
             users.append({"user": i[0]})
     users = uniqueElements(users)
     gLogger.info("Result %s: %s AND %s" % (msg, availble, users))
     return {"success": "true", "result": availble, "users": users}
Example #23
 def layoutUser(self):
   upProfileName = "Summary"
   upc = UserProfileClient( "Summary", getRPCClient )
   result = upc.listAvailableVars()
   if result["OK"]:
     result = result["Value"]
     userList = []
     for i in result:
       userList.append(i[0])
     userList = uniqueElements(userList)
     resultList = []
     for j in userList:
       resultList.append({'name':j})
     total = len(resultList)
     resultList.sort()
     resultList.insert(0,{'name':'All'})
     c.result = {"success":"true","result":resultList,"total":total}
   else:
     c.result = {"success":"false","error":result["Message"]}
   return c.result
Example #24
    def _prepareHost(self):
        """Prepare directories and copy control script"""

        # Make remote directories
        dirTuple = uniqueElements([
            self.sharedArea, self.executableArea, self.infoArea,
            self.batchOutput, self.batchError, self.workArea
        ])
        cmdTuple = ["mkdir", "-p"] + dirTuple
        self.log.verbose("Creating working directories")
        result = systemCall(30, cmdTuple)
        if not result["OK"]:
            self.log.error("Failed creating working directories",
                           "(%s)" % result["Message"][1])
            return result
        status, output, error = result["Value"]
        if status != 0:
            self.log.error("Failed to create directories", "(%s)" % error)
            return S_ERROR(errno.EACCES, "Failed to create directories")

        return S_OK()
Example #25
 def __setHistory( self , item , state ):
   """
   Insert item into the Load or Save history list in first position, checking for
   duplications.
   Return the resulting list.
   "item" is a dict
   "state" should be either "Save" or "Load" but can be any other value
   """
   gLogger.info( "Running setHistory( %s , %s )" % ( item , state ) )
   msg = "setHistory() for %s@%s" % ( getUsername() , getSelectedGroup() )
   opt = "/Website/" + USER_PROFILE_NAME + "/ShowHistory"
   history_length = gConfig.getValue( opt , 5 )
   upc = UserProfileClient( "Default" , getRPCClient )
   group = str( getSelectedGroup() )
   profile_name = USER_PROFILE_NAME + ".History." + state + "." + group
   result = upc.retrieveVar( profile_name )
   gLogger.info( result )
   if not result[ "OK" ]:
     if result[ "Message" ].find( "No data" ) < 0 :
       gLogger.error( "Result %s: %s" % ( msg , result[ "Message" ] ) )
       return S_ERROR( result[ "Message" ] )
     history = list()
   else:
     history = result[ "Value" ]
   if not isinstance( history , list ):
     err = "List expected at: %s" % profile_name
     gLogger.error( "Result %s: %s" % ( msg , err ) )
     return S_ERROR( err )
   if len( history ) > history_length:
     history = history[:history_length]
   history.insert( 0 , item )
   history = uniqueElements( history )
   gLogger.error( "History: %s" % history )
   result = upc.storeVar( profile_name , history )
   gLogger.info( result )
   if not result[ "OK" ]:
     gLogger.error( "Result %s: %s" % ( msg , result[ "Message" ] ) )
     return S_ERROR( result[ "Message" ] )
   gLogger.info( "Result %s: %s" % ( msg , history ) )
   return S_OK( history )
Example #26
 def __setHistory(self, item, state):
     """
 Insert item to  Load or Save history list in first position and checking for
 duplications.
 Return resulting list
 "item" is a dict
 "state" should be either "Save" or "Load" but can be any other value
 """
     gLogger.info("Running setHistory( %s , %s )" % (item, state))
     msg = "setHistory() for %s@%s" % (getUsername(), getSelectedGroup())
     opt = "/Website/" + USER_PROFILE_NAME + "/ShowHistory"
     history_length = gConfig.getValue(opt, 5)
     upc = UserProfileClient("Default", getRPCClient)
     group = str(getSelectedGroup())
     profile_name = USER_PROFILE_NAME + ".History." + state + "." + group
     result = upc.retrieveVar(profile_name)
     gLogger.info(result)
     if not result["OK"]:
         if result["Message"].find("No data") < 0:
             gLogger.error("Result %s: %s" % (msg, result["Message"]))
             return S_ERROR(result["Message"])
         history = list()
     else:
         history = result["Value"]
     if not isinstance(history, list):
         err = "List expected at: %s" % profile_name
         gLogger.error("Result %s: %s" % (msg, err))
         return S_ERROR(err)
     if len(history) > history_length:
         history = history[:history_length]
     history.insert(0, item)
     history = uniqueElements(history)
     gLogger.error("History: %s" % history)
     result = upc.storeVar(profile_name, history)
     gLogger.info(result)
     if not result["OK"]:
         gLogger.error("Result %s: %s" % (msg, result["Message"]))
         return S_ERROR(result["Message"])
     gLogger.info("Result %s: %s" % (msg, history))
     return S_OK(history)
Example #27
def _getPoolCatalogs(directory=""):

    patterns = ["*.xml", "*.xml*gz"]
    omissions = [r"\.bak$"]  # to be ignored for production files

    # First obtain valid list of unpacked catalog files in directory
    poolCatalogList = []

    for pattern in patterns:
        fileList = glob.glob(os.path.join(directory, pattern))
        for fname in fileList:
            if fname.endswith(".bak"):
                gLogger.verbose("Ignoring BAK file: %s" % fname)
            elif tarfile.is_tarfile(fname):
                gLogger.debug("Unpacking catalog XML file %s" %
                              (os.path.join(directory, fname)))
                with tarfile.open(os.path.join(directory, fname), "r") as tf:
                    for member in tf.getmembers():
                        tf.extract(member, directory)
                        poolCatalogList.append(
                            os.path.join(directory, member.name))
            else:
                poolCatalogList.append(fname)

    poolCatalogList = uniqueElements(poolCatalogList)

    # Now have list of all XML files but some may not be Pool XML catalogs...
    finalCatList = []
    for possibleCat in poolCatalogList:
        try:
            _cat = PoolXMLCatalog(possibleCat)
            finalCatList.append(possibleCat)
        except Exception as x:
            gLogger.debug("Ignoring non-POOL catalogue file %s" % possibleCat)

    gLogger.debug("Final list of catalog files are: %s" %
                  ", ".join(finalCatList))

    return finalCatList
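
The first half of _getPoolCatalogs is a file-collection pass: glob the candidate patterns, skip backups, unpack tarred catalogs, and deduplicate the resulting paths. A sketch of just the collection and dedup part (without the tarfile unpacking or the PoolXMLCatalog validation):

import glob
import os

def candidate_catalogs(directory=""):
    patterns = ["*.xml", "*.xml*gz"]
    found = []
    for pattern in patterns:
        for fname in glob.glob(os.path.join(directory, pattern)):
            if fname.endswith(".bak"):
                continue                      # production backup files are ignored
            if fname not in found:            # order-preserving dedup, as uniqueElements does
                found.append(fname)
    return found

print(candidate_catalogs("."))
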
Example #28
    def _prepareHost(self):
        """ Prepare directories and copy control script
    """

        # Make remote directories
        dirTuple = uniqueElements([
            self.sharedArea, self.executableArea, self.infoArea,
            self.batchOutput, self.batchError, self.workArea
        ])
        cmdTuple = ['mkdir', '-p'] + dirTuple
        self.log.verbose('Creating working directories')
        result = systemCall(30, cmdTuple)
        if not result['OK']:
            self.log.warn('Failed creating working directories: %s' %
                          result['Message'][1])
            return result
        status, output, _error = result['Value']
        if status != 0:
            self.log.warn('Failed to create directories: %s' % output)
            return S_ERROR('Failed to create directories: %s' % output)

        return S_OK()
Example #29
  def _prepareHost( self ):
    """ Prepare directories and copy control script
    """

    # Make remote directories
    dirTuple = uniqueElements( [ self.sharedArea,
                                 self.executableArea,
                                 self.infoArea,
                                 self.batchOutput,
                                 self.batchError,
                                 self.workArea] )
    cmdTuple = [ 'mkdir', '-p' ] + dirTuple
    self.log.verbose( 'Creating working directories' )
    result = systemCall( 30, cmdTuple )
    if not result['OK']:
      self.log.warn( 'Failed creating working directories: %s' % result['Message'][1] )
      return result
    status, output, _error = result['Value']
    if status != 0:
      self.log.warn( 'Failed to create directories: %s' % output )
      return S_ERROR( 'Failed to create directories: %s' % output )

    return S_OK()
Example #30
 def layoutUser(self):
     upProfileName = "Summary"
     upc = UserProfileClient("Summary", getRPCClient)
     result = upc.listAvailableVars()
     if result["OK"]:
         result = result["Value"]
         userList = []
         for i in result:
             userList.append(i[0])
         userList = uniqueElements(userList)
         resultList = []
         for j in userList:
             resultList.append({'name': j})
         total = len(resultList)
         resultList.sort()
         resultList.insert(0, {'name': 'All'})
         c.result = {
             "success": "true",
             "result": resultList,
             "total": total
         }
     else:
         c.result = {"success": "false", "error": result["Message"]}
     return c.result
Example #31
def _getPoolCatalogs(directory=""):

    patterns = ["*.xml", "*.xml*gz"]
    omissions = ["\.bak$"]  # to be ignored for production files

    # First obtain valid list of unpacked catalog files in directory
    poolCatalogList = []

    for pattern in patterns:
        fileList = glob.glob(os.path.join(directory, pattern))
        for fname in fileList:
            if fname.endswith(".bak"):
                gLogger.verbose("Ignoring BAK file: %s" % fname)
            elif tarfile.is_tarfile(fname):
                gLogger.debug("Unpacking catalog XML file %s" % (os.path.join(directory, fname)))
                with tarfile.open(os.path.join(directory, fname), "r") as tf:
                    for member in tf.getmembers():
                        tf.extract(member, directory)
                        poolCatalogList.append(os.path.join(directory, member.name))
            else:
                poolCatalogList.append(fname)

    poolCatalogList = uniqueElements(poolCatalogList)

    # Now have list of all XML files but some may not be Pool XML catalogs...
    finalCatList = []
    for possibleCat in poolCatalogList:
        try:
            _cat = PoolXMLCatalog(possibleCat)
            finalCatList.append(possibleCat)
        except Exception as x:
            gLogger.debug("Ignoring non-POOL catalogue file %s" % possibleCat)

    gLogger.debug("Final list of catalog files are: %s" % ", ".join(finalCatList))

    return finalCatList
Example #32
    def _prepareRemoteHost(self, host=None):
        """ Prepare remote directories and upload control script
    """

        ssh = SSH(host=host, parameters=self.ceParameters)

        # Make remote directories
        dirTuple = tuple(
            uniqueElements([
                self.sharedArea, self.executableArea, self.infoArea,
                self.batchOutput, self.batchError, self.workArea
            ]))
        nDirs = len(dirTuple)
        cmd = 'mkdir -p %s; ' * nDirs % dirTuple
        cmd = "bash -c '%s'" % cmd
        self.log.verbose('Creating working directories on %s' %
                         self.ceParameters['SSHHost'])
        result = ssh.sshCall(30, cmd)
        if not result['OK']:
            self.log.warn('Failed creating working directories: %s' %
                          result['Message'][1])
            return result
        status, output, _error = result['Value']
        if status == -1:
            self.log.warn('Timeout while creating directories')
            return S_ERROR('Timeout while creating directories')
        if "cannot" in output:
            self.log.warn('Failed to create directories: %s' % output)
            return S_ERROR('Failed to create directories: %s' % output)

        # Upload the control script now
        batchSystemDir = os.path.join(rootPath, "DIRAC", "Resources",
                                      "Computing", "BatchSystems")
        batchSystemScript = os.path.join(batchSystemDir,
                                         '%s.py' % self.batchSystem)
        batchSystemExecutor = os.path.join(batchSystemDir, 'executeBatch.py')
        self.log.verbose('Uploading %s script to %s' %
                         (self.batchSystem, self.ceParameters['SSHHost']))
        remoteScript = '%s/execute_batch' % self.sharedArea
        result = ssh.scpCall(30,
                             '%s %s' %
                             (batchSystemScript, batchSystemExecutor),
                             remoteScript,
                             postUploadCommand='chmod +x %s' % remoteScript)
        if not result['OK']:
            self.log.warn('Failed uploading control script: %s' %
                          result['Message'][1])
            return result
        status, output, _error = result['Value']
        if status != 0:
            if status == -1:
                self.log.warn('Timeout while uploading control script')
                return S_ERROR('Timeout while uploading control script')
            else:
                self.log.warn('Failed uploading control script: %s' % output)
                return S_ERROR('Failed uploading control script')

        # Chmod the control scripts
        #self.log.verbose( 'Chmod +x control script' )
        #result = ssh.sshCall( 10, "chmod +x %s/%s" % ( self.sharedArea, self.controlScript ) )
        #if not result['OK']:
        #  self.log.warn( 'Failed chmod control script: %s' % result['Message'][1] )
        #  return result
        #status, output, _error = result['Value']
        #if status != 0:
        #  if status == -1:
        #    self.log.warn( 'Timeout while chmod control script' )
        #    return S_ERROR( 'Timeout while chmod control script' )
        #  else:
        #    self.log.warn( 'Failed uploading chmod script: %s' % output )
        #    return S_ERROR( 'Failed uploading chmod script' )

        return S_OK()
Example #33
  def registerUser(self,paramcopy):
    # Unfortunately there is no way to get rid of empty text values in JS, so I have to hardcode it on the server side. Hate it!
    default_values = ["John Smith","jsmith","*****@*****.**","+33 9 10 00 10 00","Select prefered virtual organization(s)"]
    default_values.append("Select your country")
    default_values.append("Any additional information you want to provide to administrators")
    dn = getUserDN()
    username = getUsername()
    if not username == "anonymous":
      return {"success":"false","error":"You are already registered in DIRAC with username: %s" % username}
    else:
      if not dn:
        return {"success":"false","error":"You have to load certificate to your browser before trying to register"}
    body = ""
    userMail = False
    vo = []
    for i in paramcopy:
      if not paramcopy[i] in default_values:
        if i == "email":
          userMail = paramcopy[i]
        if i == "vo":
          vo = paramcopy[i].split(",")
        body = body + str(i) + ' - "' + str(paramcopy[i]) + '"\n'
    if not userMail:
      return {"success":"false","error":"Can not get your email from the request"}
    gLogger.info("!!! VO: ",vo)
# TODO Check for previous requests
    if not len(vo) > 0:
      mails = gConfig.getValue("/Website/UserRegistrationEmail",[])
    else:
      mails = []
      for i in vo:
        i = i.strip()
        voadm = gConfig.getValue("/Registry/VO/%s/VOAdmin" % i,"")
        failsafe = False
        if voadm:
          tmpmail = gConfig.getValue("/Registry/Users/%s/Email" % voadm,"")
          if tmpmail:
            mails.append(tmpmail)
          else:
            gLogger.error("Can not find value for option /Registry/Users/%s/Email Trying failsafe option" % voadm)
            failsafe = True
        else:
          gLogger.error("Can not find value for option /Registry/VO/%s/VOAdmin Trying failsafe option" % i)
          failsafe = True
        if failsafe:
          failsafe = gConfig.getValue("/Website/UserRegistrationEmail",[])
          if len(failsafe) > 0:
            for j in failsafe:
              mails.append(j)
          else:
              gLogger.error("Can not find value for failsafe option /Website/UserRegistrationEmail User registration for VO %s is failed" % i)
    mails = uniqueElements(mails)
    if not len(mails) > 0:
      groupList = list()
      allGroups = gConfig.getSections("/Registry/Groups")
      if not allGroups["OK"]:
        return {"success":"false","error":"No groups found at this DIRAC installation"}
      allGroups = allGroups["Value"]
      for j in allGroups:
        props = getProperties(j)
        if "UserAdministrator" in props: # property which usd for user administration
          groupList.append(j)
      groupList = uniqueElements(groupList)
      if not len(groupList) > 0:
        return {"success":"false","error":"No groups, resposible for user administration, found"}
      userList = list()
      for i in groupList:
        users = gConfig.getValue("/Registry/Groups/%s/Users" % i,[])
        for j in users:
          userList.append(j)
      userList = uniqueElements(userList)
      if not len(userList) > 0:
        return {"success":"false","error":"Can not find a person resposible for user administration, your request can not be approuved"}
      mails = list()
      mail2name = dict()
      for i in userList:
        tmpmail = gConfig.getValue("/Registry/Users/%s/Email" % i,"")
        if tmpmail:
          mails.append(tmpmail)
        else:
          gLogger.error("Can not find value for option /Registry/Users/%s/Email" % i)
      mails = uniqueElements(mails)
      if not len(mails) > 0:
        return {"success":"false","error":"Can not find an email of the person resposible for the users administration, your request can not be approuved"}
    gLogger.info("Admins emails: ",mails)
    if not len(mails) > 0:
      return {"success":"false","error":"Can not find any emails of DIRAC Administrators"}
    allUsers = gConfig.getSections("/Registry/Users")
    if not allUsers["OK"]:
      return {"success":"false","error":"No users found at this DIRAC installation"}
    allUsers = allUsers["Value"]
    mail2name = dict()
    for i in allUsers:
      tmpmail = gConfig.getValue("/Registry/Users/%s/Email" % i,"")
      if tmpmail in mails:
        mail2name[tmpmail] = gConfig.getValue("/Registry/Users/%s/FullName" % i,i)
    sentFailed = list()
    sentSuccess = list()
    errorMessage = list()
    ntc = NotificationClient( getRPCClient )
    for i in mails:
      i = i.strip()
      result = ntc.sendMail(i,"New user has registered",body,userMail,False)
      if not result["OK"]:
        sentFailed.append(mail2name[i])
        errorMessage.append(result["Message"])
      else:
        sentSuccess.append(mail2name[i])
    gLogger.info("Sent success: ",sentSuccess)
    gLogger.info("Sent failure: ",sentFailed)
    errorMessage = uniqueElements(errorMessage)
    if len(sentSuccess) == 0:
      if not len(errorMessage) > 0:
        return {"success":"false","error":"No messages were sent to administrators due techincal reasons"}
      errorMessage = ", ".join(errorMessage)
      return {"success":"false","error":errorMessage}
    sName = ", ".join(sentSuccess)
    fName = ", ".join(sentFailed)
    if len(sentFailed) > 0:
      return {"success":"true","result":"Your registration request were sent successfuly to %s. Failed to sent request to %s." % (sName, fName)}
    return {"success":"true","result":"Your registration request were sent successfuly to %s." % sName}
Example #34
  def _toJDL( self, xmlFile = '', jobDescriptionObject = None ):  # messy but need to account for xml file being in /tmp/guid dir
    """Creates a JDL representation of itself as a Job.
    """
    #Check if we have to do old bootstrap...
    classadJob = ClassAd( '[]' )

    paramsDict = {}
    params = self.workflow.parameters # ParameterCollection object

    paramList = params
    for param in paramList:
      paramsDict[param.getName()] = {'type':param.getType(), 'value':param.getValue()}

    arguments = []
    scriptName = 'jobDescription.xml'

    if jobDescriptionObject is None:
      # if we are here it's because there's a real file, on disk, that is named 'jobDescription.xml'
      if self.script:
        if os.path.exists( self.script ):
          scriptName = os.path.abspath( self.script )
          self.log.verbose( 'Found script name %s' % scriptName )
        else:
          self.log.error( "File not found", self.script )
      else:
        if xmlFile:
          self.log.verbose( 'Found XML File %s' % xmlFile )
          scriptName = xmlFile
      self.addToInputSandbox.append( scriptName )

    elif isinstance( jobDescriptionObject, StringIO.StringIO ):
      self.log.verbose( "jobDescription is passed in as a StringIO object" )

    else:
      self.log.error( "Where's the job description?" )

    arguments.append( os.path.basename( scriptName ) )
    if paramsDict.has_key( 'LogLevel' ):
      if paramsDict['LogLevel']['value']:
        arguments.append( '-o LogLevel=%s' % ( paramsDict['LogLevel']['value'] ) )
      else:
        self.log.warn( 'Job LogLevel defined with null value' )
    if paramsDict.has_key( 'DIRACSetup' ):
      if paramsDict['DIRACSetup']['value']:
        arguments.append( '-o DIRAC/Setup=%s' % ( paramsDict['DIRACSetup']['value'] ) )
      else:
        self.log.warn( 'Job DIRACSetup defined with null value' )
    if paramsDict.has_key( 'JobMode' ):
      if paramsDict['JobMode']['value']:
        arguments.append( '-o JobMode=%s' % ( paramsDict['JobMode']['value'] ) )
      else:
        self.log.warn( 'Job Mode defined with null value' )
    if paramsDict.has_key( 'JobConfigArgs' ):
      if paramsDict['JobConfigArgs']['value']:
        arguments.append( '%s' % ( paramsDict['JobConfigArgs']['value'] ) )
      else:
        self.log.warn( 'JobConfigArgs defined with null value' )
    if self.parametricWFArguments:
      for name, value in self.parametricWFArguments.items():
        arguments.append( "-p %s='%s'" % ( name, value ) )

    classadJob.insertAttributeString( 'Executable', self.executable )
    self.addToOutputSandbox.append( self.stderr )
    self.addToOutputSandbox.append( self.stdout )

    #Extract i/o sandbox parameters from steps and any input data parameters
    #to do when introducing step-level api...

    #To add any additional files to input and output sandboxes
    if self.addToInputSandbox:
      extraFiles = ';'.join( self.addToInputSandbox )
      if paramsDict.has_key( 'InputSandbox' ):
        currentFiles = paramsDict['InputSandbox']['value']
        finalInputSandbox = currentFiles + ';' + extraFiles
        uniqueInputSandbox = uniqueElements( finalInputSandbox.split( ';' ) )
        paramsDict['InputSandbox']['value'] = ';'.join( uniqueInputSandbox )
        self.log.verbose( 'Final unique Input Sandbox %s' % ( ';'.join( uniqueInputSandbox ) ) )
      else:
        paramsDict['InputSandbox'] = {}
        paramsDict['InputSandbox']['value'] = extraFiles
        paramsDict['InputSandbox']['type'] = 'JDL'

    if self.addToOutputSandbox:
      extraFiles = ';'.join( self.addToOutputSandbox )
      if paramsDict.has_key( 'OutputSandbox' ):
        currentFiles = paramsDict['OutputSandbox']['value']
        finalOutputSandbox = currentFiles + ';' + extraFiles
        uniqueOutputSandbox = uniqueElements( finalOutputSandbox.split( ';' ) )
        paramsDict['OutputSandbox']['value'] = ';'.join( uniqueOutputSandbox )
        self.log.verbose( 'Final unique Output Sandbox %s' % ( ';'.join( uniqueOutputSandbox ) ) )
      else:
        paramsDict['OutputSandbox'] = {}
        paramsDict['OutputSandbox']['value'] = extraFiles
        paramsDict['OutputSandbox']['type'] = 'JDL'

    if self.addToInputData:
      extraFiles = ';'.join( self.addToInputData )
      if paramsDict.has_key( 'InputData' ):
        currentFiles = paramsDict['InputData']['value']
        finalInputData = extraFiles
        if currentFiles:
          finalInputData = currentFiles + ';' + extraFiles
        uniqueInputData = uniqueElements( finalInputData.split( ';' ) )
        paramsDict['InputData']['value'] = ';'.join( uniqueInputData )
        self.log.verbose( 'Final unique Input Data %s' % ( ';'.join( uniqueInputData ) ) )
      else:
        paramsDict['InputData'] = {}
        paramsDict['InputData']['value'] = extraFiles
        paramsDict['InputData']['type'] = 'JDL'

    # Handle parameter sequences
    if self.numberOfParameters > 0:
      paramsDict, arguments = self._handleParameterSequences( paramsDict, arguments )

    classadJob.insertAttributeString( 'Arguments', ' '.join( arguments ) )

    #Add any JDL parameters to classad obeying lists with ';' rule
    for name, props in paramsDict.iteritems():
      ptype = props['type']
      value = props['value']
      if isinstance( value, basestring) and re.search( ';', value ):
        value = value.split( ';' )
      if name.lower() == 'requirements' and ptype == 'JDL':
        self.log.verbose( 'Found existing requirements: %s' % ( value ) )

      if re.search( '^JDL', ptype ):
        if isinstance( value, list ):
          if isinstance( value[0], list ):
            classadJob.insertAttributeVectorStringList( name, value )
          else:
            classadJob.insertAttributeVectorInt( name, value )
        elif isinstance( value, basestring ) and value:
          classadJob.insertAttributeInt( name, value )
        elif isinstance( value, ( int, long, float ) ):
          classadJob.insertAttributeInt( name, value )

    if self.numberOfParameters > 0:
      classadJob.insertAttributeInt( 'Parameters', self.numberOfParameters )

    for fToBeRemoved in [scriptName, self.stdout, self.stderr]:
      try:
        self.addToInputSandbox.remove( fToBeRemoved )
      except ValueError:
        pass

    jdl = classadJob.asJDL()
    start = jdl.find( '[' )
    end = jdl.rfind( ']' )
    return jdl[( start + 1 ):( end - 1 )]
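
The sandbox handling in _toJDL follows one pattern three times (input sandbox, output sandbox, input data): join the existing ';'-separated value with the extra files, split, deduplicate while preserving order, and join again. A standalone sketch of that merge (the file names are invented):

def merge_semicolon_list(current, extra):
    merged = (current + ";" + extra if current else extra).split(";")
    unique = []
    for item in merged:
        if item and item not in unique:   # drop empties and duplicates, keep first occurrence
            unique.append(item)
    return ";".join(unique)

print(merge_semicolon_list("jobDescription.xml;lib.tar.gz", "jobDescription.xml;extra.py"))
# -> jobDescription.xml;lib.tar.gz;extra.py
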
Example #35
File: Job.py  Project: Eo300/DIRAC
  def _toJDL(self, xmlFile='', jobDescriptionObject=None):
    """ Creates a JDL representation of itself as a Job.

       Example usage:

       >>> job = Job()
       >>> job._toJDL()

       :param xmlFile: location of the XML file
       :type xmlFile: str
       :param jobDescriptionObject: if not None, it must be a StringIO object
       :type jobDescriptionObject: StringIO

       :returns: JDL (str)
    """
    # Check if we have to do old bootstrap...
    classadJob = ClassAd('[]')

    paramsDict = {}
    params = self.workflow.parameters  # ParameterCollection object

    paramList = params
    for param in paramList:
      paramsDict[param.getName()] = {'type': param.getType(), 'value': param.getValue()}

    arguments = []
    scriptName = 'jobDescription.xml'

    if jobDescriptionObject is None:
      # if we are here it's because there's a real file, on disk, that is named 'jobDescription.xml'
      # Messy but need to account for xml file being in /tmp/guid dir
      if self.script:
        if os.path.exists(self.script):
          scriptName = os.path.abspath(self.script)
          self.log.verbose('Found script name %s' % scriptName)
        else:
          self.log.warn("File not found", self.script)
      else:
        if xmlFile:
          if os.path.exists(xmlFile):
            self.log.verbose('Found XML File %s' % xmlFile)
            scriptName = xmlFile
          else:
            self.log.warn("File not found", xmlFile)
        else:
          if os.path.exists('jobDescription.xml'):
            scriptName = os.path.abspath('jobDescription.xml')
            self.log.verbose('Found script name %s' % scriptName)
          else:
            self.log.warn("Job description XML file not found")
      self.addToInputSandbox.append(scriptName)

    elif isinstance(jobDescriptionObject, StringIO.StringIO):
      self.log.verbose("jobDescription is passed in as a StringIO object")

    else:
      self.log.error("Where's the job description?")

    arguments.append(os.path.basename(scriptName))
    if 'LogLevel' in paramsDict:
      if paramsDict['LogLevel']['value']:
        arguments.append('-o LogLevel=%s' % (paramsDict['LogLevel']['value']))
      else:
        self.log.warn('Job LogLevel defined with null value')
    if 'DIRACSetup' in paramsDict:
      if paramsDict['DIRACSetup']['value']:
        arguments.append('-o DIRAC/Setup=%s' % (paramsDict['DIRACSetup']['value']))
      else:
        self.log.warn('Job DIRACSetup defined with null value')
    if 'JobMode' in paramsDict:
      if paramsDict['JobMode']['value']:
        arguments.append('-o JobMode=%s' % (paramsDict['JobMode']['value']))
      else:
        self.log.warn('Job Mode defined with null value')
    if 'JobConfigArgs' in paramsDict:
      if paramsDict['JobConfigArgs']['value']:
        arguments.append('%s' % (paramsDict['JobConfigArgs']['value']))
      else:
        self.log.warn('JobConfigArgs defined with null value')
    if self.parametricWFArguments:
      for name, value in self.parametricWFArguments.iteritems():
        arguments.append("-p %s='%s'" % (name, value))

    classadJob.insertAttributeString('Executable', self.executable)
    self.addToOutputSandbox.append(self.stderr)
    self.addToOutputSandbox.append(self.stdout)

    # Extract i/o sandbox parameters from steps and any input data parameters
    # to do when introducing step-level api...

    # To add any additional files to input and output sandboxes
    if self.addToInputSandbox:
      extraFiles = ';'.join(self.addToInputSandbox)
      if 'InputSandbox' in paramsDict:
        currentFiles = paramsDict['InputSandbox']['value']
        finalInputSandbox = currentFiles + ';' + extraFiles
        uniqueInputSandbox = uniqueElements(finalInputSandbox.split(';'))
        paramsDict['InputSandbox']['value'] = ';'.join(uniqueInputSandbox)
        self.log.verbose('Final unique Input Sandbox %s' % (';'.join(uniqueInputSandbox)))
      else:
        paramsDict['InputSandbox'] = {}
        paramsDict['InputSandbox']['value'] = extraFiles
        paramsDict['InputSandbox']['type'] = 'JDL'

    if self.addToOutputSandbox:
      extraFiles = ';'.join(self.addToOutputSandbox)
      if 'OutputSandbox' in paramsDict:
        currentFiles = paramsDict['OutputSandbox']['value']
        finalOutputSandbox = currentFiles + ';' + extraFiles
        uniqueOutputSandbox = uniqueElements(finalOutputSandbox.split(';'))
        paramsDict['OutputSandbox']['value'] = ';'.join(uniqueOutputSandbox)
        self.log.verbose('Final unique Output Sandbox %s' % (';'.join(uniqueOutputSandbox)))
      else:
        paramsDict['OutputSandbox'] = {}
        paramsDict['OutputSandbox']['value'] = extraFiles
        paramsDict['OutputSandbox']['type'] = 'JDL'

    if self.addToInputData:
      extraFiles = ';'.join(self.addToInputData)
      if 'InputData' in paramsDict:
        currentFiles = paramsDict['InputData']['value']
        finalInputData = extraFiles
        if currentFiles:
          finalInputData = currentFiles + ';' + extraFiles
        uniqueInputData = uniqueElements(finalInputData.split(';'))
        paramsDict['InputData']['value'] = ';'.join(uniqueInputData)
        self.log.verbose('Final unique Input Data %s' % (';'.join(uniqueInputData)))
      else:
        paramsDict['InputData'] = {}
        paramsDict['InputData']['value'] = extraFiles
        paramsDict['InputData']['type'] = 'JDL'

    # Handle parameter sequences
    if self.numberOfParameters > 0:
      paramsDict, arguments = self._handleParameterSequences(paramsDict, arguments)

    classadJob.insertAttributeString('Arguments', ' '.join(arguments))

    # Add any JDL parameters to classad obeying lists with ';' rule
    for name, props in paramsDict.iteritems():
      ptype = props['type']
      value = props['value']
      if isinstance(value, basestring) and re.search(';', value):
        value = value.split(';')
      if name.lower() == 'requirements' and ptype == 'JDL':
        self.log.verbose('Found existing requirements: %s' % (value))

      if re.search('^JDL', ptype):
        if isinstance(value, list):
          if isinstance(value[0], list):
            classadJob.insertAttributeVectorStringList(name, value)
          else:
            classadJob.insertAttributeVectorInt(name, value)
        elif isinstance(value, basestring) and value:
          classadJob.insertAttributeInt(name, value)
        elif isinstance(value, (int, long, float)):
          classadJob.insertAttributeInt(name, value)

    if self.numberOfParameters > 0:
      classadJob.insertAttributeInt('Parameters', self.numberOfParameters)

    for fToBeRemoved in [scriptName, self.stdout, self.stderr]:
      try:
        self.addToInputSandbox.remove(fToBeRemoved)
      except ValueError:
        pass

    jdl = classadJob.asJDL()
    start = jdl.find('[')
    end = jdl.rfind(']')
    return jdl[(start + 1):(end - 1)]
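
The sandbox and input-data handling in this example reduces to joining the existing ';'-separated value with the extra files and de-duplicating while preserving order. A minimal, self-contained sketch of that step; unique_elements and merge_jdl_list are illustrative stand-ins (not DIRAC API), assuming uniqueElements behaves as an order-preserving de-duplication:

def unique_elements(seq):
    # Order-preserving de-duplication, as uniqueElements is assumed to behave.
    seen = set()
    return [x for x in seq if not (x in seen or seen.add(x))]

def merge_jdl_list(current, extra):
    # Mirrors the InputSandbox/InputData merging step: join the existing
    # ';'-separated value with the extra files, split, de-duplicate, re-join.
    final = current + ';' + extra if current else extra
    return ';'.join(unique_elements(final.split(';')))

print(merge_jdl_list('a.txt;b.txt', 'b.txt;c.txt'))  # a.txt;b.txt;c.txt
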
Example #36
    for fname in fileList:
      if fname.endswith( '.bak' ):
        gLogger.verbose( 'Ignoring BAK file: %s' % fname )
      elif tarfile.is_tarfile( fname ):
        try:
          gLogger.debug( 'Unpacking catalog XML file %s' % ( os.path.join( directory, fname ) ) )
          tarFile = tarfile.open( os.path.join( directory, fname ), 'r' )
          for member in tarFile.getmembers():
            tarFile.extract( member, directory )
            poolCatalogList.append( os.path.join( directory, member.name ) )
        except Exception, x :
          gLogger.error( 'Could not untar %s with exception %s' % ( fname, str( x ) ) )
      else:
        poolCatalogList.append( fname )

  poolCatalogList = uniqueElements( poolCatalogList )

  #Now have list of all XML files but some may not be Pool XML catalogs...
  finalCatList = []
  for possibleCat in poolCatalogList:
    try:
      _cat = PoolXMLCatalog( possibleCat )
      finalCatList.append( possibleCat )
    except Exception, x:
      gLogger.debug( 'Ignoring non-POOL catalogue file %s' % possibleCat )

  gLogger.debug( 'Final list of catalog files are: %s' % ', '.join( finalCatList ) )

  return finalCatList

#############################################################################
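
The catalog discovery loop above filters a directory listing (skipping .bak files, unpacking tarballs) and then de-duplicates the result; the final PoolXMLCatalog parse is what removes non-POOL files. A minimal sketch of the gathering step, with collect_catalog_candidates as a hypothetical helper and an inline order-preserving de-duplication standing in for uniqueElements:

import os
import tarfile

def collect_catalog_candidates(directory, file_list):
    # Gather candidate catalog files: skip backup files, unpack tarballs and
    # keep their members, keep plain files as-is, then drop duplicates.
    candidates = []
    for fname in file_list:
        path = os.path.join(directory, fname)
        if fname.endswith('.bak'):
            continue
        if tarfile.is_tarfile(path):
            with tarfile.open(path, 'r') as tar:
                for member in tar.getmembers():
                    tar.extract(member, directory)
                    candidates.append(os.path.join(directory, member.name))
        else:
            candidates.append(path)
    seen = set()
    return [c for c in candidates if not (c in seen or seen.add(c))]
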
Example #37
  def obtainWMSJobIDs(self, transformation, fileDict, selectDelay, wmsStatusList):
    """ Group files by the corresponding WMS jobIDs, check the corresponding
        jobs have not been updated for the delay time.  Can't get into any 
        mess because we start from files only in MaxReset / Assigned and check
        corresponding jobs.  Mixtures of files for jobs in MaxReset and Assigned 
        statuses only possibly include some files in Unused status (not Processed 
        for example) that will not be touched.
    """
    prodJobIDs = uniqueElements(fileDict.values())
    self.log.info('The following %s production jobIDs apply to the selected files:\n%s' % (len(prodJobIDs), prodJobIDs))

    jobFileDict = {}
    condDict = {'TransformationID' : transformation, self.taskIDName : prodJobIDs}
    delta = datetime.timedelta( hours = selectDelay )
    now = dateTime()
    olderThan = now-delta

    res = self.prodDB.getTransformationTasks(condDict = condDict, older = olderThan,
                                             timeStamp = 'LastUpdateTime', inputVector = True)
    self.log.debug(res)
    if not res['OK']:
      self.log.error('getTransformationTasks returned an error:\n%s' % res['Message'])
      return res
    
    for jobDict in res['Value']:
      missingKey = False
      for key in [self.taskIDName, self.externalID, 'LastUpdateTime', self.externalStatus, 'InputVector']:
        if not jobDict.has_key(key):
          self.log.info('Missing key %s for job dictionary, the following is available:\n%s' % (key, jobDict))
          missingKey = True
          continue
      
      if missingKey:
        continue
        
      job = jobDict[self.taskIDName]
      wmsID = jobDict[self.externalID]
      lastUpdate = jobDict['LastUpdateTime']
      wmsStatus = jobDict[self.externalStatus]
      jobInputData = jobDict['InputVector']
      jobInputData = [lfn.replace('LFN:','') for lfn in jobInputData.split(';')]
      
      if not int(wmsID):
        self.log.info('Prod job %s status is %s (ID = %s) so will not recheck with WMS' %(job, wmsStatus, wmsID))
        continue
      
      self.log.info('Job %s, prod job %s last update %s, production management system status %s' % (wmsID, job, lastUpdate, wmsStatus))
      #Exclude jobs not having appropriate WMS status - have to trust that production management status is correct        
      if not wmsStatus in wmsStatusList:
        self.log.info('Job %s is in status %s, not %s so will be ignored' % (wmsID, wmsStatus, string.join(wmsStatusList, ', ')))
        continue
        
      finalJobData = []
      #Must map unique files -> jobs in expected state
      for lfn,prodID in fileDict.items():
        if int(prodID) == int(job):
          finalJobData.append(lfn)
      
      self.log.info('Found %s files for job %s' % (len(finalJobData), job))    
      jobFileDict[wmsID] = finalJobData
 
    return S_OK(jobFileDict)
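
Stripped of the status and LastUpdateTime filtering, obtainWMSJobIDs inverts a file-to-task mapping into a WMS-job-to-files mapping. A rough sketch under that reading; group_files_by_job and its inputs are hypothetical, not DIRAC API:

def group_files_by_job(file_dict, task_to_wms_id):
    # file_dict maps LFN -> production task ID; task_to_wms_id maps task ID ->
    # WMS job ID.  Tasks without a WMS ID are skipped, as in the code above.
    job_file_dict = {}
    for lfn, prod_id in file_dict.items():
        wms_id = task_to_wms_id.get(int(prod_id))
        if not wms_id:
            continue
        job_file_dict.setdefault(wms_id, []).append(lfn)
    return job_file_dict

print(group_files_by_job({'/lhcb/a.dst': '12', '/lhcb/b.dst': '12', '/lhcb/c.dst': '13'},
                         {12: '10001', 13: '10002'}))
# {'10001': ['/lhcb/a.dst', '/lhcb/b.dst'], '10002': ['/lhcb/c.dst']}
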
Example #38
 def registerUser(self, paramcopy):
     # Unfortunately there is no way to get rid of empty text values in JS, so I have to hardcode it on the server side. Hate it!
     default_values = [
         "John Smith", "jsmith", "*****@*****.**",
         "+33 9 10 00 10 00", "Select prefered virtual organization(s)"
     ]
     default_values.append("Select your country")
     default_values.append(
         "Any additional information you want to provide to administrators")
     dn = getUserDN()
     username = getUsername()
     if not username == "anonymous":
         return {
             "success":
             "false",
             "error":
             "You are already registered in DIRAC with username: %s" %
             username
         }
     else:
         if not dn:
             return {
                 "success":
                 "false",
                 "error":
                 "You have to load certificate to your browser before trying to register"
             }
     body = ""
     userMail = False
     vo = []
     for i in paramcopy:
         if not paramcopy[i] in default_values:
             if i == "email":
                 userMail = paramcopy[i]
             if i == "vo":
                 vo = paramcopy[i].split(",")
             body = body + str(i) + ' - "' + str(paramcopy[i]) + '"\n'
     if not userMail:
         return {
             "success": "false",
             "error": "Can not get your email from the request"
         }
     gLogger.info("!!! VO: ", vo)
     # TODO Check for previous requests
     if not len(vo) > 0:
         mails = gConfig.getValue("/Website/UserRegistrationEmail", [])
     else:
         mails = []
         for i in vo:
             i = i.strip()
             voadm = gConfig.getValue("/Registry/VO/%s/VOAdmin" % i, "")
             failsafe = False
             if voadm:
                 tmpmail = gConfig.getValue(
                     "/Registry/Users/%s/Email" % voadm, "")
                 if tmpmail:
                     mails.append(tmpmail)
                 else:
                     gLogger.error(
                         "Can not find value for option /Registry/Users/%s/Email Trying failsafe option"
                         % voadm)
                     failsafe = True
             else:
                 gLogger.error(
                     "Can not find value for option /Registry/VO/%s/VOAdmin Trying failsafe option"
                     % i)
                 failsafe = True
             if failsafe:
                 failsafe = gConfig.getValue(
                     "/Website/UserRegistrationEmail", [])
                 if len(failsafe) > 0:
                     for j in failsafe:
                         mails.append(j)
                 else:
                     gLogger.error(
                         "Can not find value for failsafe option /Website/UserRegistrationEmail User registration for VO %s is failed"
                         % i)
     mails = uniqueElements(mails)
     if not len(mails) > 0:
         groupList = list()
         allGroups = gConfig.getSections("/Registry/Groups")
         if not allGroups["OK"]:
             return {
                 "success": "false",
                 "error": "No groups found at this DIRAC installation"
             }
         allGroups = allGroups["Value"]
         for j in allGroups:
             props = getProperties(j)
             if "UserAdministrator" in props:  # property which usd for user administration
                 groupList.append(j)
         groupList = uniqueElements(groupList)
         if not len(groupList) > 0:
             return {
                 "success": "false",
                 "error":
                 "No groups, resposible for user administration, found"
             }
         userList = list()
         for i in groupList:
             users = gConfig.getValue("/Registry/Groups/%s/Users" % i, [])
             for j in users:
                 userList.append(j)
         userList = uniqueElements(userList)
         if not len(userList) > 0:
             return {
                 "success":
                 "false",
                 "error":
                 "Can not find a person resposible for user administration, your request can not be approuved"
             }
         mails = list()
         mail2name = dict()
         for i in userList:
             tmpmail = gConfig.getValue("/Registry/Users/%s/Email" % i, "")
             if tmpmail:
                 mails.append(tmpmail)
             else:
                 gLogger.error(
                     "Can not find value for option /Registry/Users/%s/Email"
                     % i)
         mails = uniqueElements(mails)
         if not len(mails) > 0:
             return {
                 "success":
                 "false",
                 "error":
                 "Can not find an email of the person resposible for the users administration, your request can not be approuved"
             }
     gLogger.info("Admins emails: ", mails)
     if not len(mails) > 0:
         return {
             "success": "false",
             "error": "Can not find any emails of DIRAC Administrators"
         }
     allUsers = gConfig.getSections("/Registry/Users")
     if not allUsers["OK"]:
         return {
             "success": "false",
             "error": "No users found at this DIRAC installation"
         }
     allUsers = allUsers["Value"]
     mail2name = dict()
     for i in allUsers:
         tmpmail = gConfig.getValue("/Registry/Users/%s/Email" % i, "")
         if tmpmail in mails:
             mail2name[tmpmail] = gConfig.getValue(
                 "/Registry/Users/%s/FullName" % i, i)
     sentFailed = list()
     sentSuccess = list()
     errorMessage = list()
     ntc = NotificationClient(getRPCClient)
     for i in mails:
         i = i.strip()
         result = ntc.sendMail(i, "New user has registered", body, userMail,
                               False)
         if not result["OK"]:
             sentFailed.append(mail2name[i])
             errorMessage.append(result["Message"])
         else:
             sentSuccess.append(mail2name[i])
     gLogger.info("Sent success: ", sentSuccess)
     gLogger.info("Sent failure: ", sentFailed)
     errorMessage = uniqueElements(errorMessage)
     if len(sentSuccess) == 0:
         if not len(errorMessage) > 0:
             return {
                 "success":
                 "false",
                 "error":
                 "No messages were sent to administrators due techincal reasons"
             }
         errorMessage = ", ".join(errorMessage)
         return {"success": "false", "error": errorMessage}
     sName = ", ".join(sentSuccess)
     fName = ", ".join(sentFailed)
     if len(sentFailed) > 0:
         return {
             "success":
             "true",
             "result":
             "Your registration request were sent successfuly to %s. Failed to sent request to %s."
             % (sName, fName)
         }
     return {
         "success":
         "true",
         "result":
         "Your registration request were sent successfuly to %s." % sName
     }
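
The admin-lookup part of this handler is a fallback chain: the e-mail of each requested VO's admin is taken from the configuration, with /Website/UserRegistrationEmail as the per-VO failsafe, and uniqueElements removes duplicates at the end. A condensed sketch with plain dictionaries standing in for the gConfig lookups; resolve_admin_mails and all inputs are hypothetical, and the empty-VO and group-property fallbacks are omitted:

def resolve_admin_mails(vo_list, vo_admins, user_emails, failsafe_mails):
    # For each requested VO, take the VO admin's e-mail if configured,
    # otherwise fall back to the failsafe list; de-duplicate at the end.
    mails = []
    for vo in vo_list:
        admin = vo_admins.get(vo.strip())
        mail = user_emails.get(admin, '') if admin else ''
        if mail:
            mails.append(mail)
        else:
            mails.extend(failsafe_mails)
    seen = set()
    return [m for m in mails if not (m in seen or seen.add(m))]
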
Example #39
    def registerUser(self):
        """
    This function is used to notify DIRAC admins about user registration request
    The logic is simple:
    0) Check if request from this e-mail has already registered or not
    1) Send mail to VO admin of requested VO
    2) Send mail to users in group with UserAdministrator property
    3) Send mail to users indicated in /Website/UserRegistrationAdmin option
    """

        gLogger.info("Start processing a registration request")

        checkUserCredentials()
        # Check for having a DN but no username
        dn = getUserDN()
        if not dn:
            error = "Certificate is not loaded in the browser or DN is absent"
            gLogger.error("Service response: %s" % error)
            return {"success": "false", "error": error}
        username = getUsername()
        if not username == "anonymous":
            error = "You are already registered in DIRAC with username: %s" % username
            gLogger.error("Service response: %s" % error)
            return {"success": "false", "error": error}
        gLogger.info("DN: %s" % dn)

        if not "email" in request.params:
            error = "Can not get your email address from the request"
            gLogger.debug("Service response: %s" % error)
            return {"success": "false", "error": error}
        userMail = request.params["email"]

        result = self.isRequested(userMail)
        gLogger.debug(result)
        if result["OK"]:
            return render("/reg_done.mako")

        result = self.registerRequest(dn, userMail)
        gLogger.debug(result)
        if not result["OK"]:
            return {"success": "false", "error": result["Message"]}

        vo = fromChar(request.params["vo"])
        if not vo:
            error = "You should indicate a VirtualOrganization for membership"
            gLogger.debug("Service response: %s" % error)
            return {"success": "false", "error": error}
        gLogger.info("User want to be register in VO(s): %s" % vo)

        body = str()
        for i in request.params:
            if not i in ["registration_request", "email", "vo"]:
                text = self.checkUnicode(request.params[i])
                info = "%s - %s" % (i, text)
                body = body + info + "\n"
        body = body + "DN - " + dn
        gLogger.debug("email body: %s" % body)

        adminList = self.__getAdminList(vo)
        if not len(adminList) > 0:
            error = "Can't get in contact with administrators about your request\n"
            error = error + "Most likely this DIRAC instance is not configured yet"
            gLogger.debug("Service response: %s" % error)
            return {"success": "false", "error": error}
        adminList = uniqueElements(adminList)
        gLogger.info("Chosen admin(s): %s" % adminList)

        sendDict = self.getMailDict(adminList)
        if not len(sendDict) > 0:
            error = "Can't get in contact with administrators about your request\n"
            error = error + "Most likely this DIRAC instance is not configured yet"
            gLogger.debug("Service response: %s" % error)
            return {"success": "false", "error": error}
        gLogger.debug("Final dictionary with mails to be used %s" % sendDict)

        if socket.gethostname().find('.') >= 0:
            hostname = socket.gethostname()
        else:
            hostname = socket.gethostbyaddr(socket.gethostname())[0]
        title = "New user has sent registration request to %s" % hostname

        return self.sendMail(sendDict, title, body, userMail)
Example #40
    def _toJDL(
        self, xmlFile="", jobDescriptionObject=None
    ):  # messy but need to account for xml file being in /tmp/guid dir
        """Creates a JDL representation of itself as a Job.
    """
        # Check if we have to do old bootstrap...
        classadJob = ClassAd("[]")

        paramsDict = {}
        params = self.workflow.parameters  # ParameterCollection object

        paramList = params
        for param in paramList:
            paramsDict[param.getName()] = {"type": param.getType(), "value": param.getValue()}

        arguments = []
        scriptname = "jobDescription.xml"

        if jobDescriptionObject is None:
            # if we are here it's because there's a real file, on disk, that is named 'jobDescription.xml'
            if self.script:
                if os.path.exists(self.script):
                    scriptname = os.path.abspath(self.script)
                    self.log.verbose("Found script name %s" % scriptname)
                else:
                    self.log.error("File not found", self.script)
            else:
                if xmlFile:
                    self.log.verbose("Found XML File %s" % xmlFile)
                    scriptname = xmlFile
            self.addToInputSandbox.append(scriptname)

        elif isinstance(jobDescriptionObject, StringIO.StringIO):
            self.log.verbose("jobDescription is passed in as a StringIO object")

        else:
            self.log.error("Where's the job description?")

        arguments.append(os.path.basename(scriptname))
        if paramsDict.has_key("LogLevel"):
            if paramsDict["LogLevel"]["value"]:
                arguments.append("-o LogLevel=%s" % (paramsDict["LogLevel"]["value"]))
            else:
                self.log.warn("Job LogLevel defined with null value")
        if paramsDict.has_key("DIRACSetup"):
            if paramsDict["DIRACSetup"]["value"]:
                arguments.append("-o DIRAC/Setup=%s" % (paramsDict["DIRACSetup"]["value"]))
            else:
                self.log.warn("Job DIRACSetup defined with null value")
        if paramsDict.has_key("JobMode"):
            if paramsDict["JobMode"]["value"]:
                arguments.append("-o JobMode=%s" % (paramsDict["JobMode"]["value"]))
            else:
                self.log.warn("Job Mode defined with null value")
        if paramsDict.has_key("JobConfigArgs"):
            if paramsDict["JobConfigArgs"]["value"]:
                arguments.append("%s" % (paramsDict["JobConfigArgs"]["value"]))
            else:
                self.log.warn("JobConfigArgs defined with null value")

        classadJob.insertAttributeString("Executable", self.executable)
        self.addToOutputSandbox.append(self.stderr)
        self.addToOutputSandbox.append(self.stdout)

        # Extract i/o sandbox parameters from steps and any input data parameters
        # to do when introducing step-level api...

        # To add any additional files to input and output sandboxes
        if self.addToInputSandbox:
            extraFiles = ";".join(self.addToInputSandbox)
            if paramsDict.has_key("InputSandbox"):
                currentFiles = paramsDict["InputSandbox"]["value"]
                finalInputSandbox = currentFiles + ";" + extraFiles
                uniqueInputSandbox = uniqueElements(finalInputSandbox.split(";"))
                paramsDict["InputSandbox"]["value"] = ";".join(uniqueInputSandbox)
                self.log.verbose("Final unique Input Sandbox %s" % (";".join(uniqueInputSandbox)))
            else:
                paramsDict["InputSandbox"] = {}
                paramsDict["InputSandbox"]["value"] = extraFiles
                paramsDict["InputSandbox"]["type"] = "JDL"

        if self.addToOutputSandbox:
            extraFiles = ";".join(self.addToOutputSandbox)
            if paramsDict.has_key("OutputSandbox"):
                currentFiles = paramsDict["OutputSandbox"]["value"]
                finalOutputSandbox = currentFiles + ";" + extraFiles
                uniqueOutputSandbox = uniqueElements(finalOutputSandbox.split(";"))
                paramsDict["OutputSandbox"]["value"] = ";".join(uniqueOutputSandbox)
                self.log.verbose("Final unique Output Sandbox %s" % (";".join(uniqueOutputSandbox)))
            else:
                paramsDict["OutputSandbox"] = {}
                paramsDict["OutputSandbox"]["value"] = extraFiles
                paramsDict["OutputSandbox"]["type"] = "JDL"

        if self.addToInputData:
            extraFiles = ";".join(self.addToInputData)
            if paramsDict.has_key("InputData"):
                currentFiles = paramsDict["InputData"]["value"]
                finalInputData = extraFiles
                if currentFiles:
                    finalInputData = currentFiles + ";" + extraFiles
                uniqueInputData = uniqueElements(finalInputData.split(";"))
                paramsDict["InputData"]["value"] = ";".join(uniqueInputData)
                self.log.verbose("Final unique Input Data %s" % (";".join(uniqueInputData)))
            else:
                paramsDict["InputData"] = {}
                paramsDict["InputData"]["value"] = extraFiles
                paramsDict["InputData"]["type"] = "JDL"

        # Handle here the Parametric values
        if self.parametric:
            for pType in ["InputData", "InputSandbox"]:
                if self.parametric.has_key(pType):
                    if paramsDict.has_key(pType) and paramsDict[pType]["value"]:
                        pData = self.parametric[pType]
                        # List of lists case
                        currentFiles = paramsDict[pType]["value"].split(";")
                        tmpList = []
                        if type(pData[0]) == list:
                            for pElement in pData:
                                tmpList.append(currentFiles + pElement)
                        else:
                            for pElement in pData:
                                tmpList.append(currentFiles + [pElement])
                        self.parametric[pType] = tmpList

                    paramsDict[pType] = {}
                    paramsDict[pType]["value"] = "%s"
                    paramsDict[pType]["type"] = "JDL"
                    self.parametric["files"] = self.parametric[pType]
                    arguments.append(" -p Parametric" + pType + "=%s")
                    break

            if self.parametric.has_key("files"):
                paramsDict["Parameters"] = {}
                paramsDict["Parameters"]["value"] = self.parametric["files"]
                paramsDict["Parameters"]["type"] = "JDL"
            if self.parametric.has_key("GenericParameters"):
                paramsDict["Parameters"] = {}
                paramsDict["Parameters"]["value"] = self.parametric["GenericParameters"]
                paramsDict["Parameters"]["type"] = "JDL"
                arguments.append(" -p ParametricParameters=%s")
        ##This needs to be put here so that the InputData and/or InputSandbox parameters for parametric jobs are processed
        classadJob.insertAttributeString("Arguments", " ".join(arguments))

        # Add any JDL parameters to classad obeying lists with ';' rule
        for name, props in paramsDict.items():
            ptype = props["type"]
            value = props["value"]
            if name.lower() == "requirements" and ptype == "JDL":
                self.log.verbose("Found existing requirements: %s" % (value))

            if re.search("^JDL", ptype):
                if type(value) == list:
                    if type(value[0]) == list:
                        classadJob.insertAttributeVectorStringList(name, value)
                    else:
                        classadJob.insertAttributeVectorString(name, value)
                elif value == "%s":
                    classadJob.insertAttributeInt(name, value)
                elif not re.search(";", value) or name == "GridRequirements":  # not a nice fix...
                    classadJob.insertAttributeString(name, value)
                else:
                    classadJob.insertAttributeVectorString(name, value.split(";"))

        for fToBeRemoved in [scriptname, self.stdout, self.stderr]:
            try:
                self.addToInputSandbox.remove(fToBeRemoved)
            except ValueError:
                pass

        jdl = classadJob.asJDL()
        start = jdl.find("[")
        end = jdl.rfind("]")
        return jdl[(start + 1) : (end - 1)]
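
The parametric branch above combines the job-level InputData/InputSandbox files with each parametric element, producing one file list per parametric job. A small sketch of that expansion; expand_parametric is an illustrative helper, not DIRAC API:

def expand_parametric(current_files, parametric_values):
    # Each parametric element is combined with the job-level file list,
    # yielding one list per parametric job (scalars are wrapped so the
    # list-of-lists case is also covered).
    expanded = []
    for element in parametric_values:
        extra = element if isinstance(element, list) else [element]
        expanded.append(current_files + extra)
    return expanded

print(expand_parametric(['common.xml'], ['run1.dat', 'run2.dat']))
# [['common.xml', 'run1.dat'], ['common.xml', 'run2.dat']]
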
Example #41
 def __createQuery(self,args):
   """ Create the metadata query out of the command line arguments
   """    
   argss = args.split()
   result = self.fc.getMetadataFields()
   if not result['OK']:
     print ("Error: %s" % result['Message']) 
     return None
   if not result['Value']:
     print "Error: no metadata fields defined"
     return None
   typeDict = result['Value']
   metaDict = {}
   contMode = False
   for arg in argss:
     if not contMode:
       operation = ''
       for op in ['>','<','>=','<=','!=','=']:
         if arg.find(op) != -1:
           operation = op
           break
       if not operation:
         
         print "Error: operation is not found in the query"
         return None
         
       name,value = arg.split(operation)
       if not name in typeDict:
         print "Error: metadata field %s not defined" % name
         return None
       mtype = typeDict[name]
     else:
       value += ' ' + arg
       value = value.replace(contMode,'')
       contMode = False  
     
     if value[0] == '"' or value[0] == "'":
       if value[-1] != '"' and value != "'":
         contMode = value[0]
         continue 
     
     if value.find(',') != -1:
       valueList = [ x.replace("'","").replace('"','') for x in value.split(',') ]
       mvalue = valueList
       if mtype[0:3].lower() == 'int':
         mvalue = [ int(x) for x in valueList if not x in ['Missing','Any'] ]
         mvalue += [ x for x in valueList if x in ['Missing','Any'] ]
       if mtype[0:5].lower() == 'float':
         mvalue = [ float(x) for x in valueList if not x in ['Missing','Any'] ]
         mvalue += [ x for x in valueList if x in ['Missing','Any'] ]
       if operation == "=":
         operation = 'in'
       if operation == "!=":
         operation = 'nin'    
       mvalue = {operation:mvalue}  
     else:            
       mvalue = value.replace("'","").replace('"','')
       if not value in ['Missing','Any']:
         if mtype[0:3].lower() == 'int':
           mvalue = int(value)
         if mtype[0:5].lower() == 'float':
           mvalue = float(value)               
       if operation != '=':     
         mvalue = {operation:mvalue}      
                               
     if name in metaDict:
       if type(metaDict[name]) == DictType:
         if type(mvalue) == DictType:
           op,value = mvalue.items()[0]
           if op in metaDict[name]:
             if type(metaDict[name][op]) == ListType:
               if type(value) == ListType:
                 metaDict[name][op] = uniqueElements(metaDict[name][op] + value)
               else:
                 metaDict[name][op] = uniqueElements(metaDict[name][op] + [value])
             else:
               if type(value) == ListType:
                 metaDict[name][op] = uniqueElements([metaDict[name][op]] + value)
               else:
                 metaDict[name][op] = uniqueElements([metaDict[name][op],value])       
           else:
             metaDict[name].update(mvalue)
         else:
           if type(mvalue) == ListType:
             metaDict[name].update({'in':mvalue})
           else:  
             metaDict[name].update({'=':mvalue})
       elif type(metaDict[name]) == ListType:   
         if type(mvalue) == DictType:
           metaDict[name] = {'in':metaDict[name]}
           metaDict[name].update(mvalue)
         elif type(mvalue) == ListType:
           metaDict[name] = uniqueElements(metaDict[name] + mvalue)
         else:
           metaDict[name] = uniqueElements(metaDict[name] + [mvalue])
       else:
         if type(mvalue) == DictType:
           metaDict[name] = {'=':metaDict[name]}
           metaDict[name].update(mvalue)
         elif type(mvalue) == ListType:
           metaDict[name] = uniqueElements([metaDict[name]] + mvalue)
         else:
           metaDict[name] = uniqueElements([metaDict[name],mvalue])          
     else:            
       metaDict[name] = mvalue         
   
   return metaDict 
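
The core of __createQuery is the value-coercion rule: comma-separated values become an 'in'/'nin' list typed according to the metadata field, while single values keep their operator unless it is a plain '='. A condensed sketch of just that rule; coerce_value is an illustrative helper, and float fields plus the quoted-string continuation mode are omitted:

def coerce_value(value, mtype, operation):
    # Comma-separated value -> typed list under 'in'/'nin'; single value ->
    # typed scalar, wrapped in {operation: value} unless the operator is '='.
    if ',' in value:
        values = [v.strip('\'"') for v in value.split(',')]
        if mtype.lower().startswith('int'):
            values = [int(v) if v not in ('Missing', 'Any') else v for v in values]
        op = {'=': 'in', '!=': 'nin'}.get(operation, operation)
        return {op: values}
    value = value.strip('\'"')
    if value not in ('Missing', 'Any') and mtype.lower().startswith('int'):
        value = int(value)
    return value if operation == '=' else {operation: value}

print(coerce_value('1,2,Any', 'INT', '='))  # {'in': [1, 2, 'Any']}
print(coerce_value('5', 'INT', '>'))        # {'>': 5}
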
Example #42
    def __getSiteCandidates(self, inputData):
        """This method returns a list of possible site candidates based on the
       job input data requirement.  For each site candidate, the number of files
       on disk and tape is resolved.
    """

        fileSEs = {}
        for lfn, replicas in inputData.items():
            siteList = []
            for se in replicas.keys():
                sites = self.__getSitesForSE(se)
                if sites['OK']:
                    siteList += sites['Value']
            fileSEs[lfn] = uniqueElements(siteList)

        siteCandidates = []
        i = 0
        for fileName, sites in fileSEs.items():
            if not i:
                siteCandidates = sites
            else:
                tempSite = []
                for site in siteCandidates:
                    if site in sites:
                        tempSite.append(site)
                siteCandidates = tempSite
            i += 1

        if not len(siteCandidates):
            return S_ERROR('No candidate sites available')

        #In addition, check number of files on tape and disk for each site
        #for optimizations during scheduling
        siteResult = {}
        for site in siteCandidates:
            siteResult[site] = {'disk': [], 'tape': []}

        seDict = {}
        for lfn, replicas in inputData.items():
            for se in replicas.keys():
                if se not in seDict:
                    sites = self.__getSitesForSE(se)
                    if not sites['OK']:
                        continue
                    try:
                        #storageElement = StorageElement( se )
                        result = self.resourceStatus.getStorageElementStatus(
                            se, statusType='ReadAccess')
                        if not result['OK']:
                            continue
                        seDict[se] = {
                            'Sites': sites['Value'],
                            'SEParams': result['Value'][se]
                        }
                        result = self.resourcesHelper.getStorageElementOptionsDict(
                            se)
                        if not result['OK']:
                            continue
                        seDict[se]['SEParams'].update(result['Value'])
                    except Exception:
                        self.log.exception(
                            'Failed to instantiate StorageElement( %s )' % se)
                        continue
                for site in seDict[se]['Sites']:
                    if site in siteCandidates:
                        if seDict[se]['SEParams']['ReadAccess'] and seDict[se][
                                'SEParams']['DiskSE']:
                            if lfn not in siteResult[site]['disk']:
                                siteResult[site]['disk'].append(lfn)
                                if lfn in siteResult[site]['tape']:
                                    siteResult[site]['tape'].remove(lfn)
                        if seDict[se]['SEParams']['ReadAccess'] and seDict[se][
                                'SEParams']['TapeSE']:
                            if lfn not in siteResult[site][
                                    'tape'] and lfn not in siteResult[site][
                                        'disk']:
                                siteResult[site]['tape'].append(lfn)

        for site in siteResult:
            siteResult[site]['disk'] = len(siteResult[site]['disk'])
            siteResult[site]['tape'] = len(siteResult[site]['tape'])
        return S_OK(siteResult)
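
The first half of __getSiteCandidates is a set intersection: a site survives only if it appears in the site list of every input file. A minimal sketch of that step; common_sites and the site names are illustrative:

def common_sites(file_sites):
    # file_sites maps LFN -> list of sites able to serve it; a candidate site
    # must appear in every per-file list.
    candidates = None
    for sites in file_sites.values():
        if candidates is None:
            candidates = list(sites)
        else:
            candidates = [site for site in candidates if site in sites]
    return candidates or []

print(common_sites({'/lfn/1': ['LCG.CERN.ch', 'LCG.PIC.es'],
                    '/lfn/2': ['LCG.CERN.ch']}))  # ['LCG.CERN.ch']
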
Example #43
 def _prepareRemoteHost( self, host = None ):
   """ Prepare remote directories and upload control script 
   """
   
   ssh = SSH( host = host, parameters = self.ceParameters )
   
   # Make remote directories
   dirTuple = tuple ( uniqueElements( [self.sharedArea, 
                                       self.executableArea, 
                                       self.infoArea, 
                                       self.batchOutput, 
                                       self.batchError,
                                       self.workArea] ) )
   nDirs = len( dirTuple )
   cmd = 'mkdir -p %s; '*nDirs % dirTuple
   cmd = "bash -c '%s'" % cmd
   self.log.verbose( 'Creating working directories on %s' % self.ceParameters['SSHHost'] )
   result = ssh.sshCall( 30, cmd )
   if not result['OK']:
     self.log.warn( 'Failed creating working directories: %s' % result['Message'][1] )
     return result
   status, output, _error = result['Value']
   if status == -1:
     self.log.warn( 'Timeout while creating directories' )
     return S_ERROR( 'Timeout while creating directories' )
   if "cannot" in output:
     self.log.warn( 'Failed to create directories: %s' % output )
     return S_ERROR( 'Failed to create directories: %s' % output )
   
   # Upload the control script now
   batchSystemDir = os.path.join( rootPath, "DIRAC", "Resources", "Computing", "BatchSystems" )
   batchSystemScript = os.path.join( batchSystemDir, '%s.py' % self.batchSystem )
   batchSystemExecutor = os.path.join( batchSystemDir, 'executeBatch.py' )
   self.log.verbose( 'Uploading %s script to %s' % ( self.batchSystem, self.ceParameters['SSHHost'] ) )
   remoteScript = '%s/execute_batch' % self.sharedArea
   result = ssh.scpCall( 30, 
                         '%s %s' % ( batchSystemScript, batchSystemExecutor ), 
                         remoteScript,
                         postUploadCommand = 'chmod +x %s' % remoteScript )
   if not result['OK']:
     self.log.warn( 'Failed uploading control script: %s' % result['Message'][1] )
     return result
   status, output, _error = result['Value']
   if status != 0:
     if status == -1:
       self.log.warn( 'Timeout while uploading control script' )
       return S_ERROR( 'Timeout while uploading control script' )
     else:  
       self.log.warn( 'Failed uploading control script: %s' % output )
       return S_ERROR( 'Failed uploading control script' )
     
   # Chmod the control scripts
   #self.log.verbose( 'Chmod +x control script' )
   #result = ssh.sshCall( 10, "chmod +x %s/%s" % ( self.sharedArea, self.controlScript ) )
   #if not result['OK']:
   #  self.log.warn( 'Failed chmod control script: %s' % result['Message'][1] )
   #  return result
   #status, output, _error = result['Value']
   #if status != 0:
   #  if status == -1:
   #    self.log.warn( 'Timeout while chmod control script' )
   #    return S_ERROR( 'Timeout while chmod control script' )
   #  else:  
   #    self.log.warn( 'Failed uploading chmod script: %s' % output )
   #    return S_ERROR( 'Failed uploading chmod script' )
   
   return S_OK()
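
The directory-creation command above is built by repeating a 'mkdir -p %s; ' template once per unique directory and wrapping the result in a single remote bash invocation. The same construction in isolation; make_mkdir_command and the paths are illustrative:

def make_mkdir_command(directories):
    # One 'mkdir -p' per unique directory, chained into one bash -c call.
    seen = set()
    unique_dirs = tuple(d for d in directories if not (d in seen or seen.add(d)))
    cmd = 'mkdir -p %s; ' * len(unique_dirs) % unique_dirs
    return "bash -c '%s'" % cmd

print(make_mkdir_command(['/opt/dirac/work', '/opt/dirac/info', '/opt/dirac/work']))
# bash -c 'mkdir -p /opt/dirac/work; mkdir -p /opt/dirac/info; '
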
Example #44
  def __getSiteCandidates( self, inputData ):
    """This method returns a list of possible site candidates based on the
       job input data requirement.  For each site candidate, the number of files
       on disk and tape is resolved.
    """

    fileSEs = {}
    for lfn, replicas in inputData.items():
      siteList = []
      for se in replicas.keys():
        sites = self.__getSitesForSE( se )
        if sites['OK']:
          siteList += sites['Value']
      fileSEs[lfn] = uniqueElements( siteList )

    siteCandidates = []
    i = 0
    for _fileName, sites in fileSEs.items():
      if not i:
        siteCandidates = sites
      else:
        tempSite = []
        for site in siteCandidates:
          if site in sites:
            tempSite.append( site )
        siteCandidates = tempSite
      i += 1

    if not len( siteCandidates ):
      return S_ERROR( 'No candidate sites available' )

    #In addition, check number of files on tape and disk for each site
    #for optimizations during scheduling
    siteResult = {}
    for site in siteCandidates:
      siteResult[site] = { 'disk': [], 'tape': [] }

    seDict = {}
    for lfn, replicas in inputData.items():
      for se in replicas.keys():
        if se not in seDict:
          sites = self.__getSitesForSE( se )
          if not sites['OK']:
            continue
          try:
            #storageElement = StorageElement( se )
            result = self.resourceStatus.getStorageElementStatus( se, statusType = 'ReadAccess' )
            if not result['OK']:
              continue
            seDict[se] = { 'Sites': sites['Value'], 'SEParams': result['Value'][se] }
            result = getStorageElementOptions( se )
            if not result['OK']:
              continue
            seDict[se]['SEParams'].update(result['Value'])
          except Exception:
            self.log.exception( 'Failed to instantiate StorageElement( %s )' % se )
            continue
        for site in seDict[se]['Sites']:
          if site in siteCandidates:
            if seDict[se]['SEParams']['ReadAccess'] and seDict[se]['SEParams']['DiskSE']:
              if lfn not in siteResult[site]['disk']:
                siteResult[site]['disk'].append( lfn )
                if lfn in siteResult[site]['tape']:
                  siteResult[site]['tape'].remove( lfn )
            if seDict[se]['SEParams']['ReadAccess'] and seDict[se]['SEParams']['TapeSE']:
              if lfn not in siteResult[site]['tape'] and lfn not in siteResult[site]['disk']:
                siteResult[site]['tape'].append( lfn )

    for site in siteResult:
      siteResult[site]['disk'] = len( siteResult[site]['disk'] )
      siteResult[site]['tape'] = len( siteResult[site]['tape'] )
    return S_OK( siteResult )
Example #45
 def registerUser(self,paramcopy):
   gLogger.info("Start processing a registration request")
   """
   Unfortunately there is no way to get rid of empty text values in JS,
   so I have to hardcode it on the server side. Hate it!
   """
   default_values = ["John Smith","jsmith","*****@*****.**","+33 9 10 00 10 00"]
   default_values.append("Select preferred virtual organization(s)")
   default_values.append("Select your country")
   default_values.append("Any additional information you want to provide to administrators")
   # Check for having a DN but no username
   dn = getUserDN()
   username = getUsername()
   gLogger.debug("User's DN: %s and DIRAC username: %s" % (dn, username))
   if not username == "anonymous":
     error = "You are already registered in DIRAC with username: %s" % username
     gLogger.debug("Service response: %s" % error)
     return {"success":"false","error":error}
   else:
     if not dn:
       error = "Certificate is not loaded to a browser or DN is absent"
       gLogger.debug("Service response: %s" % error)
       return {"success":"false","error":error}
   body = ""
   userMail = False
   vo = []
   # Check for user's email, creating mail body
   gLogger.debug("Request's body:")
   for i in paramcopy:
     gLogger.debug("%s - %s" % (i,paramcopy[i]))
     if not paramcopy[i] in default_values:
       if i == "email":
         userMail = paramcopy[i]
       if i == "vo":
         vo = paramcopy[i].split(",")
       body = body + str(i) + ' - "' + str(paramcopy[i]) + '"\n'
   if not userMail:
     error = "Can not get your email address from the request"
     gLogger.debug("Service response: %s" % error)
     return {"success":"false","error":error}
   gLogger.info("User want to be register in VO(s): %s" % vo)
   # TODO Check for previous requests
   # Get admin mail based on requested VO i.e. mail of VO admin
   mails = list()
   gLogger.debug("Trying to get admin username to take care about request")
   for i in vo:
     gLogger.debug("VOAdmin for VO: %s" % i)
     i = i.strip()
     voadm = gConfig.getValue("/Registry/VO/%s/VOAdmin" % i,[])
     gLogger.debug("/Registry/VO/%s/VOAdmin - %s" % (i,voadm))
     for user in voadm:
       mails.append(user)
   # If no VOAdmin - try to get admin mails based on group properties
   if not len(mails) > 0:
     gLogger.debug("No VO admins found. Trying to get something based on group property")
     groupList = list()
     groups = gConfig.getSections("/Registry/Groups")
     gLogger.debug("Group response: %s" % groups)
     if groups["OK"]:
       allGroups = groups["Value"]
       gLogger.debug("Looking for UserAdministrator property")
       for j in allGroups:
         props = getProperties(j)
         gLogger.debug("%s properties: %s" % (j,props)) #1
         if "UserAdministrator" in props: # property which is used for user administration
           groupList.append(j)
     groupList = uniqueElements(groupList)
     gLogger.debug("Chosen group(s): %s" % groupList)
     if len(groupList) > 0:
       for i in groupList:
         users = gConfig.getValue("/Registry/Groups/%s/Users" % i,[])
         gLogger.debug("%s users: %s" % (i,users))
         for user in users:
           mails.append(user)
   # Last stand - Failsafe option
   if not len(mails) > 0:
     gLogger.debug("No suitable groups found. Trying failsafe")
     regAdmin = gConfig.getValue("/Website/UserRegistrationAdmin",[])
     gLogger.debug("/Website/UserRegistrationAdmin - %s" % regAdmin)
     for user in regAdmin:
       mails.append(user)
   mails = uniqueElements(mails)
   gLogger.info("Chosen admin(s): %s" % mails)
   # Final check of usernames
   if not len(mails) > 0:
     error = "Can't get in contact with administrators about your request\n"
     error = error + "Most likely this DIRAC instance is not configured yet"
     gLogger.debug("Service response: %s" % error)
     return {"success":"false","error":error}
   # Convert usernames to { e-mail : full name }
   gLogger.debug("Trying to get admin's mail and associated name")
   sendDict = dict()
   for user in mails:
     email = gConfig.getValue("/Registry/Users/%s/Email" % user,"")
     gLogger.debug("/Registry/Users/%s/Email - '%s'" % (user,email))
     emil = email.strip()
     if not email:
       gLogger.error("Can't find value for option /Registry/Users/%s/Email" % user)
       continue
     fname = gConfig.getValue("/Registry/Users/%s/FullName" % user,"")
     gLogger.debug("/Registry/Users/%s/FullName - '%s'" % (user,fname))
     fname = fname.strip()
     if not fname:
       fname = user
       gLogger.debug("FullName is absent, name to be used: %s" % fname)
     sendDict[email] = fname
   # Final check of mails
   gLogger.debug("Final dictionary with mails to be used %s" % sendDict)
   if not len(sendDict) > 0:
     error = "Can't get in contact with administrators about your request\n"
     error = error + "Most likely this DIRAC instance is not configured yet"
     gLogger.debug("Service response: %s" % error)
     return {"success":"false","error":error}
   # Sending a mail
   sentSuccess = list()
   sentFailed = list()
   gLogger.debug("Initializing Notification client")
   ntc = NotificationClient(lambda x, timeout: getRPCClient(x, timeout=timeout, static = True) )
   gLogger.debug("Sending messages")
   for email,name in sendDict.iteritems():
     gLogger.debug("ntc.sendMail(%s,New user has registered,%s,%s,False" % (email,body,userMail))
     result = ntc.sendMail(email,"New user has registered",body,userMail,False)
     if not result["OK"]:
       error = name + ": " + result["Message"]
       sentFailed.append(error)
       gLogger.error("Sent failure: ", error)
     else:
       gLogger.info("Successfully sent to %s" % name)
       sentSuccess.append(name)
   # Returning results
   sName = ", ".join(sentSuccess)
   gLogger.info("End of processing of a registration request")
   gLogger.debug("Service response sent to a user:"******"Your registration request were sent successfully to: "
     result = result + sName + "\n\nFailed to sent it to:\n"
     result = result + "\n".join(sentFailed)
     gLogger.debug(result)
     return {"success":"true","result":result}
   elif len(sentSuccess) > 0 and (not len(sentFailed)) > 0:
     result = "Your registration request were sent successfully to: %s" % sName
     gLogger.debug(result)
     return {"success":"true","result":result}
   elif (not len(sentSuccess)) > 0 and len(sentFailed) > 0:
     result = "Failed to sent your request to:\n"
     result = result + "\n".join(sentFailed)
     gLogger.debug(result)
     return {"success":"false","error":result}
   else:
     result = "No messages were sent to administrator due technical failure"
     gLogger.debug(result)
     return {"success":"false","error":result}
Example #46
  def registerUser( self ):

    """
    This function is used to notify DIRAC admins about user registration request
    The logic is simple:
    0) Check if request from this e-mail has already registered or not
    1) Send mail to VO admin of requested VO
    2) Send mail to users in group with UserAdministrator property
    3) Send mail to users indicated in /Website/UserRegistrationAdmin option
    """
    
    gLogger.info("Start processing a registration request")

    checkUserCredentials()
    # Check for having a DN but no username
    dn = getUserDN()
    if not dn:
      error = "Certificate is not loaded in the browser or DN is absent"
      gLogger.error( "Service response: %s" % error )
      return { "success" : "false" , "error" : error }
    username = getUsername()
    if not username == "anonymous":
      error = "You are already registered in DIRAC with username: %s" % username
      gLogger.error( "Service response: %s" % error )
      return { "success" : "false" , "error" : error }
    gLogger.info( "DN: %s" % dn )

    if not request.params.has_key( "email" ):
      error = "Can not get your email address from the request"
      gLogger.debug( "Service response: %s" % error )
      return { "success" : "false" , "error" : error }
    userMail = request.params[ "email" ]

    if self.alreadyRequested( userMail ):
      error = "Request associated with %s already registered" % userMail
      gLogger.debug( "Service response: %s" % error )
      return { "success" : "false" , "error" : error }

    vo = fromChar( request.params[ "vo" ] )
    if not vo:
      error = "You should indicate a VirtualOrganization for membership"
      gLogger.debug( "Service response: %s" % error )
      return { "success" : "false" , "error" : error }
    gLogger.info( "User want to be register in VO(s): %s" % vo )

    body = str()
    for i in request.params:
      if not i in [ "registration_request" , "email" , "vo" ]:
        info = self.__checkUnicode( i , request.params[ i ] )
        body = body + info + "\n"
    body = body + "DN - " + dn
    gLogger.debug( "email body: %s" % body )

    adminList = self.__getAdminList( vo )
    if not len( adminList ) > 0:
      error = "Can't get in contact with administrators about your request\n"
      error = error + "Most likely this DIRAC instance is not configured yet"
      gLogger.debug( "Service response: %s" % error )
      return { "success" : "false" , "error" : error }
    adminList = uniqueElements( adminList )
    gLogger.info( "Chosen admin(s): %s" % adminList )
    
    sendDict = self.__getMailDict( adminList )
    if not len(sendDict) > 0:
      error = "Can't get in contact with administrators about your request\n"
      error = error + "Most likely this DIRAC instance is not configured yet"
      gLogger.debug( "Service response: %s" % error )
      return { "success" : "false" , "error" : error }
    gLogger.debug( "Final dictionary with mails to be used %s" % sendDict )

    return self.__sendAMail( sendDict , body , userMail )
Example #47
  def registerUser( self ):

    """
    This function is used to notify DIRAC admins about user registration request
    The logic is simple:
    0) Check if request from this e-mail has already registered or not
    1) Send mail to VO admin of requested VO
    2) Send mail to users in group with UserAdministrator property
    3) Send mail to users indicated in /Website/UserRegistrationAdmin option
    """
    
    gLogger.info("Start processing a registration request")

    checkUserCredentials()
    # Check for having a DN but no username
    dn = getUserDN()
    if not dn:
      error = "Certificate is not loaded in the browser or DN is absent"
      gLogger.error( "Service response: %s" % error )
      return { "success" : "false" , "error" : error }
    username = getUsername()
    if not username == "anonymous":
      error = "You are already registered in DIRAC with username: %s" % username
      gLogger.error( "Service response: %s" % error )
      return { "success" : "false" , "error" : error }
    gLogger.info( "DN: %s" % dn )

    if not "email" in request.params:
      error = "Can not get your email address from the request"
      gLogger.debug( "Service response: %s" % error )
      return { "success" : "false" , "error" : error }
    userMail = request.params[ "email" ]

    result = self.isRequested( userMail )
    gLogger.debug( result )
    if result[ "OK" ]:
      return render( "/reg_done.mako" )

    result = self.registerRequest( dn , userMail )
    gLogger.debug( result )
    if not result[ "OK" ]:
      return { "success" : "false" , "error" : result[ "Message" ] }

    vo = fromChar( request.params[ "vo" ] )
    if not vo:
      error = "You should indicate a VirtualOrganization for membership"
      gLogger.debug( "Service response: %s" % error )
      return { "success" : "false" , "error" : error }
    gLogger.info( "User want to be register in VO(s): %s" % vo )

    body = str()
    for i in request.params:
      if not i in [ "registration_request" , "email" , "vo" ]:
        text = self.checkUnicode( request.params[ i ] )
        info = "%s - %s" % ( i , text )
        body = body + info + "\n"
    body = body + "DN - " + dn
    gLogger.debug( "email body: %s" % body )

    adminList = self.__getAdminList( vo )
    if not len( adminList ) > 0:
      error = "Can't get in contact with administrators about your request\n"
      error = error + "Most likely this DIRAC instance is not configured yet"
      gLogger.debug( "Service response: %s" % error )
      return { "success" : "false" , "error" : error }
    adminList = uniqueElements( adminList )
    gLogger.info( "Chosen admin(s): %s" % adminList )
    
    sendDict = self.getMailDict( adminList )
    if not len( sendDict ) > 0:
      error = "Can't get in contact with administrators about your request\n"
      error = error + "Most likely this DIRAC instance is not configured yet"
      gLogger.debug( "Service response: %s" % error )
      return { "success" : "false" , "error" : error }
    gLogger.debug( "Final dictionary with mails to be used %s" % sendDict )

    if socket.gethostname().find( '.' ) >= 0:
      hostname = socket.gethostname()
    else:
      hostname = socket.gethostbyaddr( socket.gethostname() )[ 0 ]
    title = "New user has sent registration request to %s" % hostname

    return self.sendMail( sendDict , title , body , userMail )
Example #48
def getDestinationSEList(outputSE, site, outputmode='Any'):
    """ Evaluate the output SE list from a workflow and return the concrete list
      of SEs to upload output data.
  """
    # Add output SE defined in the job description
    gLogger.info('Resolving workflow output SE description: %s' % outputSE)

    # Check if the SE is defined explicitly for the site
    prefix = site.split('.')[0]
    country = site.split('.')[-1]
    # Concrete SE name
    result = gConfig.getOptions('/Resources/StorageElements/' + outputSE)
    if result['OK']:
        gLogger.info('Found concrete SE %s' % outputSE)
        return S_OK([outputSE])
    # There is an alias defined for this Site
    alias_se = gConfig.getValue(
        '/Resources/Sites/%s/%s/AssociatedSEs/%s' % (prefix, site, outputSE),
        [])
    if alias_se:
        gLogger.info('Found associated SE for site %s' % (alias_se))
        return S_OK(alias_se)

    localSEs = getSEsForSite(site)['Value']
    gLogger.verbose('Local SE list is: %s' % (localSEs))
    groupSEs = gConfig.getValue('/Resources/StorageElementGroups/' + outputSE,
                                [])
    gLogger.verbose('Group SE list is: %s' % (groupSEs))
    if not groupSEs:
        return S_ERROR('Failed to resolve SE ' + outputSE)

    if outputmode.lower() == "local":
        for se in localSEs:
            if se in groupSEs:
                gLogger.info('Found eligible local SE: %s' % (se))
                return S_OK([se])

        # Check if the country already has associated SEs defined
        associatedSE = gConfig.getValue(
            '/Resources/Countries/%s/AssociatedSEs/%s' % (country, outputSE),
            '')
        if associatedSE:
            gLogger.info(
                'Found associated SE %s in /Resources/Countries/%s/AssociatedSEs/%s'
                % (associatedSE, country, outputSE))
            return S_OK([associatedSE])

        # Final check for country associated SE
        count = 0
        assignedCountry = country
        while count < 10:
            gLogger.verbose('Loop count = %s' % (count))
            gLogger.verbose("/Resources/Countries/%s/AssignedTo" %
                            assignedCountry)
            opt = gConfig.getOption("/Resources/Countries/%s/AssignedTo" %
                                    assignedCountry)
            if opt['OK'] and opt['Value']:
                assignedCountry = opt['Value']
                gLogger.verbose('/Resources/Countries/%s/AssociatedSEs' %
                                assignedCountry)
                assocCheck = gConfig.getOption(
                    '/Resources/Countries/%s/AssociatedSEs' % assignedCountry)
                if assocCheck['OK'] and assocCheck['Value']:
                    break
            count += 1

        if not assignedCountry:
            return S_ERROR('Could not determine associated SE list for %s' %
                           country)

        alias_se = gConfig.getValue(
            '/Resources/Countries/%s/AssociatedSEs/%s' %
            (assignedCountry, outputSE), [])
        if alias_se:
            gLogger.info('Found alias SE for site: %s' % alias_se)
            return S_OK(alias_se)
        else:
            gLogger.error(
                'Could not establish alias SE for country %s from section: /Resources/Countries/%s/AssociatedSEs/%s'
                % (country, assignedCountry, outputSE))
            return S_ERROR('Failed to resolve SE ' + outputSE)

    # For collective Any and All modes return the whole group

    # Make sure that local SEs are passing first
    newSEList = []
    for se in groupSEs:
        if se in localSEs:
            newSEList.append(se)
    uniqueSEs = uniqueElements(newSEList + groupSEs)
    gLogger.verbose('Found unique SEs: %s' % (uniqueSEs))
    return S_OK(uniqueSEs)
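Callers of getDestinationSEList() are expected to unwrap the usual S_OK/S_ERROR structure. A minimal usage sketch; the SE group and site names below are made up for illustration:

# Hypothetical values: 'MY-USER-SE-GROUP' and 'LCG.CERN.ch' are examples only
result = getDestinationSEList('MY-USER-SE-GROUP', 'LCG.CERN.ch', outputmode='Local')
if result['OK']:
    print('Upload targets: %s' % result['Value'])
else:
    print('SE resolution failed: %s' % result['Message'])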
Example #49
File: Job.py  Project: sposs/DIRAC
  def _toJDL( self, xmlFile = '' ): #messy but need to account for xml file being in /tmp/guid dir
    """Creates a JDL representation of itself as a Job.
    """
    #Check if we have to do old bootstrap...
    classadJob = ClassAd( '[]' )

    paramsDict = {}
    params = self.workflow.parameters # ParameterCollection object

    paramList = params
    for param in paramList:
      paramsDict[param.getName()] = {'type':param.getType(), 'value':param.getValue()}

    scriptname = 'jobDescription.xml'
    arguments = []
    if self.script:
      if os.path.exists( self.script ):
        scriptname = os.path.abspath( self.script )
        self.log.verbose( 'Found script name %s' % scriptname )
    else:
      if xmlFile:
        self.log.verbose( 'Found XML File %s' % xmlFile )
        scriptname = xmlFile

    arguments.append( os.path.basename( scriptname ) )
    self.addToInputSandbox.append( scriptname )
    if paramsDict.has_key( 'LogLevel' ):
      if paramsDict['LogLevel']['value']:
        arguments.append( '-o LogLevel=%s' % ( paramsDict['LogLevel']['value'] ) )
      else:
        self.log.warn( 'Job LogLevel defined with null value' )
    if paramsDict.has_key( 'DIRACSetup' ):
      if paramsDict['DIRACSetup']['value']:
        arguments.append( '-o DIRAC/Setup=%s' % ( paramsDict['DIRACSetup']['value'] ) )
      else:
        self.log.warn( 'Job DIRACSetup defined with null value' )
    if paramsDict.has_key( 'JobMode' ):
      if paramsDict['JobMode']['value']:
        arguments.append( '-o JobMode=%s' % ( paramsDict['JobMode']['value'] ) )
      else:
        self.log.warn( 'Job Mode defined with null value' )
    if paramsDict.has_key( 'JobConfigArgs' ):
      if paramsDict['JobConfigArgs']['value']:
        arguments.append( '%s' % ( paramsDict['JobConfigArgs']['value'] ) )
      else:
        self.log.warn( 'JobConfigArgs defined with null value' )

    classadJob.insertAttributeString( 'Executable', self.executable )
    self.addToOutputSandbox.append( self.stderr )
    self.addToOutputSandbox.append( self.stdout )

    #Extract i/o sandbox parameters from steps and any input data parameters
    #to do when introducing step-level api...

    #To add any additional files to input and output sandboxes
    if self.addToInputSandbox:
      extraFiles = ';'.join( self.addToInputSandbox )
      if paramsDict.has_key( 'InputSandbox' ):
        currentFiles = paramsDict['InputSandbox']['value']
        finalInputSandbox = currentFiles + ';' + extraFiles
        uniqueInputSandbox = uniqueElements( finalInputSandbox.split( ';' ) )
        paramsDict['InputSandbox']['value'] = ';'.join( uniqueInputSandbox )
        self.log.verbose( 'Final unique Input Sandbox %s' % ( ';'.join( uniqueInputSandbox ) ) )
      else:
        paramsDict['InputSandbox'] = {}
        paramsDict['InputSandbox']['value'] = extraFiles
        paramsDict['InputSandbox']['type'] = 'JDL'

    if self.addToOutputSandbox:
      extraFiles = ';'.join( self.addToOutputSandbox )
      if paramsDict.has_key( 'OutputSandbox' ):
        currentFiles = paramsDict['OutputSandbox']['value']
        finalOutputSandbox = currentFiles + ';' + extraFiles
        uniqueOutputSandbox = uniqueElements( finalOutputSandbox.split( ';' ) )
        paramsDict['OutputSandbox']['value'] = ';'.join( uniqueOutputSandbox )
        self.log.verbose( 'Final unique Output Sandbox %s' % ( ';'.join( uniqueOutputSandbox ) ) )
      else:
        paramsDict['OutputSandbox'] = {}
        paramsDict['OutputSandbox']['value'] = extraFiles
        paramsDict['OutputSandbox']['type'] = 'JDL'

    if self.addToInputData:
      extraFiles = ';'.join( self.addToInputData )
      if paramsDict.has_key( 'InputData' ):
        currentFiles = paramsDict['InputData']['value']
        finalInputData = extraFiles
        if currentFiles:
          finalInputData = currentFiles + ';' + extraFiles
        uniqueInputData = uniqueElements( finalInputData.split( ';' ) )
        paramsDict['InputData']['value'] = ';'.join( uniqueInputData )
        self.log.verbose( 'Final unique Input Data %s' % ( ';'.join( uniqueInputData ) ) )
      else:
        paramsDict['InputData'] = {}
        paramsDict['InputData']['value'] = extraFiles
        paramsDict['InputData']['type'] = 'JDL'

    # Handle here the Parametric values
    if self.parametric:
      for pType in ['InputData', 'InputSandbox']:
        if self.parametric.has_key( pType ):
          if paramsDict.has_key( pType ) and paramsDict[pType]['value']:
            pData = self.parametric[pType]
            # List of lists case
            currentFiles = paramsDict[pType]['value'].split( ';' )
            tmpList = []
            if type( pData[0] ) == list:
              for pElement in pData:
                tmpList.append( currentFiles + pElement )
            else:
              for pElement in pData:
                tmpList.append( currentFiles + [pElement] )
            self.parametric[pType] = tmpList

          paramsDict[pType] = {}
          paramsDict[pType]['value'] = "%s"
          paramsDict[pType]['type'] = 'JDL'
          self.parametric['files'] = self.parametric[pType]
          arguments.append( ' -p Parametric' + pType + '=%s' )
          break

      if self.parametric.has_key( 'files' ):
        paramsDict['Parameters'] = {}
        paramsDict['Parameters']['value'] = self.parametric['files']
        paramsDict['Parameters']['type'] = 'JDL'
      if self.parametric.has_key( 'GenericParameters' ):
        paramsDict['Parameters'] = {}
        paramsDict['Parameters']['value'] = self.parametric['GenericParameters']
        paramsDict['Parameters']['type'] = 'JDL'
        arguments.append( ' -p ParametricParameters=%s' )
    ##This needs to be put here so that the InputData and/or InputSandbox parameters for parametric jobs are processed
    classadJob.insertAttributeString( 'Arguments', ' '.join( arguments ) )

    #Add any JDL parameters to classad obeying lists with ';' rule
    requirements = False
    for name, props in paramsDict.items():
      ptype = props['type']
      value = props['value']
      if name.lower() == 'requirements' and ptype == 'JDL':
        self.log.verbose( 'Found existing requirements: %s' % ( value ) )
        requirements = True

      if re.search( '^JDL', ptype ):
        if type( value ) == list:
          if type( value[0] ) == list:
            classadJob.insertAttributeVectorStringList( name, value )
          else:
            classadJob.insertAttributeVectorString( name, value )
        elif value == "%s":
          classadJob.insertAttributeInt( name, value )
        elif not re.search( ';', value ) or name == 'GridRequirements': #not a nice fix...
          classadJob.insertAttributeString( name, value )
        else:
          classadJob.insertAttributeVectorString( name, value.split( ';' ) )

    if not requirements:
      reqtsDict = self.reqParams
      exprn = ''
      plus = ''
      for name, props in paramsDict.items():
        ptype = paramsDict[name]['type']
        value = paramsDict[name]['value']
        if not ptype == 'dict':
          if ptype == 'JDLReqt':
            if value and not value.lower() == 'any':
              plus = ' && '
              if re.search( ';', value ):
                for val in value.split( ';' ):
                  exprn += reqtsDict[name].replace( 'NAME', name ).replace( 'VALUE', str( val ) ) + plus
              else:
                exprn += reqtsDict[name].replace( 'NAME', name ).replace( 'VALUE', str( value ) ) + plus

      if len( plus ):
        exprn = exprn[:-len( plus )]
      if not exprn:
        exprn = 'true'
      self.log.verbose( 'Requirements: %s' % ( exprn ) )
      #classadJob.set_expression('Requirements', exprn)

    self.addToInputSandbox.remove( scriptname )
    self.addToOutputSandbox.remove( self.stdout )
    self.addToOutputSandbox.remove( self.stderr )
    jdl = classadJob.asJDL()
    start = jdl.find( '[' )
    end = jdl.rfind( ']' )
    return jdl[( start + 1 ):( end - 1 )]
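_toJDL() returns the JDL body with the enclosing ClassAd brackets stripped, so a caller that needs a complete expression has to re-wrap it. A hedged usage sketch; the Job construction and setExecutable() arguments are illustrative assumptions, not taken from the code above:

job = Job()
job.setExecutable( '/bin/ls', arguments = '-ltr' )  # illustrative executable and arguments
jdlBody = job._toJDL()
fullJDL = '[%s]' % jdlBody                          # restore the brackets stripped at the end of _toJDL()
print( fullJDL )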
Example #52
    def _prepareRemoteHost(self, host=None):
        """Prepare remote directories and upload control script"""

        ssh = SSH(host=host, parameters=self.ceParameters)

        # Make remote directories
        dirTuple = tuple(
            uniqueElements([
                self.sharedArea, self.executableArea, self.infoArea,
                self.batchOutput, self.batchError, self.workArea
            ]))
        nDirs = len(dirTuple)
        cmd = "mkdir -p %s; " * nDirs % dirTuple
        cmd = "bash -c '%s'" % cmd
        self.log.verbose("Creating working directories on %s" %
                         self.ceParameters["SSHHost"])
        result = ssh.sshCall(30, cmd)
        if not result["OK"]:
            self.log.error("Failed creating working directories",
                           "(%s)" % result["Message"][1])
            return result
        status, output, _error = result["Value"]
        if status == -1:
            self.log.error("Timeout while creating directories")
            return S_ERROR(errno.ETIME, "Timeout while creating directories")
        if "cannot" in output:
            self.log.error("Failed to create directories", "(%s)" % output)
            return S_ERROR(errno.EACCES, "Failed to create directories")

        # Upload the control script now
        result = self._generateControlScript()
        if not result["OK"]:
            self.log.warn("Failed generating control script")
            return result
        localScript = result["Value"]
        self.log.verbose("Uploading %s script to %s" %
                         (self.batchSystem.__class__.__name__,
                          self.ceParameters["SSHHost"]))
        remoteScript = "%s/execute_batch" % self.sharedArea
        result = ssh.scpCall(30,
                             localScript,
                             remoteScript,
                             postUploadCommand="chmod +x %s" % remoteScript)
        if not result["OK"]:
            self.log.warn("Failed uploading control script: %s" %
                          result["Message"][1])
            return result
        status, output, _error = result["Value"]
        if status != 0:
            if status == -1:
                self.log.warn("Timeout while uploading control script")
                return S_ERROR("Timeout while uploading control script")
            self.log.warn("Failed uploading control script: %s" % output)
            return S_ERROR("Failed uploading control script")

        # Delete the generated control script locally
        try:
            os.remove(localScript)
        except OSError:
            self.log.warn(
                "Failed removing the generated control script locally")
            return S_ERROR(
                "Failed removing the generated control script locally")

        # Chmod the control scripts
        # self.log.verbose( 'Chmod +x control script' )
        # result = ssh.sshCall( 10, "chmod +x %s/%s" % ( self.sharedArea, self.controlScript ) )
        # if not result['OK']:
        #  self.log.warn( 'Failed chmod control script: %s' % result['Message'][1] )
        #  return result
        # status, output, _error = result['Value']
        # if status != 0:
        #  if status == -1:
        #    self.log.warn( 'Timeout while chmod control script' )
        #    return S_ERROR( 'Timeout while chmod control script' )
        #  else:
        #    self.log.warn( 'Failed uploading chmod script: %s' % output )
        #    return S_ERROR( 'Failed uploading chmod script' )

        return S_OK()
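The mkdir command above is built by repeating the format template once per directory and then substituting all names in a single '%' operation; because '*' and '%' share the same precedence and associate left to right, the repetition happens first. A standalone illustration with made-up paths:

dirs = ('/opt/dirac/shared', '/opt/dirac/info')   # made-up directory names
cmd = "mkdir -p %s; " * len(dirs) % dirs
print(cmd)   # mkdir -p /opt/dirac/shared; mkdir -p /opt/dirac/info;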
Example #53
            elif tarfile.is_tarfile(fname):
                try:
                    gLogger.debug('Unpacking catalog XML file %s' %
                                  (os.path.join(directory, fname)))
                    tarFile = tarfile.open(os.path.join(directory, fname), 'r')
                    for member in tarFile.getmembers():
                        tarFile.extract(member, directory)
                        poolCatalogList.append(
                            os.path.join(directory, member.name))
                except Exception, x:
                    gLogger.error('Could not untar %s with exception %s' %
                                  (fname, str(x)))
            else:
                poolCatalogList.append(fname)

    poolCatalogList = uniqueElements(poolCatalogList)

    #Now have list of all XML files but some may not be Pool XML catalogs...
    finalCatList = []
    for possibleCat in poolCatalogList:
        try:
            # Constructing PoolXMLCatalog serves only as a validity check;
            # non-POOL files raise and are skipped in the except clause below
            cat = PoolXMLCatalog(possibleCat)
            finalCatList.append(possibleCat)
        except Exception, x:
            gLogger.debug('Ignoring non-POOL catalogue file %s' % possibleCat)

    gLogger.debug('Final list of catalog files are: %s' %
                  string.join(finalCatList, ', '))

    return finalCatList
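All of the examples above rely on uniqueElements() for de-duplication while keeping the first occurrence of each entry. A minimal order-preserving equivalent is sketched below as an assumption about that utility's behaviour, not the actual DIRAC implementation:

def uniqueElementsSketch(items):
    """Order-preserving de-duplication (illustrative stand-in for uniqueElements)."""
    seen = set()
    unique = []
    for item in items:
        if item not in seen:
            seen.add(item)
            unique.append(item)
    return unique

print(uniqueElementsSketch(['a.xml', 'b.xml', 'a.xml']))   # ['a.xml', 'b.xml']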