Example #1
  def execute(self):
    """ Run this.
    """
    if not self.workflowStatus['OK'] or not self.stepStatus['OK']:
      LOG.verbose('Workflow status = %s, step status = %s' % (self.workflowStatus['OK'], self.stepStatus['OK']))
      return S_OK('Workflow status is not OK')
    result = self.resolveInputVariables()
    if not result['OK']:
      LOG.error("Failed to resolve input parameters:", result["Message"])
      return result
    if not self.srmfiles:
      LOG.error('Files txt was not found correctly: %s' % self.srmfiles)
      return S_ERROR('Files txt was not found correctly')
    
    if not isinstance( self.files[0], dict ):
      LOG.error('Files were not found correctly: %s' % self.files)
      return S_ERROR('Files were not found correctly')

    ## Check that not too many concurrent jobs are downloading SRM files at the same time
    max_concurrent_running = self.ops.getValue('/GetSRM/MaxConcurrentRunning', 100)
    error_count = 0
    while True:
      if error_count > 10:
        LOG.error('Job monitoring did not return the expected counters dictionary')
        return S_ERROR('Failed to get the number of concurrent SRM download jobs')
      jobMonitor = JobMonitoringClient()
      res = jobMonitor.getCurrentJobCounters({'ApplicationStatus':'Downloading SRM files'})
      if not res['OK']:
        error_count += 1 
        time.sleep(60)
        continue
      running = 0
      if 'Running' in res['Value']:
        running = res['Value']['Running']
      if running < max_concurrent_running:
        break
      else:
        time.sleep(60)        

    self.setApplicationStatus('Downloading SRM files')
    for filed in self.files:
      if 'file' not in filed or 'site' not in filed:
        LOG.error('Dictionary does not contain correct keys')
        return S_ERROR('Dictionary does not contain correct keys')
      start = os.getcwd()
      downloadDir = tempfile.mkdtemp(prefix = 'InputData_%s' % (self.counter), dir = start)
      os.chdir(downloadDir)
      storageElement = StorageElement( filed['site'] )
      result = storageElement.getFile( filed['file'] )
      if result['Value']['Failed']:
        result = storageElement.getFile( filed['file'] )
      os.chdir(start)
      if result['Value']['Failed']:
        LOG.error("Failed to get the file from storage:", result['Value']['Failed'])
        return result
      self.counter += 1
      
       
    return S_OK()
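
A minimal illustration (not part of the original module) of the input structure that execute() above expects: self.files must be a list of dictionaries with at least the 'site' and 'file' keys it checks for. The storage element name and paths below are invented placeholders.

# Hypothetical input for the module above; only the 'site' and 'file' keys are
# taken from the checks in execute(), the values are made-up placeholders.
files = [
    {'site': 'SOME-SRM-SE', 'file': '/some/path/inputfile_1.txt'},
    {'site': 'SOME-SRM-SE', 'file': '/some/path/inputfile_2.txt'},
]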
    def setUp(
        self,
        mk_getConfigStorageName,
        mk_getConfigStorageOptions,
        mk_getConfigStorageProtocols,
        mk_isLocalSE,
        mk_addAccountingOperation,
    ):
        self.storage = StorageElementItem("FAKE")
        self.storage.vo = "test"
Example #3
 def setUp( self, _mk_getConfigStorageName, _mk_getConfigStorageOptions, _mk_getConfigStorageProtocols,
            _mk_generateStorage, _mk_isLocalSE, _mk_addAccountingOperation, _mk_dmsHelpers ):
   self.seA = StorageElementItem( 'StorageA' )
   self.seA.vo = 'lhcb'
   self.seB = StorageElementItem( 'StorageB' )
   self.seB.vo = 'lhcb'
   self.seC = StorageElementItem( 'StorageC' )
   self.seC.vo = 'lhcb'
   self.seD = StorageElementItem( 'StorageD' )
   self.seD.vo = 'lhcb'
   self.seE = StorageElementItem( 'StorageE' )
   self.seE.vo = 'lhcb'
Example #4
    def setUp(
        self,
        mk_getConfigStorageName,
        mk_getConfigStorageOptions,
        mk_getConfigStorageProtocols,
        mk_isLocalSE,
        mk_addAccountingOperation,
    ):
        self.se = StorageElementItem("FAKE")
        self.se.vo = "test"

        self.basePath = tempfile.mkdtemp(dir="/tmp")
        # Update the basePath of the plugin
        self.se.storages[0].basePath = self.basePath

        self.srcPath = tempfile.mkdtemp(dir="/tmp")

        self.destPath = tempfile.mkdtemp(dir="/tmp")

        self.existingFile = "/test/file.txt"
        self.existingFileSize = 0

        self.nonExistingFile = "/test/nonExistingFile.txt"
        self.subDir = "/test/subDir"
        self.subFile = os.path.join(self.subDir, "subFile.txt")
        self.subFileSize = 0

        self.FILES = [self.existingFile, self.nonExistingFile, self.subFile]
        self.DIRECTORIES = [self.subDir]
        self.ALL = self.FILES + self.DIRECTORIES

        with open(
                os.path.join(self.srcPath,
                             self.existingFile.replace("/test/", "")),
                "w") as f:
            f.write("I put something in the file so that it has a size\n")
        self.existingFileSize = os.path.getsize(
            os.path.join(self.srcPath, self.existingFile.replace("/test/",
                                                                 "")))

        assert self.existingFileSize

        os.mkdir(os.path.join(self.srcPath, os.path.basename(self.subDir)))

        with open(
                os.path.join(self.srcPath, self.subFile.replace("/test/", "")),
                "w") as f:
            f.write("This one should have a size as well\n")
        self.subFileSize = os.path.getsize(
            os.path.join(self.srcPath, self.subFile.replace("/test/", "")))

        assert self.subFileSize
Example #5
class StoragePlugInTestCase( unittest.TestCase ):
  """ Base test class. Defines all the method to test
  """

  @mock.patch( 'DIRAC.Resources.Storage.StorageFactory.StorageFactory._getConfigStorageName',
               side_effect = mock_StorageFactory_getConfigStorageName )
  @mock.patch( 'DIRAC.Resources.Storage.StorageFactory.StorageFactory._getConfigStorageOptions',
               side_effect = mock_StorageFactory_getConfigStorageOptions )
  @mock.patch( 'DIRAC.Resources.Storage.StorageFactory.StorageFactory._getConfigStorageProtocols',
               side_effect = mock_StorageFactory_getConfigStorageProtocols )
  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
               return_value = S_OK( True ) )  # Pretend it's local
  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
               return_value = None )  # Don't send accounting
  # @mock.patch( 'DIRAC.Resources.Storage.StorageFactory.StorageFactory._getCurrentURL',
  #              side_effect = mock_StorageFactory_getCurrentURL_getCurrentURL )
  def setUp( self, mk_getConfigStorageName, mk_getConfigStorageOptions, mk_getConfigStorageProtocols,
             mk_isLocalSE, mk_addAccountingOperation ):
    self.storage = StorageElementItem( 'FAKE' )
    self.storage.vo = 'test'

    # self.storage = storageDetails['StorageObjects'][0]
    # self.storage.changeDirectory( 'lhcb/test/unit-test/Storage/RFIOStorage' )

  def test_createUnitTestDir( self ):
    print('\n\n#########################################################'
          '################\n\n\t\t\tCreate Directory test\n')
    # destDir = self.storage.getCurrentURL( '' )['Value']
    destDir = '/bla/'
    res = self.storage.createDirectory( destDir )
    print(res)
    self.assertTrue(res['OK'])
    self.assertTrue( destDir in res['Value']['Successful'] )
    self.assertTrue( res['Value']['Successful'][destDir] )
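
A minimal sketch, not part of the original example, of how such a mocked test case could be run with the stock unittest runner; it assumes the StoragePlugInTestCase class and the unittest import from the example above.

# Assumption: run the mocked test case above directly with the standard unittest runner.
if __name__ == '__main__':
    suite = unittest.defaultTestLoader.loadTestsFromTestCase(StoragePlugInTestCase)
    unittest.TextTestRunner(verbosity=2).run(suite)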
Example #7
    def setUp(self, mk_getConfigStorageName, mk_getConfigStorageOptions,
              mk_getConfigStorageProtocols, mk_isLocalSE):
        self.se = StorageElementItem('FAKE')

        self.basePath = tempfile.mkdtemp(dir='/tmp')
        # Update the basePath of the plugin
        self.se.storages[0].basePath = self.basePath

        self.srcPath = tempfile.mkdtemp(dir='/tmp')

        self.destPath = tempfile.mkdtemp(dir='/tmp')

        self.existingFile = '/lhcb/file.txt'
        self.existingFileSize = 0

        self.nonExistingFile = '/lhcb/nonExistingFile.txt'
        self.subDir = '/lhcb/subDir'
        self.subFile = os.path.join(self.subDir, 'subFile.txt')
        self.subFileSize = 0

        self.FILES = [self.existingFile, self.nonExistingFile, self.subFile]
        self.DIRECTORIES = [self.subDir]
        self.ALL = self.FILES + self.DIRECTORIES

        with open(
                os.path.join(self.srcPath,
                             self.existingFile.replace('/lhcb/', '')),
                'w') as f:
            f.write("I put something in the file so that it has a size\n")
        self.existingFileSize = os.path.getsize(
            os.path.join(self.srcPath, self.existingFile.replace('/lhcb/',
                                                                 '')))

        assert self.existingFileSize

        os.mkdir(os.path.join(self.srcPath, os.path.basename(self.subDir)))

        with open(
                os.path.join(self.srcPath, self.subFile.replace('/lhcb/', '')),
                'w') as f:
            f.write("This one should have a size as well\n")
        self.subFileSize = os.path.getsize(
            os.path.join(self.srcPath, self.subFile.replace('/lhcb/', '')))

        assert self.subFileSize
Example #9
def _getProdLogs():
  """get production log files from LogSE"""
  clip = _Params()
  clip.registerSwitch()
  Script.parseCommandLine()
  if not ( clip.logF or clip.logD or clip.prodid ):
    Script.showHelp()
    dexit(1)
  from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
  ops = Operations()
  storageElementName = ops.getValue('/LogStorage/LogSE', 'LogSE')
  from DIRAC.Resources.Storage.StorageElement import StorageElementItem as StorageElement
  logSE = StorageElement(storageElementName)

  if clip.prodid and not ( clip.logD or clip.logF ):
    result = _getLogFolderFromID( clip )
    if not result['OK']:
      gLogger.error( result['Message'] )
      dexit(1)

  if clip.logD:
    if not clip.noPromptBeforeDL:
      res = promptUser('Are you sure you want to get ALL the files in this directory?')
      if not res['OK']:
        dexit()
      choice = res['Value']
      if choice.lower()=='n':
        dexit(0)
  
    if isinstance(clip.logD, str):
      res = logSE.getDirectory(clip.logD, localPath=clip.outputdir)
      _printErrorReport(res)
    elif isinstance(clip.logD, list):
      for logdir in clip.logD:
        gLogger.notice('Getting log files from '+str(logdir))
        res = logSE.getDirectory(logdir, localPath=clip.outputdir)
        _printErrorReport(res)

  if clip.logF:
    res = logSE.getFile(clip.logF, localPath = clip.outputdir)
    _printErrorReport(res)
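
A short sketch (not shown in the example) of how the helper above would typically be wired up as the entry point of a DIRAC command-line script; the __main__ guard is an assumption, everything else reuses names from the function.

if __name__ == "__main__":
    # Assumption: _getProdLogs() is the script entry point; Script.parseCommandLine()
    # inside it consumes the switches registered by _Params.registerSwitch().
    _getProdLogs()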
Example #11
 def __init__(self):
   """Module initialization.
   """
   super(UploadLogFile, self).__init__()
   self.version = __RCSID__
   self.productionID = None
   self.jobID = None
   self.workflow_commons = None
   self.logFilePath = ""
   self.logLFNPath = ""
   self.logdir = ''
   self.logSE = StorageElement( self.ops.getValue('/LogStorage/LogSE', 'LogSE') )
   self.root = gConfig.getValue('/LocalSite/Root', os.getcwd())
   self.logSizeLimit = self.ops.getValue('/LogFiles/SizeLimit', 20 * 1024 * 1024)
   self.logExtensions = []
   self.failoverSEs = gConfig.getValue('/Resources/StorageElementGroups/Tier1-Failover', [])    
   self.experiment = 'CLIC'
   self.enable = True
   self.failoverTest = False #flag to put log files to failover by default
   self.jobID = ''
  def setUp( self, mk_getConfigStorageName, mk_getConfigStorageOptions, mk_getConfigStorageProtocols, mk_isLocalSE, mk_addAccountingOperation ):
    self.se = StorageElementItem( 'FAKE' )
    self.se.vo = 'test'

    self.basePath = tempfile.mkdtemp( dir = '/tmp' )
    # Update the basePath of the plugin
    self.se.storages[0].basePath = self.basePath

    self.srcPath = tempfile.mkdtemp( dir = '/tmp' )

    self.destPath = tempfile.mkdtemp( dir = '/tmp' )


    self.existingFile = '/test/file.txt'
    self.existingFileSize = 0

    self.nonExistingFile = '/test/nonExistingFile.txt'
    self.subDir = '/test/subDir'
    self.subFile = os.path.join( self.subDir, 'subFile.txt' )
    self.subFileSize = 0

    self.FILES = [self.existingFile, self.nonExistingFile, self.subFile]
    self.DIRECTORIES = [self.subDir]
    self.ALL = self.FILES + self.DIRECTORIES


    with open( os.path.join( self.srcPath, self.existingFile.replace( '/test/', '' ) ), 'w' ) as f:
      f.write( "I put something in the file so that it has a size\n" )
    self.existingFileSize = os.path.getsize( os.path.join( self.srcPath, self.existingFile.replace( '/test/', '' ) ) )

    assert self.existingFileSize

    os.mkdir( os.path.join( self.srcPath, os.path.basename( self.subDir ) ) )

    with open( os.path.join( self.srcPath, self.subFile.replace( '/test/', '' ) ), 'w' ) as f:
      f.write( "This one should have a size as well\n" )
    self.subFileSize = os.path.getsize( os.path.join( self.srcPath, self.subFile.replace( '/test/', '' ) ) )

    assert self.subFileSize
Example #13
 def __init__(self):
     """Module initialization.
 """
     super(UploadLogFile, self).__init__()
     self.version = __RCSID__
     self.log = gLogger.getSubLogger('UploadLogFile')
     self.productionID = None
     self.jobID = None
     self.workflow_commons = None
     self.logFilePath = ""
     self.logLFNPath = ""
     self.logdir = ''
     self.logSE = StorageElement(
         self.ops.getValue('/LogStorage/LogSE', 'LogSE'))
     self.root = gConfig.getValue('/LocalSite/Root', os.getcwd())
     self.logSizeLimit = self.ops.getValue('/LogFiles/SizeLimit',
                                           20 * 1024 * 1024)
     self.logExtensions = []
     self.failoverSEs = gConfig.getValue(
         '/Resources/StorageElementGroups/Tier1-Failover', [])
     self.experiment = 'CLIC'
     self.enable = True
     self.failoverTest = False  #flag to put log files to failover by default
     self.jobID = ''
class TestBase( unittest.TestCase ):
  """ Base test class. Defines all the method to test
  """


  @mock.patch( 'DIRAC.Resources.Storage.StorageFactory.StorageFactory._getConfigStorageName',
                side_effect = mock_StorageFactory_getConfigStorageName )
  @mock.patch( 'DIRAC.Resources.Storage.StorageFactory.StorageFactory._getConfigStorageOptions',
                side_effect = mock_StorageFactory_getConfigStorageOptions )
  @mock.patch( 'DIRAC.Resources.Storage.StorageFactory.StorageFactory._getConfigStorageProtocols',
                side_effect = mock_StorageFactory_getConfigStorageProtocols )
  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
                return_value = S_OK( True ) )  # Pretend it's local
  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
                return_value = None )  # Don't send accounting
  def setUp( self, mk_getConfigStorageName, mk_getConfigStorageOptions, mk_getConfigStorageProtocols, mk_isLocalSE, mk_addAccountingOperation ):
    self.se = StorageElementItem( 'FAKE' )
    self.se.vo = 'test'

    self.basePath = tempfile.mkdtemp( dir = '/tmp' )
    # Update the basePath of the plugin
    self.se.storages[0].basePath = self.basePath

    self.srcPath = tempfile.mkdtemp( dir = '/tmp' )

    self.destPath = tempfile.mkdtemp( dir = '/tmp' )


    self.existingFile = '/test/file.txt'
    self.existingFileSize = 0

    self.nonExistingFile = '/test/nonExistingFile.txt'
    self.subDir = '/test/subDir'
    self.subFile = os.path.join( self.subDir, 'subFile.txt' )
    self.subFileSize = 0

    self.FILES = [self.existingFile, self.nonExistingFile, self.subFile]
    self.DIRECTORIES = [self.subDir]
    self.ALL = self.FILES + self.DIRECTORIES


    with open( os.path.join( self.srcPath, self.existingFile.replace( '/test/', '' ) ), 'w' ) as f:
      f.write( "I put something in the file so that it has a size\n" )
    self.existingFileSize = os.path.getsize( os.path.join( self.srcPath, self.existingFile.replace( '/test/', '' ) ) )

    assert self.existingFileSize

    os.mkdir( os.path.join( self.srcPath, os.path.basename( self.subDir ) ) )

    with open( os.path.join( self.srcPath, self.subFile.replace( '/test/', '' ) ), 'w' ) as f:
      f.write( "This one should have a size as well\n" )
    self.subFileSize = os.path.getsize( os.path.join( self.srcPath, self.subFile.replace( '/test/', '' ) ) )

    assert self.subFileSize


  def tearDown(self):
    shutil.rmtree( self.basePath )
    shutil.rmtree( self.srcPath )
    shutil.rmtree( self.destPath )
    pass



  def walkAll( self ):
    for dirname in [self.basePath, self.destPath]:
      self.walkPath( dirname )

  def walkPath(self, path):
    for root, dirs, files in os.walk( path ):
      print(root)
      print("  dirs")
      for d in dirs:
        print("    ", os.path.join( root, d ))
      print("  files")
      for f in files:
        print("    ", os.path.join( root, f ))


  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
                return_value = S_OK( True ) )  # Pretend it's local
  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
                return_value = None )  # Don't send accounting
  def test_01_getURL( self, mk_isLocalSE, mk_addAccounting ):
    """Testing getURL"""
    # Testing the getURL
    res = self.se.getURL( self.ALL )
    self.assert_( res['OK'], res )
    self.assert_( not res['Value']['Failed'], res['Value']['Failed'] )
    self.assert_( len( res['Value']['Successful'] ) == len( self.ALL ) )
    for lfn, url in res['Value']['Successful'].items():
      self.assertEqual( url, self.basePath.rstrip( '/' ) + lfn )



  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
                return_value = S_OK( True ) )  # Pretend it's local
  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
                return_value = None )  # Don't send accounting
  def test_02_FileTest( self, mk_isLocalSE, mk_addAccounting ):
    """Testing createDirectory"""
    # Putting the files

    def localPutFile( fn, size = 0 ):
      """If fn is '/test/fn.txt', it calls
        { '/test/fn.txt' : /tmp/generatedPath/fn.txt}
      """
      transfDic = { fn  : os.path.join( self.srcPath, fn.replace( '/test/', '' ) )}
      return self.se.putFile( transfDic, sourceSize = size )

    # wrong size
    res = localPutFile( self.existingFile, size = -1 )
    self.assert_( res['OK'], res )
    self.assert_( self.existingFile in res['Value']['Failed'] )
    self.assert_( 'not match' in res['Value']['Failed'][self.existingFile], res )
    self.assert_( not os.path.exists( self.basePath + self.existingFile ) )

    # Correct size
    res = localPutFile( self.existingFile, size = self.existingFileSize )
    self.assert_( res['OK'], res )
    self.assert_( self.existingFile in res['Value']['Successful'], res )
    self.assert_( os.path.exists( self.basePath + self.existingFile ) )

    # No size
    res = localPutFile( self.existingFile )
    self.assert_( res['OK'], res )
    self.assert_( self.existingFile in res['Value']['Successful'], res )
    self.assert_( os.path.exists( self.basePath + self.existingFile ) )

    # No existing source file
    res = localPutFile( self.nonExistingFile )
    self.assert_( res['OK'], res )
    self.assert_( self.nonExistingFile in res['Value']['Failed'], res )
    self.assert_( os.strerror( errno.ENOENT ) in res['Value']['Failed'][self.nonExistingFile], res )

    # sub file
    res = localPutFile( self.subFile )
    self.assert_( res['OK'], res )
    self.assert_( self.subFile in res['Value']['Successful'], res )
    self.assert_( os.path.exists( self.basePath + self.subFile ) )

    # Directory
    res = localPutFile( self.subDir )
    self.assert_( res['OK'], res )
    self.assert_( self.subDir in res['Value']['Failed'] )
    self.assert_( os.strerror( errno.EISDIR ) in res['Value']['Failed'][self.subDir], res )


    res = self.se.exists( self.FILES )
    self.assert_( res['OK'], res )
    self.assert_( not res['Value']['Failed'], res )
    self.assert_( res['Value']['Successful'][self.existingFile], res )
    self.assert_( not res['Value']['Successful'][self.nonExistingFile], res )

    res = self.se.getFileSize( self.ALL )
    self.assert_( res['OK'], res )
    self.assertEqual( res['Value']['Successful'][self.existingFile], self.existingFileSize )
    self.assert_( os.strerror( errno.ENOENT ) in res['Value']['Failed'][self.nonExistingFile], res )
    self.assert_( os.strerror( errno.EISDIR ) in res['Value']['Failed'][self.subDir], res )


    res = self.se.getFileMetadata( self.ALL )
    self.assert_( res['OK'], res )
    self.assert_( self.existingFile in res['Value']['Successful'] )
    self.assert_( os.strerror( errno.ENOENT ) in res['Value']['Failed'][self.nonExistingFile], res )
    self.assert_( os.strerror( errno.EISDIR ) in res['Value']['Failed'][self.subDir], res )


    res = self.se.isFile( self.ALL )
    self.assert_( res['OK'], res )
    self.assert_( res['Value']['Successful'][self.existingFile], res )
    self.assert_( not res['Value']['Successful'][self.subDir], res )
    self.assert_( os.strerror( errno.ENOENT ) in res['Value']['Failed'][self.nonExistingFile], res )

    res = self.se.getFile( self.ALL, localPath = self.destPath )
    self.assert_( res['OK'], res )
    self.assertEqual( res['Value']['Successful'][self.existingFile], self.existingFileSize )
    self.assert_( os.path.exists( os.path.join( self.destPath, os.path.basename( self.existingFile ) ) ) )
    self.assertEqual( res['Value']['Successful'][self.subFile], self.subFileSize )
    self.assert_( os.path.exists( os.path.join( self.destPath, os.path.basename( self.subFile ) ) ) )
    self.assert_( os.strerror( errno.ENOENT ) in res['Value']['Failed'][self.nonExistingFile], res )
    self.assert_( os.strerror( errno.EISDIR ) in res['Value']['Failed'][self.subDir], res )


    res = self.se.removeFile( self.ALL )
    self.assert_( res['OK'], res )
    self.assert_( res['Value']['Successful'][self.existingFile] )
    self.assert_( not os.path.exists( self.basePath + self.existingFile ) )
    self.assert_( res['Value']['Successful'][self.subFile] )
    self.assert_( not os.path.exists( self.basePath + self.subFile ) )
    self.assert_( res['Value']['Successful'][self.nonExistingFile] )
    self.assert_( os.strerror( errno.EISDIR ) in res['Value']['Failed'][self.subDir] )



  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
                return_value = S_OK( True ) )  # Pretend it's local
  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
                return_value = None )  # Don't send accounting
  def test_03_createDirectory( self, mk_isLocalSE, mk_addAccounting ):
    """Testing creating directories"""


    res = self.se.createDirectory( self.subDir )
    self.assert_( res['OK'], res )
    self.assert_( self.subDir in res['Value']['Successful'] )
    self.assert_( os.path.exists( self.basePath + self.subDir ) )


  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
                return_value = S_OK( True ) )  # Pretend it's local
  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
                return_value = None )  # Don't send accounting
  def test_04_putDirectory( self, mk_isLocalSE, mk_addAccounting ):
    """Testing putDirectory"""

    nonExistingDir = '/test/forsuredoesnotexist'
    localdirs = ['/test', nonExistingDir]

    # Correct size
    res = self.se.putDirectory( { '/test' : self.srcPath} )
    self.assert_( res['OK'], res )
    self.assert_( '/test' in res['Value']['Successful'], res )
    self.assertEqual( res['Value']['Successful']['/test'], {'Files': 2, 'Size': self.existingFileSize + self.subFileSize} )
    self.assert_( os.path.exists( self.basePath + '/test' ) )
    self.assert_( os.path.exists( self.basePath + self.existingFile ) )
    self.assert_( os.path.exists( self.basePath + self.subFile ) )


    # No existing source directory
    res = self.se.putDirectory( { '/test' : nonExistingDir} )
    self.assert_( res['OK'], res )
    self.assert_( '/test' in res['Value']['Failed'], res )
    self.assertEqual( res['Value']['Failed']['/test'], {'Files': 0, 'Size': 0} )

    # sub file
    res = self.se.putDirectory( { '/test' : self.existingFile} )
    self.assert_( res['OK'], res )
    self.assert_( '/test' in res['Value']['Failed'], res )
    self.assertEqual( res['Value']['Failed']['/test'], {'Files': 0, 'Size': 0} )


    res = self.se.exists( self.DIRECTORIES + localdirs )
    self.assert_( res['OK'], res )
    self.assert_( not res['Value']['Failed'], res )
    self.assert_( res['Value']['Successful'][self.subDir], res )
    self.assert_( not res['Value']['Successful'][nonExistingDir], res )

    res = self.se.getDirectorySize( self.ALL + localdirs )
    self.assert_( res['OK'], res )
    self.assertEqual( res['Value']['Successful'][self.subDir], { 'Files' : 1, 'Size' : self.subFileSize, 'SubDirs' : 0 } )
    self.assertEqual( res['Value']['Successful']['/test'], { 'Files' : 1, 'Size' : self.existingFileSize, 'SubDirs' : 1 } )
    self.assert_( os.strerror( errno.ENOENT ) in res['Value']['Failed'][self.nonExistingFile], res )
    self.assert_( os.strerror( errno.ENOTDIR ) in res['Value']['Failed'][self.existingFile], res )
    self.assert_( os.strerror( errno.ENOENT ) in res['Value']['Failed'][nonExistingDir], res )


    res = self.se.getDirectoryMetadata( self.ALL + localdirs )
    self.assert_( res['OK'], res )
    self.assert_( self.subDir in res['Value']['Successful'] )
    self.assert_( os.strerror( errno.ENOENT ) in res['Value']['Failed'][self.nonExistingFile], res )
    self.assert_( os.strerror( errno.ENOENT ) in res['Value']['Failed'][nonExistingDir], res )
    self.assert_( os.strerror( errno.ENOTDIR ) in res['Value']['Failed'][self.existingFile], res )


    res = self.se.isDirectory( self.ALL + localdirs )
    self.assert_( res['OK'], res )
    self.assert_( not res['Value']['Successful'][self.existingFile] )
    self.assert_( res['Value']['Successful'][self.subDir], res )
    self.assert_( os.strerror( errno.ENOENT ) in res['Value']['Failed'][self.nonExistingFile], res )
    self.assert_( os.strerror( errno.ENOENT ) in res['Value']['Failed'][nonExistingDir], res )

    res = self.se.listDirectory( self.ALL + localdirs )
    self.assert_( res['OK'], res )
    self.assertEqual( res['Value']['Successful'][self.subDir], {'Files': [self.subFile], 'SubDirs': []} )
    self.assertEqual( res['Value']['Successful']['/test'], {'Files': [self.existingFile], 'SubDirs': [self.subDir]} )
    self.assert_( os.strerror( errno.ENOENT ) in res['Value']['Failed'][self.nonExistingFile], res )
    self.assert_( os.strerror( errno.ENOTDIR ) in res['Value']['Failed'][self.existingFile], res )
    self.assert_( os.strerror( errno.ENOENT ) in res['Value']['Failed'][nonExistingDir], res )


    res = self.se.getDirectory( self.ALL + localdirs, localPath = self.destPath )
    self.assert_( res['OK'], res )
    self.assertEqual( res['Value']['Successful']['/test'], {'Files' : 2, 'Size' : self.existingFileSize + self.subFileSize} )
    self.assert_( os.path.exists( self.destPath + self.existingFile ) )
    self.assert_( os.path.exists( self.destPath + self.subFile ) )
    self.assertEqual( res['Value']['Successful'][self.subDir], {'Files' : 1, 'Size' : self.subFileSize} )
    self.assert_( os.path.exists( self.destPath + self.subFile.replace( '/test', '' ) ) )
    self.assertEqual( res['Value']['Failed'][self.nonExistingFile], {'Files': 0, 'Size': 0} )
    self.assertEqual( res['Value']['Failed'][self.existingFile], {'Files': 0, 'Size': 0} )
    self.assertEqual( res['Value']['Failed'][nonExistingDir], {'Files': 0, 'Size': 0} )


    res = self.se.removeDirectory( nonExistingDir, recursive = False )
    self.assert_( res['OK'], res )
    self.assertEqual( res['Value']['Successful'][nonExistingDir], True )

    res = self.se.removeDirectory( nonExistingDir, recursive = True )
    self.assert_( res['OK'], res )
    self.assertEqual( res['Value']['Failed'][nonExistingDir], {'FilesRemoved':0, 'SizeRemoved':0} )


    res = self.se.removeDirectory( self.nonExistingFile, recursive = False )
    self.assert_( res['OK'], res )
    self.assertEqual( res['Value']['Successful'][self.nonExistingFile], True )

    res = self.se.removeDirectory( self.nonExistingFile, recursive = True )
    self.assert_( res['OK'], res )
    self.assertEqual( res['Value']['Failed'][self.nonExistingFile], {'FilesRemoved':0, 'SizeRemoved':0} )


    res = self.se.removeDirectory( self.existingFile, recursive = False )
    self.assert_( res['OK'], res )
    self.assert_( os.strerror( errno.ENOTDIR ) in res['Value']['Failed'][self.existingFile], res )

    res = self.se.removeDirectory( self.existingFile, recursive = True )
    self.assert_( res['OK'], res )
    self.assertEqual( res['Value']['Failed'][self.existingFile], {'FilesRemoved':0, 'SizeRemoved':0} )


    res = self.se.removeDirectory( '/test', recursive = False )
    self.assert_( res['OK'], res )
    self.assertEqual( res['Value']['Successful']['/test'], True )
    self.assert_( not os.path.exists( self.basePath + self.existingFile ) )
    self.assert_( os.path.exists( self.basePath + self.subFile ) )

    res = self.se.removeDirectory( '/test', recursive = True )
    self.assert_( res['OK'], res )
    self.assertEqual( res['Value']['Successful']['/test'], {'FilesRemoved':1, 'SizeRemoved':self.subFileSize} )
    self.assert_( not os.path.exists( self.basePath + '/test' ) )
Example #15
class TestBase(unittest.TestCase):
    """ Base test class. Defines all the method to test
  """
    @mock.patch(
        'DIRAC.Resources.Storage.StorageFactory.StorageFactory._getConfigStorageName',
        side_effect=mock_StorageFactory_getConfigStorageName)
    @mock.patch(
        'DIRAC.Resources.Storage.StorageFactory.StorageFactory._getConfigStorageOptions',
        side_effect=mock_StorageFactory_getConfigStorageOptions)
    @mock.patch(
        'DIRAC.Resources.Storage.StorageFactory.StorageFactory._getConfigStorageProtocols',
        side_effect=mock_StorageFactory_getConfigStorageProtocols)
    @mock.patch(
        'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
        return_value=S_OK(True))  # Pretend it's local
    @mock.patch(
        'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
        return_value=None)  # Don't send accounting
    def setUp(self, mk_getConfigStorageName, mk_getConfigStorageOptions,
              mk_getConfigStorageProtocols, mk_isLocalSE,
              mk_addAccountingOperation):
        self.se = StorageElementItem('FAKE')
        self.se.vo = 'test'

        self.basePath = tempfile.mkdtemp(dir='/tmp')
        # Update the basePath of the plugin
        self.se.storages[0].basePath = self.basePath

        self.srcPath = tempfile.mkdtemp(dir='/tmp')

        self.destPath = tempfile.mkdtemp(dir='/tmp')

        self.existingFile = '/test/file.txt'
        self.existingFileSize = 0

        self.nonExistingFile = '/test/nonExistingFile.txt'
        self.subDir = '/test/subDir'
        self.subFile = os.path.join(self.subDir, 'subFile.txt')
        self.subFileSize = 0

        self.FILES = [self.existingFile, self.nonExistingFile, self.subFile]
        self.DIRECTORIES = [self.subDir]
        self.ALL = self.FILES + self.DIRECTORIES

        with open(
                os.path.join(self.srcPath,
                             self.existingFile.replace('/test/', '')),
                'w') as f:
            f.write("I put something in the file so that it has a size\n")
        self.existingFileSize = os.path.getsize(
            os.path.join(self.srcPath, self.existingFile.replace('/test/',
                                                                 '')))

        assert self.existingFileSize

        os.mkdir(os.path.join(self.srcPath, os.path.basename(self.subDir)))

        with open(
                os.path.join(self.srcPath, self.subFile.replace('/test/', '')),
                'w') as f:
            f.write("This one should have a size as well\n")
        self.subFileSize = os.path.getsize(
            os.path.join(self.srcPath, self.subFile.replace('/test/', '')))

        assert self.subFileSize

    def tearDown(self):
        shutil.rmtree(self.basePath)
        shutil.rmtree(self.srcPath)
        shutil.rmtree(self.destPath)
        pass

    def walkAll(self):
        for dirname in [self.basePath, self.destPath]:
            self.walkPath(dirname)

    def walkPath(self, path):
        for root, dirs, files in os.walk(path):
            print(root)
            print("  dirs")
            for d in dirs:
                print("    ", os.path.join(root, d))
            print("  files")
            for f in files:
                print("    ", os.path.join(root, f))

    @mock.patch(
        'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
        return_value=S_OK(True))  # Pretend it's local
    @mock.patch(
        'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
        return_value=None)  # Don't send accounting
    def test_01_getURL(self, mk_isLocalSE, mk_addAccounting):
        """Testing getURL"""
        # Testing the getURL
        res = self.se.getURL(self.ALL)
        self.assertTrue(res['OK'], res)
        self.assertTrue(not res['Value']['Failed'], res['Value']['Failed'])
        self.assertTrue(len(res['Value']['Successful']) == len(self.ALL))
        for lfn, url in res['Value']['Successful'].items():
            self.assertEqual(url, self.basePath.rstrip('/') + lfn)

    @mock.patch(
        'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
        return_value=S_OK(True))  # Pretend it's local
    @mock.patch(
        'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
        return_value=None)  # Don't send accounting
    def test_02_FileTest(self, mk_isLocalSE, mk_addAccounting):
        """Testing createDirectory"""

        # Putting the files

        def localPutFile(fn, size=0):
            """If fn is '/test/fn.txt', it calls
        { '/test/fn.txt' : /tmp/generatedPath/fn.txt}
      """
            transfDic = {
                fn: os.path.join(self.srcPath, fn.replace('/test/', ''))
            }
            return self.se.putFile(transfDic, sourceSize=size)

        # wrong size
        res = localPutFile(self.existingFile, size=-1)
        self.assertTrue(res['OK'], res)
        self.assertTrue(self.existingFile in res['Value']['Failed'], res)
        self.assertTrue(
            'not match' in res['Value']['Failed'][self.existingFile], res)
        self.assertTrue(not os.path.exists(self.basePath + self.existingFile))

        # Correct size
        res = localPutFile(self.existingFile, size=self.existingFileSize)
        self.assertTrue(res['OK'], res)
        self.assertTrue(self.existingFile in res['Value']['Successful'], res)
        self.assertTrue(os.path.exists(self.basePath + self.existingFile))

        # No size
        res = localPutFile(self.existingFile)
        self.assertTrue(res['OK'], res)
        self.assertTrue(self.existingFile in res['Value']['Successful'], res)
        self.assertTrue(os.path.exists(self.basePath + self.existingFile))

        # No existing source file
        res = localPutFile(self.nonExistingFile)
        self.assertTrue(res['OK'], res)
        self.assertTrue(self.nonExistingFile in res['Value']['Failed'], res)
        self.assertTrue(
            os.strerror(errno.ENOENT)
            in res['Value']['Failed'][self.nonExistingFile], res)

        # sub file
        res = localPutFile(self.subFile)
        self.assertTrue(res['OK'], res)
        self.assertTrue(self.subFile in res['Value']['Successful'], res)
        self.assertTrue(os.path.exists(self.basePath + self.subFile))

        # Directory
        res = localPutFile(self.subDir)
        self.assertTrue(res['OK'], res)
        self.assertTrue(self.subDir in res['Value']['Failed'])
        self.assertTrue(
            os.strerror(errno.EISDIR) in res['Value']['Failed'][self.subDir],
            res)

        res = self.se.exists(self.FILES)
        self.assertTrue(res['OK'], res)
        self.assertTrue(not res['Value']['Failed'], res)
        self.assertTrue(res['Value']['Successful'][self.existingFile], res)
        self.assertTrue(not res['Value']['Successful'][self.nonExistingFile],
                        res)

        res = self.se.getFileSize(self.ALL)
        self.assertTrue(res['OK'], res)
        self.assertEqual(res['Value']['Successful'][self.existingFile],
                         self.existingFileSize)
        self.assertTrue(
            os.strerror(errno.ENOENT)
            in res['Value']['Failed'][self.nonExistingFile], res)
        self.assertTrue(
            os.strerror(errno.EISDIR) in res['Value']['Failed'][self.subDir],
            res)

        res = self.se.getFileMetadata(self.ALL)
        self.assertTrue(res['OK'], res)
        self.assertTrue(self.existingFile in res['Value']['Successful'])
        self.assertTrue(
            os.strerror(errno.ENOENT)
            in res['Value']['Failed'][self.nonExistingFile], res)
        self.assertTrue(
            os.strerror(errno.EISDIR) in res['Value']['Failed'][self.subDir],
            res)

        res = self.se.isFile(self.ALL)
        self.assertTrue(res['OK'], res)
        self.assertTrue(res['Value']['Successful'][self.existingFile], res)
        self.assertTrue(not res['Value']['Successful'][self.subDir], res)
        self.assertTrue(
            os.strerror(errno.ENOENT)
            in res['Value']['Failed'][self.nonExistingFile], res)

        res = self.se.getFile(self.ALL, localPath=self.destPath)
        self.assertTrue(res['OK'], res)
        self.assertEqual(res['Value']['Successful'][self.existingFile],
                         self.existingFileSize)
        self.assertTrue(
            os.path.exists(
                os.path.join(self.destPath,
                             os.path.basename(self.existingFile))))
        self.assertEqual(res['Value']['Successful'][self.subFile],
                         self.subFileSize)
        self.assertTrue(
            os.path.exists(
                os.path.join(self.destPath, os.path.basename(self.subFile))))
        self.assertTrue(
            os.strerror(errno.ENOENT)
            in res['Value']['Failed'][self.nonExistingFile], res)
        self.assertTrue(
            os.strerror(errno.EISDIR) in res['Value']['Failed'][self.subDir],
            res)

        res = self.se.removeFile(self.ALL)
        self.assertTrue(res['OK'], res)
        self.assertTrue(res['Value']['Successful'][self.existingFile])
        self.assertTrue(not os.path.exists(self.basePath + self.existingFile))
        self.assertTrue(res['Value']['Successful'][self.subFile])
        self.assertTrue(not os.path.exists(self.basePath + self.subFile))
        self.assertTrue(res['Value']['Successful'][self.nonExistingFile])
        self.assertTrue(
            os.strerror(errno.EISDIR) in res['Value']['Failed'][self.subDir])

    @mock.patch(
        'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
        return_value=S_OK(True))  # Pretend it's local
    @mock.patch(
        'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
        return_value=None)  # Don't send accounting
    def test_03_createDirectory(self, mk_isLocalSE, mk_addAccounting):
        """Testing creating directories"""

        res = self.se.createDirectory(self.subDir)
        self.assertTrue(res['OK'], res)
        self.assertTrue(self.subDir in res['Value']['Successful'])
        self.assertTrue(os.path.exists(self.basePath + self.subDir))

    @mock.patch(
        'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
        return_value=S_OK(True))  # Pretend it's local
    @mock.patch(
        'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
        return_value=None)  # Don't send accounting
    def test_04_putDirectory(self, mk_isLocalSE, mk_addAccounting):
        """Testing putDirectory"""

        nonExistingDir = '/test/forsuredoesnotexist'
        localdirs = ['/test', nonExistingDir]

        # Correct size
        res = self.se.putDirectory({'/test': self.srcPath})
        self.assertTrue(res['OK'], res)
        self.assertTrue('/test' in res['Value']['Successful'], res)
        self.assertEqual(res['Value']['Successful']['/test'], {
            'Files': 2,
            'Size': self.existingFileSize + self.subFileSize
        })
        self.assertTrue(os.path.exists(self.basePath + '/test'))
        self.assertTrue(os.path.exists(self.basePath + self.existingFile))
        self.assertTrue(os.path.exists(self.basePath + self.subFile))

        # No existing source directory
        res = self.se.putDirectory({'/test': nonExistingDir})
        self.assertTrue(res['OK'], res)
        self.assertTrue('/test' in res['Value']['Failed'], res)
        self.assertEqual(res['Value']['Failed']['/test'], {
            'Files': 0,
            'Size': 0
        })

        # sub file
        res = self.se.putDirectory({'/test': self.existingFile})
        self.assertTrue(res['OK'], res)
        self.assertTrue('/test' in res['Value']['Failed'], res)
        self.assertEqual(res['Value']['Failed']['/test'], {
            'Files': 0,
            'Size': 0
        })

        res = self.se.exists(self.DIRECTORIES + localdirs)
        self.assertTrue(res['OK'], res)
        self.assertTrue(not res['Value']['Failed'], res)
        self.assertTrue(res['Value']['Successful'][self.subDir], res)
        self.assertTrue(not res['Value']['Successful'][nonExistingDir], res)

        res = self.se.getDirectorySize(self.ALL + localdirs)
        self.assertTrue(res['OK'], res)
        self.assertEqual(res['Value']['Successful'][self.subDir], {
            'Files': 1,
            'Size': self.subFileSize,
            'SubDirs': 0
        })
        self.assertEqual(res['Value']['Successful']['/test'], {
            'Files': 1,
            'Size': self.existingFileSize,
            'SubDirs': 1
        })
        self.assertTrue(
            os.strerror(errno.ENOENT)
            in res['Value']['Failed'][self.nonExistingFile], res)
        self.assertTrue(
            os.strerror(errno.ENOTDIR)
            in res['Value']['Failed'][self.existingFile], res)
        self.assertTrue(
            os.strerror(errno.ENOENT)
            in res['Value']['Failed'][nonExistingDir], res)

        res = self.se.getDirectoryMetadata(self.ALL + localdirs)
        self.assertTrue(res['OK'], res)
        self.assertTrue(self.subDir in res['Value']['Successful'])
        self.assertTrue(
            os.strerror(errno.ENOENT)
            in res['Value']['Failed'][self.nonExistingFile], res)
        self.assertTrue(
            os.strerror(errno.ENOENT)
            in res['Value']['Failed'][nonExistingDir], res)
        self.assertTrue(
            os.strerror(errno.ENOTDIR)
            in res['Value']['Failed'][self.existingFile], res)

        res = self.se.isDirectory(self.ALL + localdirs)
        self.assertTrue(res['OK'], res)
        self.assertTrue(not res['Value']['Successful'][self.existingFile])
        self.assertTrue(res['Value']['Successful'][self.subDir], res)
        self.assertTrue(
            os.strerror(errno.ENOENT)
            in res['Value']['Failed'][self.nonExistingFile], res)
        self.assertTrue(
            os.strerror(errno.ENOENT)
            in res['Value']['Failed'][nonExistingDir], res)

        res = self.se.listDirectory(self.ALL + localdirs)
        self.assertTrue(res['OK'], res)
        self.assertEqual(res['Value']['Successful'][self.subDir], {
            'Files': [self.subFile],
            'SubDirs': []
        })
        self.assertEqual(res['Value']['Successful']['/test'], {
            'Files': [self.existingFile],
            'SubDirs': [self.subDir]
        })
        self.assertTrue(
            os.strerror(errno.ENOENT)
            in res['Value']['Failed'][self.nonExistingFile], res)
        self.assertTrue(
            os.strerror(errno.ENOTDIR)
            in res['Value']['Failed'][self.existingFile], res)
        self.assertTrue(
            os.strerror(errno.ENOENT)
            in res['Value']['Failed'][nonExistingDir], res)

        res = self.se.getDirectory(self.ALL + localdirs,
                                   localPath=self.destPath)
        self.assertTrue(res['OK'], res)
        self.assertEqual(res['Value']['Successful']['/test'], {
            'Files': 2,
            'Size': self.existingFileSize + self.subFileSize
        })
        self.assertTrue(os.path.exists(self.destPath + self.existingFile))
        self.assertTrue(os.path.exists(self.destPath + self.subFile))
        self.assertEqual(res['Value']['Successful'][self.subDir], {
            'Files': 1,
            'Size': self.subFileSize
        })
        self.assertTrue(
            os.path.exists(self.destPath + self.subFile.replace('/test', '')))
        self.assertEqual(res['Value']['Failed'][self.nonExistingFile], {
            'Files': 0,
            'Size': 0
        })
        self.assertEqual(res['Value']['Failed'][self.existingFile], {
            'Files': 0,
            'Size': 0
        })
        self.assertEqual(res['Value']['Failed'][nonExistingDir], {
            'Files': 0,
            'Size': 0
        })

        res = self.se.removeDirectory(nonExistingDir, recursive=False)
        self.assertTrue(res['OK'], res)
        self.assertEqual(res['Value']['Successful'][nonExistingDir], True)

        res = self.se.removeDirectory(nonExistingDir, recursive=True)
        self.assertTrue(res['OK'], res)
        self.assertEqual(res['Value']['Failed'][nonExistingDir], {
            'FilesRemoved': 0,
            'SizeRemoved': 0
        })

        res = self.se.removeDirectory(self.nonExistingFile, recursive=False)
        self.assertTrue(res['OK'], res)
        self.assertEqual(res['Value']['Successful'][self.nonExistingFile],
                         True)

        res = self.se.removeDirectory(self.nonExistingFile, recursive=True)
        self.assertTrue(res['OK'], res)
        self.assertEqual(res['Value']['Failed'][self.nonExistingFile], {
            'FilesRemoved': 0,
            'SizeRemoved': 0
        })

        res = self.se.removeDirectory(self.existingFile, recursive=False)
        self.assertTrue(res['OK'], res)
        self.assertTrue(
            os.strerror(errno.ENOTDIR)
            in res['Value']['Failed'][self.existingFile], res)

        res = self.se.removeDirectory(self.existingFile, recursive=True)
        self.assertTrue(res['OK'], res)
        self.assertEqual(res['Value']['Failed'][self.existingFile], {
            'FilesRemoved': 0,
            'SizeRemoved': 0
        })

        res = self.se.removeDirectory('/test', recursive=False)
        self.assertTrue(res['OK'], res)
        self.assertEqual(res['Value']['Successful']['/test'], True)
        self.assertTrue(not os.path.exists(self.basePath + self.existingFile))
        self.assertTrue(os.path.exists(self.basePath + self.subFile))

        res = self.se.removeDirectory('/test', recursive=True)
        self.assertTrue(res['OK'], res)
        self.assertEqual(res['Value']['Successful']['/test'], {
            'FilesRemoved': 1,
            'SizeRemoved': self.subFileSize
        })
        self.assertTrue(not os.path.exists(self.basePath + '/test'))
Example #16
class TestBase(unittest.TestCase):
    """Base test class. Defines all the method to test"""
    @mock.patch(
        "DIRAC.Resources.Storage.StorageFactory.StorageFactory._getConfigStorageName",
        side_effect=mock_StorageFactory_getConfigStorageName,
    )
    @mock.patch(
        "DIRAC.Resources.Storage.StorageFactory.StorageFactory._getConfigStorageOptions",
        side_effect=mock_StorageFactory_getConfigStorageOptions,
    )
    @mock.patch(
        "DIRAC.Resources.Storage.StorageFactory.StorageFactory._getConfigStorageProtocols",
        side_effect=mock_StorageFactory_getConfigStorageProtocols,
    )
    @mock.patch(
        "DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE",
        return_value=S_OK(True),
    )  # Pretend it's local
    @mock.patch(
        "DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation",
        return_value=None)  # Don't send accounting
    def setUp(
        self,
        mk_getConfigStorageName,
        mk_getConfigStorageOptions,
        mk_getConfigStorageProtocols,
        mk_isLocalSE,
        mk_addAccountingOperation,
    ):
        self.se = StorageElementItem("FAKE")
        self.se.vo = "test"

        self.basePath = tempfile.mkdtemp(dir="/tmp")
        # Update the basePath of the plugin
        self.se.storages[0].basePath = self.basePath

        self.srcPath = tempfile.mkdtemp(dir="/tmp")

        self.destPath = tempfile.mkdtemp(dir="/tmp")

        self.existingFile = "/test/file.txt"
        self.existingFileSize = 0

        self.nonExistingFile = "/test/nonExistingFile.txt"
        self.subDir = "/test/subDir"
        self.subFile = os.path.join(self.subDir, "subFile.txt")
        self.subFileSize = 0

        self.FILES = [self.existingFile, self.nonExistingFile, self.subFile]
        self.DIRECTORIES = [self.subDir]
        self.ALL = self.FILES + self.DIRECTORIES

        with open(
                os.path.join(self.srcPath,
                             self.existingFile.replace("/test/", "")),
                "w") as f:
            f.write("I put something in the file so that it has a size\n")
        self.existingFileSize = os.path.getsize(
            os.path.join(self.srcPath, self.existingFile.replace("/test/",
                                                                 "")))

        assert self.existingFileSize

        os.mkdir(os.path.join(self.srcPath, os.path.basename(self.subDir)))

        with open(
                os.path.join(self.srcPath, self.subFile.replace("/test/", "")),
                "w") as f:
            f.write("This one should have a size as well\n")
        self.subFileSize = os.path.getsize(
            os.path.join(self.srcPath, self.subFile.replace("/test/", "")))

        assert self.subFileSize

    def tearDown(self):
        shutil.rmtree(self.basePath)
        shutil.rmtree(self.srcPath)
        shutil.rmtree(self.destPath)
        pass

    def walkAll(self):
        for dirname in [self.basePath, self.destPath]:
            self.walkPath(dirname)

    def walkPath(self, path):
        for root, dirs, files in os.walk(path):
            print(root)
            print("  dirs")
            for d in dirs:
                print("    ", os.path.join(root, d))
            print("  files")
            for f in files:
                print("    ", os.path.join(root, f))

    @mock.patch(
        "DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE",
        return_value=S_OK(True),
    )  # Pretend it's local
    @mock.patch(
        "DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation",
        return_value=None)  # Don't send accounting
    def test_01_getURL(self, mk_isLocalSE, mk_addAccounting):
        """Testing getURL"""
        # Testing the getURL
        res = self.se.getURL(self.ALL)
        self.assertTrue(res["OK"], res)
        self.assertTrue(not res["Value"]["Failed"], res["Value"]["Failed"])
        self.assertTrue(len(res["Value"]["Successful"]) == len(self.ALL))
        for lfn, url in res["Value"]["Successful"].items():
            self.assertEqual(url, self.basePath.rstrip("/") + lfn)

    @mock.patch(
        "DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE",
        return_value=S_OK(True),
    )  # Pretend it's local
    @mock.patch(
        "DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation",
        return_value=None)  # Don't send accounting
    def test_02_FileTest(self, mk_isLocalSE, mk_addAccounting):
        """Testing createDirectory"""

        # Putting the files

        def localPutFile(fn, size=0):
            """If fn is '/test/fn.txt', it calls
            { '/test/fn.txt' : /tmp/generatedPath/fn.txt}
            """
            transfDic = {
                fn: os.path.join(self.srcPath, fn.replace("/test/", ""))
            }
            return self.se.putFile(transfDic, sourceSize=size)

        # wrong size
        res = localPutFile(self.existingFile, size=-1)
        self.assertTrue(res["OK"], res)
        self.assertTrue(self.existingFile in res["Value"]["Failed"], res)
        self.assertTrue(
            "not match" in res["Value"]["Failed"][self.existingFile], res)
        self.assertTrue(not os.path.exists(self.basePath + self.existingFile))

        # Correct size
        res = localPutFile(self.existingFile, size=self.existingFileSize)
        self.assertTrue(res["OK"], res)
        self.assertTrue(self.existingFile in res["Value"]["Successful"], res)
        self.assertTrue(os.path.exists(self.basePath + self.existingFile))

        # No size
        res = localPutFile(self.existingFile)
        self.assertTrue(res["OK"], res)
        self.assertTrue(self.existingFile in res["Value"]["Successful"], res)
        self.assertTrue(os.path.exists(self.basePath + self.existingFile))

        # No existing source file
        res = localPutFile(self.nonExistingFile)
        self.assertTrue(res["OK"], res)
        self.assertTrue(self.nonExistingFile in res["Value"]["Failed"], res)
        self.assertTrue(
            os.strerror(errno.ENOENT)
            in res["Value"]["Failed"][self.nonExistingFile], res)

        # sub file
        res = localPutFile(self.subFile)
        self.assertTrue(res["OK"], res)
        self.assertTrue(self.subFile in res["Value"]["Successful"], res)
        self.assertTrue(os.path.exists(self.basePath + self.subFile))

        # Directory
        res = localPutFile(self.subDir)
        self.assertTrue(res["OK"], res)
        self.assertTrue(self.subDir in res["Value"]["Failed"])
        self.assertTrue(
            os.strerror(errno.EISDIR) in res["Value"]["Failed"][self.subDir] or
            # Python 3.9.7+ improved the Exception that is raised
            "Directory does not exist" in res["Value"]["Failed"][self.subDir],
            res,
        )

        res = self.se.exists(self.FILES)
        self.assertTrue(res["OK"], res)
        self.assertTrue(not res["Value"]["Failed"], res)
        self.assertTrue(res["Value"]["Successful"][self.existingFile], res)
        self.assertTrue(not res["Value"]["Successful"][self.nonExistingFile],
                        res)

        res = self.se.getFileSize(self.ALL)
        self.assertTrue(res["OK"], res)
        self.assertEqual(res["Value"]["Successful"][self.existingFile],
                         self.existingFileSize)
        self.assertTrue(
            os.strerror(errno.ENOENT)
            in res["Value"]["Failed"][self.nonExistingFile], res)
        self.assertTrue(
            os.strerror(errno.EISDIR) in res["Value"]["Failed"][self.subDir],
            res)

        res = self.se.getFileMetadata(self.ALL)
        self.assertTrue(res["OK"], res)
        self.assertTrue(self.existingFile in res["Value"]["Successful"])
        self.assertTrue(
            os.strerror(errno.ENOENT)
            in res["Value"]["Failed"][self.nonExistingFile], res)
        self.assertTrue(
            os.strerror(errno.EISDIR) in res["Value"]["Failed"][self.subDir],
            res)

        res = self.se.isFile(self.ALL)
        self.assertTrue(res["OK"], res)
        self.assertTrue(res["Value"]["Successful"][self.existingFile], res)
        self.assertTrue(not res["Value"]["Successful"][self.subDir], res)
        self.assertTrue(
            os.strerror(errno.ENOENT)
            in res["Value"]["Failed"][self.nonExistingFile], res)

        res = self.se.getFile(self.ALL, localPath=self.destPath)
        self.assertTrue(res["OK"], res)
        self.assertEqual(res["Value"]["Successful"][self.existingFile],
                         self.existingFileSize)
        self.assertTrue(
            os.path.exists(
                os.path.join(self.destPath,
                             os.path.basename(self.existingFile))))
        self.assertEqual(res["Value"]["Successful"][self.subFile],
                         self.subFileSize)
        self.assertTrue(
            os.path.exists(
                os.path.join(self.destPath, os.path.basename(self.subFile))))
        self.assertTrue(
            os.strerror(errno.ENOENT)
            in res["Value"]["Failed"][self.nonExistingFile], res)
        self.assertTrue(
            os.strerror(errno.EISDIR) in res["Value"]["Failed"][self.subDir] or
            # Python 3.9.7+ improved the Exception that is raised
            "Directory does not exist" in res["Value"]["Failed"][self.subDir],
            res,
        )

        res = self.se.removeFile(self.ALL)
        self.assertTrue(res["OK"], res)
        self.assertTrue(res["Value"]["Successful"][self.existingFile])
        self.assertTrue(not os.path.exists(self.basePath + self.existingFile))
        self.assertTrue(res["Value"]["Successful"][self.subFile])
        self.assertTrue(not os.path.exists(self.basePath + self.subFile))
        self.assertTrue(res["Value"]["Successful"][self.nonExistingFile])
        self.assertTrue(
            os.strerror(errno.EISDIR) in res["Value"]["Failed"][self.subDir])
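
The first assertions above exercise the sourceSize guard: when the declared size does not match the local file, putFile reports a per-LFN failure instead of uploading. A hedged sketch of that guard as a helper follows, assuming a configured StorageElement 'se' and the S_ERROR helper used elsewhere in these examples; none of it is part of the suite.

import os

def putFileChecked(se, lfn, localPath):
    """Upload localPath to lfn, letting the SE verify the source size."""
    size = os.path.getsize(localPath)
    res = se.putFile({lfn: localPath}, sourceSize=size)
    if res['OK'] and lfn in res['Value']['Failed']:
        # Per-file failure, e.g. a size mismatch or a missing source file
        return S_ERROR(res['Value']['Failed'][lfn])
    return res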

    @mock.patch(
        "DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE",
        return_value=S_OK(True),
    )  # Pretend it's local
    @mock.patch(
        "DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation",
        return_value=None)  # Don't send accounting
    def test_03_createDirectory(self, mk_isLocalSE, mk_addAccounting):
        """Testing creating directories"""

        res = self.se.createDirectory(self.subDir)
        self.assertTrue(res["OK"], res)
        self.assertTrue(self.subDir in res["Value"]["Successful"])
        self.assertTrue(os.path.exists(self.basePath + self.subDir))

    @mock.patch(
        "DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE",
        return_value=S_OK(True),
    )  # Pretend it's local
    @mock.patch(
        "DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation",
        return_value=None)  # Don't send accounting
    def test_04_putDirectory(self, mk_isLocalSE, mk_addAccounting):
        """Testing putDirectory"""

        nonExistingDir = "/test/forsuredoesnotexist"
        localdirs = ["/test", nonExistingDir]

        # Correct size
        res = self.se.putDirectory({"/test": self.srcPath})
        self.assertTrue(res["OK"], res)
        self.assertTrue("/test" in res["Value"]["Successful"], res)
        self.assertEqual(res["Value"]["Successful"]["/test"], {
            "Files": 2,
            "Size": self.existingFileSize + self.subFileSize
        })
        self.assertTrue(os.path.exists(self.basePath + "/test"))
        self.assertTrue(os.path.exists(self.basePath + self.existingFile))
        self.assertTrue(os.path.exists(self.basePath + self.subFile))

        # No existing source directory
        res = self.se.putDirectory({"/test": nonExistingDir})
        self.assertTrue(res["OK"], res)
        self.assertTrue("/test" in res["Value"]["Failed"], res)
        self.assertEqual(res["Value"]["Failed"]["/test"], {
            "Files": 0,
            "Size": 0
        })

        # sub file
        res = self.se.putDirectory({"/test": self.existingFile})
        self.assertTrue(res["OK"], res)
        self.assertTrue("/test" in res["Value"]["Failed"], res)
        self.assertEqual(res["Value"]["Failed"]["/test"], {
            "Files": 0,
            "Size": 0
        })

        res = self.se.exists(self.DIRECTORIES + localdirs)
        self.assertTrue(res["OK"], res)
        self.assertTrue(not res["Value"]["Failed"], res)
        self.assertTrue(res["Value"]["Successful"][self.subDir], res)
        self.assertTrue(not res["Value"]["Successful"][nonExistingDir], res)

        res = self.se.getDirectorySize(self.ALL + localdirs)
        self.assertTrue(res["OK"], res)
        self.assertEqual(res["Value"]["Successful"][self.subDir], {
            "Files": 1,
            "Size": self.subFileSize,
            "SubDirs": 0
        })
        self.assertEqual(res["Value"]["Successful"]["/test"], {
            "Files": 1,
            "Size": self.existingFileSize,
            "SubDirs": 1
        })
        self.assertTrue(
            os.strerror(errno.ENOENT)
            in res["Value"]["Failed"][self.nonExistingFile], res)
        self.assertTrue(
            os.strerror(errno.ENOTDIR)
            in res["Value"]["Failed"][self.existingFile], res)
        self.assertTrue(
            os.strerror(errno.ENOENT)
            in res["Value"]["Failed"][nonExistingDir], res)

        res = self.se.getDirectoryMetadata(self.ALL + localdirs)
        self.assertTrue(res["OK"], res)
        self.assertTrue(self.subDir in res["Value"]["Successful"])
        self.assertTrue(
            os.strerror(errno.ENOENT)
            in res["Value"]["Failed"][self.nonExistingFile], res)
        self.assertTrue(
            os.strerror(errno.ENOENT)
            in res["Value"]["Failed"][nonExistingDir], res)
        self.assertTrue(
            os.strerror(errno.ENOTDIR)
            in res["Value"]["Failed"][self.existingFile], res)

        res = self.se.isDirectory(self.ALL + localdirs)
        self.assertTrue(res["OK"], res)
        self.assertTrue(not res["Value"]["Successful"][self.existingFile])
        self.assertTrue(res["Value"]["Successful"][self.subDir], res)
        self.assertTrue(
            os.strerror(errno.ENOENT)
            in res["Value"]["Failed"][self.nonExistingFile], res)
        self.assertTrue(
            os.strerror(errno.ENOENT)
            in res["Value"]["Failed"][nonExistingDir], res)

        res = self.se.listDirectory(self.ALL + localdirs)
        self.assertTrue(res["OK"], res)

        self.assertEqual(sorted(list(res["Value"]["Successful"][self.subDir])),
                         sorted(["Files", "SubDirs"]))
        self.assertEqual(
            list(res["Value"]["Successful"][self.subDir]["Files"]),
            [self.subFile])
        self.assertEqual(
            list(res["Value"]["Successful"][self.subDir]["SubDirs"]), [])

        self.assertEqual(list(res["Value"]["Successful"]["/test"]["Files"]),
                         [self.existingFile])
        self.assertEqual(list(res["Value"]["Successful"]["/test"]["SubDirs"]),
                         [self.subDir])

        self.assertTrue(
            os.strerror(errno.ENOENT)
            in res["Value"]["Failed"][self.nonExistingFile], res)
        self.assertTrue(
            os.strerror(errno.ENOTDIR)
            in res["Value"]["Failed"][self.existingFile], res)
        self.assertTrue(
            os.strerror(errno.ENOENT)
            in res["Value"]["Failed"][nonExistingDir], res)

        res = self.se.getDirectory(self.ALL + localdirs,
                                   localPath=self.destPath)
        self.assertTrue(res["OK"], res)
        self.assertEqual(res["Value"]["Successful"]["/test"], {
            "Files": 2,
            "Size": self.existingFileSize + self.subFileSize
        })
        self.assertTrue(os.path.exists(self.destPath + self.existingFile))
        self.assertTrue(os.path.exists(self.destPath + self.subFile))
        self.assertEqual(res["Value"]["Successful"][self.subDir], {
            "Files": 1,
            "Size": self.subFileSize
        })
        self.assertTrue(
            os.path.exists(self.destPath + self.subFile.replace("/test", "")))
        self.assertEqual(res["Value"]["Failed"][self.nonExistingFile], {
            "Files": 0,
            "Size": 0
        })
        self.assertEqual(res["Value"]["Failed"][self.existingFile], {
            "Files": 0,
            "Size": 0
        })
        self.assertEqual(res["Value"]["Failed"][nonExistingDir], {
            "Files": 0,
            "Size": 0
        })

        res = self.se.removeDirectory(nonExistingDir, recursive=False)
        self.assertTrue(res["OK"], res)
        self.assertEqual(res["Value"]["Successful"][nonExistingDir], True)

        res = self.se.removeDirectory(nonExistingDir, recursive=True)
        self.assertTrue(res["OK"], res)
        self.assertEqual(res["Value"]["Failed"][nonExistingDir], {
            "FilesRemoved": 0,
            "SizeRemoved": 0
        })

        res = self.se.removeDirectory(self.nonExistingFile, recursive=False)
        self.assertTrue(res["OK"], res)
        self.assertEqual(res["Value"]["Successful"][self.nonExistingFile],
                         True)

        res = self.se.removeDirectory(self.nonExistingFile, recursive=True)
        self.assertTrue(res["OK"], res)
        self.assertEqual(res["Value"]["Failed"][self.nonExistingFile], {
            "FilesRemoved": 0,
            "SizeRemoved": 0
        })

        res = self.se.removeDirectory(self.existingFile, recursive=False)
        self.assertTrue(res["OK"], res)
        self.assertTrue(
            os.strerror(errno.ENOTDIR)
            in res["Value"]["Failed"][self.existingFile], res)

        res = self.se.removeDirectory(self.existingFile, recursive=True)
        self.assertTrue(res["OK"], res)
        self.assertEqual(res["Value"]["Failed"][self.existingFile], {
            "FilesRemoved": 0,
            "SizeRemoved": 0
        })

        res = self.se.removeDirectory("/test", recursive=False)
        self.assertTrue(res["OK"], res)
        self.assertEqual(res["Value"]["Successful"]["/test"], True)
        self.assertTrue(not os.path.exists(self.basePath + self.existingFile))
        self.assertTrue(os.path.exists(self.basePath + self.subFile))

        res = self.se.removeDirectory("/test", recursive=True)
        self.assertTrue(res["OK"], res)
        self.assertEqual(res["Value"]["Successful"]["/test"], {
            "FilesRemoved": 1,
            "SizeRemoved": self.subFileSize
        })
        self.assertTrue(not os.path.exists(self.basePath + "/test"))
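
The removeDirectory assertions above show the two modes: recursive=False deletes only the files directly under the directory (sub-directories survive), while recursive=True wipes everything and reports 'FilesRemoved' and 'SizeRemoved'. A hedged sketch of consuming that report, with 'se' as a placeholder for a configured StorageElement:

def wipeDirectory(se, path):
    """Recursively remove path on the SE and print what was deleted."""
    res = se.removeDirectory(path, recursive=True)
    if not res['OK'] or path in res['Value']['Failed']:
        return res
    summary = res['Value']['Successful'][path]
    print("Removed %s file(s), %s byte(s) under %s"
          % (summary['FilesRemoved'], summary['SizeRemoved'], path))
    return res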
Exemple #17
0
    def setUp(self, _mk_generateStorage, _mk_isLocalSE,
              _mk_addAccountingOperation):

        # Creating test configuration file
        self.testCfgFileName = os.path.join(tempfile.gettempdir(),
                                            'test_StorageElement.cfg')
        cfgContent = '''
    DIRAC
    {
      Setup=TestSetup
    }
    Resources{
      StorageElements{
        DiskStorageA
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          GFAL2_SRM2
          {
            Host = srm-diskandtape.cern.ch
            SpaceToken = Disk
            Protocol = srm
            Path = /base/pathDisk
          }
        }
        # Same end point as DiskStorageA, but with a different space token
        # So they should be considered the same
        TapeStorageA
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          GFAL2_SRM2
          {
            Host = srm-diskandtape.cern.ch
            Protocol = srm
            SpaceToken = Tape
            Path = /base/pathDisk
          }
        }
        # Normally does not happen in practice, but this is the same as DiskStorageA with more plugins
        DiskStorageAWithMoreProtocol
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          GFAL2_SRM2
          {
            Host = srm-diskandtape.cern.ch
            SpaceToken = Disk
            Protocol = srm
            Path = /base/pathDisk
          }
          GFAL2_GSIFTP
          {
            Host = gsiftp-diskandtape.cern.ch
            SpaceToken = Disk
            Protocol = gsiftp
            Path = /base/pathDisk
          }
        }
        # A different storage
        StorageB
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          GFAL2_GSIFTP
          {
            Host = otherstorage.cern.ch
            SpaceToken = Disk
            Protocol = gsiftp
            Path = /base/pathDisk
          }
        }
        # The same endpoint as StorageB but with a different base path, so not the same
        StorageBWithOtherBasePath
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          GFAL2_GSIFTP
          {
            Host = otherstorage.cern.ch
            SpaceToken = Disk
            Protocol = gsiftp
            Path = /base/otherPath
          }
        }
    }
    Operations{
      Defaults
      {
        DataManagement{
          AccessProtocols = fakeProto
          AccessProtocols += root
          WriteProtocols = srm
        }
      }
    }
    '''

        with open(self.testCfgFileName, 'w') as f:
            f.write(cfgContent)

        # SUPER UGLY: one must recreate the CFG objects of gConfigurationData
        # not to conflict with other tests that might be using a local dirac.cfg
        gConfigurationData.localCFG = CFG()
        gConfigurationData.remoteCFG = CFG()
        gConfigurationData.mergedCFG = CFG()
        gConfigurationData.generateNewVersion()

        gConfig = ConfigurationClient(fileToLoadList=[
            self.testCfgFileName
        ])  # we replace the configuration by our own one.

        self.diskStorageA = StorageElementItem('DiskStorageA')
        self.diskStorageA.vo = 'lhcb'
        self.tapeStorageA = StorageElementItem('TapeStorageA')
        self.tapeStorageA.vo = 'lhcb'
        self.diskStorageAWithMoreProtocol = StorageElementItem(
            'DiskStorageAWithMoreProtocol')
        self.diskStorageAWithMoreProtocol.vo = 'lhcb'
        self.storageB = StorageElementItem('StorageB')
        self.storageB.vo = 'lhcb'
        self.storageBWithOtherBasePath = StorageElementItem(
            'StorageBWithOtherBasePath')
        self.storageBWithOtherBasePath.vo = 'lhcb'
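
The configuration handling above (write a temporary cfg file, then recreate the CFG objects of gConfigurationData so other tests are not polluted) is repeated in several of these examples. A hedged sketch of factoring it into a context manager follows; it assumes the same gConfigurationData, CFG and ConfigurationClient imports used by the tests, and is not taken from the DIRAC code base.

import os
import tempfile
from contextlib import contextmanager

@contextmanager
def isolatedDiracConfig(cfgContent):
    """Load a throw-away DIRAC configuration from a string, then reset it."""
    fileName = os.path.join(tempfile.gettempdir(), 'test_isolated.cfg')
    with open(fileName, 'w') as f:
        f.write(cfgContent)
    for attr in ('localCFG', 'remoteCFG', 'mergedCFG'):
        setattr(gConfigurationData, attr, CFG())
    gConfigurationData.generateNewVersion()
    ConfigurationClient(fileToLoadList=[fileName])
    try:
        yield
    finally:
        os.remove(fileName)
        for attr in ('localCFG', 'remoteCFG', 'mergedCFG'):
            setattr(gConfigurationData, attr, CFG())
        gConfigurationData.generateNewVersion()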
Exemple #18
0
    def execute(self):
        """ Run this.
    """
        if not self.workflowStatus['OK'] or not self.stepStatus['OK']:
            self.log.verbose(
                'Workflow status = %s, step status = %s' %
                (self.workflowStatus['OK'], self.stepStatus['OK']))
            return S_OK('Workflow status is not OK')
        result = self.resolveInputVariables()
        if not result['OK']:
            self.log.error("Failed to resolve input parameters:",
                           result["Message"])
            return result
        if not self.srmfiles:
            self.log.error('Files txt was not found correctly: %s' %
                           self.srmfiles)
            return S_ERROR('Files txt was not found correctly')

        if not isinstance(self.files[0], dict):
            self.log.error('Files were not found correctly: %s' % self.files)
            return S_ERROR('Files were not found correctly')

        ## Check that not too many concurrent jobs are downloading the overlay files at the same time
        max_concurrent_running = self.ops.getValue(
            '/GetSRM/MaxConcurrentRunning', 100)
        error_count = 0
        while 1:
            if error_count > 10:
                self.log.error(
                    'JobDB Content does not return expected dictionary')
                return S_ERROR(
                    'Failed to get number of concurrent overlay jobs')
            jobMonitor = RPCClient('WorkloadManagement/JobMonitoring',
                                   timeout=60)
            res = jobMonitor.getCurrentJobCounters(
                {'ApplicationStatus': 'Downloading SRM files'})
            if not res['OK']:
                error_count += 1
                time.sleep(60)
                continue
            running = 0
            if 'Running' in res['Value']:
                running = res['Value']['Running']
            if running < max_concurrent_running:
                break
            else:
                time.sleep(60)

        self.setApplicationStatus('Downloading SRM files')
        for filed in self.files:
            if 'file' not in filed or 'site' not in filed:
                self.log.error('Dictionary does not contain correct keys')
                return S_ERROR('Dictionary does not contain correct keys')
            start = os.getcwd()
            downloadDir = tempfile.mkdtemp(prefix='InputData_%s' %
                                           (self.counter),
                                           dir=start)
            os.chdir(downloadDir)
            storageElement = StorageElement(filed['site'])
            result = storageElement.getFile(filed['file'])
            if result['Value']['Failed']:
                result = storageElement.getFile(filed['file'])
            os.chdir(start)
            if result['Value']['Failed']:
                self.log.error("Failed to get the file from storage:",
                               result['Value']['Failed'])
                return result
            self.counter += 1

        return S_OK()
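
The while loop above is a throttling pattern: keep polling the job counters until fewer than the configured maximum of jobs are in the 'Downloading SRM files' state, giving up after repeated query failures. A hedged sketch of the same loop as a standalone helper; 'jobMonitor' stands for any client exposing getCurrentJobCounters, as in the example.

import time

def waitForFreeSlot(jobMonitor, status, maxRunning, maxErrors=10, pollSeconds=60):
    """Return True once fewer than maxRunning jobs are in the given status."""
    errors = 0
    while True:
        res = jobMonitor.getCurrentJobCounters({'ApplicationStatus': status})
        if not res['OK']:
            errors += 1
            if errors > maxErrors:
                return False  # give up after repeated monitoring failures
            time.sleep(pollSeconds)
            continue
        if res['Value'].get('Running', 0) < maxRunning:
            return True  # a slot is free
        time.sleep(pollSeconds)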
Exemple #19
0
class TestBase(unittest.TestCase):
    """ Base test class. Defines all the method to test
  """
    @mock.patch(
        'DIRAC.Resources.Storage.StorageFactory.StorageFactory._StorageFactory__generateStorageObject',
        side_effect=mock_StorageFactory_generateStorageObject)
    @mock.patch(
        'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
        return_value=S_OK(True))  # Pretend it's local
    @mock.patch(
        'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
        return_value=None)  # Don't send accounting
    def setUp(self, _mk_generateStorage, _mk_isLocalSE,
              _mk_addAccountingOperation):

        # Creating test configuration file
        self.testCfgFileName = os.path.join(tempfile.gettempdir(),
                                            'test_StorageElement.cfg')
        cfgContent = '''
    DIRAC
    {
      Setup=TestSetup
    }
    Resources{
      StorageElements{
        StorageA
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          AccessProtocol.0
          {
            Host =
            PluginName = File
            Protocol = file
            Path =
          }
        }
        StorageB
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          AccessProtocol.0
          {
            Host =
            PluginName = SRM2
            Protocol = srm
            Path =
          }
        }
        StorageC
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          AccessProtocol.0
          {
            Host =
            PluginName = XROOT
            Protocol = root
            Path =
          }
        }
        StorageD
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          AccessProtocol.0
          {
            Host =
            PluginName = SRM2
            Protocol = srm
            Path =
          }
          AccessProtocol.1
          {
            Host =
            PluginName = XROOT
            Protocol = root
            Path =
          }
        }
        StorageE
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          WriteProtocols = root
          WriteProtocols += srm
          AccessProtocol.0
          {
            Host =
            PluginName = SRM2
            Protocol = srm
            Path =
          }
          AccessProtocol.1
          {
            Host =
            PluginName = XROOT
            Protocol = root
            Path =
          }
        }
        StorageX
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          WriteProtocols = gsiftp
          AccessProtocols = root
          AccessProtocol.0
          {
            Host =
            PluginName = GSIFTP
            Protocol = gsiftp
            Path =
          }
          AccessProtocol.1
          {
            Host =
            PluginName = XROOT
            Protocol = root
            Path =
          }
        }
        StorageY
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          AccessProtocols = gsiftp
          AccessProtocols += srm
          AccessProtocol.0
          {
            Host =
            PluginName = GSIFTP
            Protocol = gsiftp
            Path =
          }
          AccessProtocol.1
          {
            Host =
            PluginName = SRM2
            Protocol = srm
            Path =
          }
        }
        StorageZ
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          AccessProtocols = root
          AccessProtocols += srm
          WriteProtocols = root
          WriteProtocols += srm
          AccessProtocol.0
          {
            Host =
            PluginName = ROOT
            Protocol = root
            Path =
          }
          AccessProtocol.1
          {
            Host =
            PluginName = SRM2
            Protocol = srm
            Path =
          }
        }
      }

    }
    Operations{
      Defaults
      {
        DataManagement{
          AccessProtocols = fakeProto
          AccessProtocols += root
          WriteProtocols = srm
        }
      }
    }
    '''

        with open(self.testCfgFileName, 'w') as f:
            f.write(cfgContent)

        # SUPER UGLY: one must recreate the CFG objects of gConfigurationData
        # not to conflict with other tests that might be using a local dirac.cfg
        gConfigurationData.localCFG = CFG()
        gConfigurationData.remoteCFG = CFG()
        gConfigurationData.mergedCFG = CFG()
        gConfigurationData.generateNewVersion()

        gConfig = ConfigurationClient(fileToLoadList=[
            self.testCfgFileName
        ])  # we replace the configuration by our own one.

        self.seA = StorageElementItem('StorageA')
        self.seA.vo = 'lhcb'
        self.seB = StorageElementItem('StorageB')
        self.seB.vo = 'lhcb'
        self.seC = StorageElementItem('StorageC')
        self.seC.vo = 'lhcb'
        self.seD = StorageElementItem('StorageD')
        self.seD.vo = 'lhcb'
        self.seE = StorageElementItem('StorageE')
        self.seE.vo = 'lhcb'

        self.seX = StorageElementItem('StorageX')
        self.seX.vo = 'lhcb'
        self.seY = StorageElementItem('StorageY')
        self.seY.vo = 'lhcb'
        self.seZ = StorageElementItem('StorageZ')
        self.seZ.vo = 'lhcb'

    def tearDown(self):
        try:
            os.remove(self.testCfgFileName)
        except OSError:
            pass
        # SUPER UGLY: one must recreate the CFG objects of gConfigurationData
        # not to conflict with other tests that might be using a local dirac.cfg
        gConfigurationData.localCFG = CFG()
        gConfigurationData.remoteCFG = CFG()
        gConfigurationData.mergedCFG = CFG()
        gConfigurationData.generateNewVersion()

    @mock.patch(
        'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
        return_value=S_OK(True))  # Pretend it's local
    @mock.patch(
        'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
        return_value=None)  # Don't send accounting
    def test_01_negociateProtocolWithOtherSE(self, mk_isLocalSE,
                                             mk_addAccounting):
        """Testing negotiation algorithm"""

        # Find common protocol between SRM2 and File
        res = self.seA.negociateProtocolWithOtherSE(self.seB)
        self.assertTrue(res['OK'], res)
        self.assertEqual(res['Value'], ['file'])

        # Find common protocol between File and SRM2
        res = self.seB.negociateProtocolWithOtherSE(self.seA)
        self.assertTrue(res['OK'], res)
        self.assertEqual(res['Value'], ['file'])

        # Find common protocol between XROOT and File
        # Nothing goes from xroot to file
        res = self.seA.negociateProtocolWithOtherSE(self.seC)
        self.assertTrue(res['OK'], res)
        self.assertEqual(res['Value'], [])

        # Find common protocol between File and XROOT
        res = self.seC.negociateProtocolWithOtherSE(self.seA)
        self.assertTrue(res['OK'], res)
        self.assertEqual(res['Value'], ['file'])

        # Find common protocol between File and File
        res = self.seA.negociateProtocolWithOtherSE(self.seA)
        self.assertTrue(res['OK'], res)
        self.assertEqual(res['Value'], ['file'])

        # Find common protocol between SRM and SRM
        res = self.seB.negociateProtocolWithOtherSE(self.seB)
        self.assertTrue(res['OK'], res)
        self.assertEqual(sorted(res['Value']), sorted(['file', 'root', 'srm']))

        # Find common protocol between SRM and XROOT
        res = self.seC.negociateProtocolWithOtherSE(self.seB)
        self.assertTrue(res['OK'], res)
        self.assertEqual(sorted(res['Value']), sorted(['root', 'file']))

        # Find common protocol between XROOT and SRM
        res = self.seC.negociateProtocolWithOtherSE(self.seB)
        self.assertTrue(res['OK'], res)
        self.assertEqual(sorted(res['Value']), sorted(['root', 'file']))

        # Testing restrictions
        res = self.seC.negociateProtocolWithOtherSE(self.seB,
                                                    protocols=['file'])
        self.assertTrue(res['OK'], res)
        self.assertEqual(sorted(res['Value']), ['file'])

        res = self.seC.negociateProtocolWithOtherSE(self.seB,
                                                    protocols=['nonexisting'])
        self.assertTrue(res['OK'], res)
        self.assertEqual(res['Value'], [])
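
negociateProtocolWithOtherSE returns an S_OK wrapping the list of protocols common to both endpoints, ordered by the caller's preference; an empty list means no common protocol. A hedged sketch of how calling code might turn that into a hard error, using the S_ERROR helper seen elsewhere in these examples and placeholder SE objects:

def commonProtocolsOrError(destSE, sourceSE, preferred):
    """Return S_OK(list of common protocols) or S_ERROR if there are none."""
    res = destSE.negociateProtocolWithOtherSE(sourceSE, protocols=preferred)
    if not res['OK']:
        return res
    if not res['Value']:
        return S_ERROR('No common protocol between the two storage elements')
    return res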

    @mock.patch(
        'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
        return_value=S_OK(True))  # Pretend it's local
    @mock.patch(
        'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
        return_value=None)  # Don't send accounting
    def test_02_followOrder(self, _mk_isLocalSE, _mk_addAccounting):
        """Testing If the order of preferred protocols is respected"""

        for permutation in itertools.permutations(
            ['srm', 'file', 'root', 'nonexisting']):
            permuList = list(permutation)
            # Don't get tricked! remove() cannot be chained after the
            # conversion, because it is an in-place modification
            permuList.remove('nonexisting')
            res = self.seD.negociateProtocolWithOtherSE(self.seD,
                                                        protocols=permutation)
            self.assertTrue(res['OK'], res)
            self.assertEqual(res['Value'], permuList)

    @mock.patch(
        'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
        return_value=S_OK(True))  # Pretend it's local
    @mock.patch(
        'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
        return_value=None)  # Don't send accounting
    def test_03_multiProtocolThirdParty(self, _mk_isLocalSE,
                                        _mk_addAccounting):
        """
      Test case for storages with several protocols

      Here comes the fun :-)
      Suppose we have endpoints that we can read in root, but cannot write
      If we have root in the accessProtocols and thirdPartyProtocols lists
      but not in the writeProtocols, we should get a root url to read,
      and write with SRM

      We reproduce here the behavior of DataManager.replicate

    """

        thirdPartyProtocols = ['root', 'srm']

        lfn = '/lhcb/fake/lfn'
        res = self.seD.negociateProtocolWithOtherSE(
            self.seD, protocols=thirdPartyProtocols)
        self.assertTrue(res['OK'], res)
        self.assertEqual(res['Value'], thirdPartyProtocols)

        # Only the XROOT plugin here implements the getTransportURL
        # that returns what we want, so we know that
        # if the return is successful, it is because of the XROOT
        res = self.seD.getURL(lfn, protocol=res['Value'])
        self.assertTrue(res['OK'], res)
        self.assertTrue(lfn in res['Value']['Successful'], res)

        srcUrl = res['Value']['Successful'][lfn]
        self.assertEqual(srcUrl, "root:getTransportURL")

        # Only the SRM2 plugin here implements the putFile method
        # so if we get a success here, it means that we used the SRM plugin
        res = self.seD.replicateFile({lfn: srcUrl},
                                     sourceSize=123,
                                     inputProtocol='root')

        self.assertTrue(res['OK'], res)
        self.assertTrue(lfn in res['Value']['Successful'], res)
        self.assertEqual(res['Value']['Successful'][lfn], "srm:putFile")
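
This test follows the flow the docstring describes for DataManager.replicate: negotiate a third-party protocol, ask the source for a URL in that protocol, then hand the URL to the destination's replicateFile. A hedged sketch of that flow, with 'sourceSE' and 'destSE' as placeholders and the source protocol read back from the URL scheme (an assumption of this sketch, not of the real DataManager):

def replicateViaThirdParty(destSE, sourceSE, lfn, size, protocols=('root', 'srm')):
    """Replicate lfn from sourceSE to destSE using a negotiated protocol."""
    res = destSE.negociateProtocolWithOtherSE(sourceSE, protocols=list(protocols))
    if not res['OK'] or not res['Value']:
        return res
    res = sourceSE.getURL(lfn, protocol=res['Value'])
    if not res['OK'] or lfn not in res['Value']['Successful']:
        return res
    srcUrl = res['Value']['Successful'][lfn]
    inputProtocol = srcUrl.split(':', 1)[0]  # e.g. 'root' for a root:// URL
    return destSE.replicateFile({lfn: srcUrl}, sourceSize=size,
                                inputProtocol=inputProtocol)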

    @mock.patch(
        'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
        return_value=S_OK(True))  # Pretend it's local
    @mock.patch(
        'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
        return_value=None)  # Don't send accounting
    def test_04_thirdPartyLocalWrite(self, _mk_isLocalSE, _mk_addAccounting):
        """
      Test case for storages with several protocols

      Here, we locally define the write protocol to be root and srm
      So we should be able to do everything with XROOT plugin

    """

        thirdPartyProtocols = ['root', 'srm']

        lfn = '/lhcb/fake/lfn'
        res = self.seE.negociateProtocolWithOtherSE(
            self.seE, protocols=thirdPartyProtocols)
        self.assertTrue(res['OK'], res)
        self.assertEqual(res['Value'], thirdPartyProtocols)

        res = self.seE.getURL(lfn, protocol=res['Value'])
        self.assertTrue(res['OK'], res)
        self.assertTrue(lfn in res['Value']['Successful'], res)

        srcUrl = res['Value']['Successful'][lfn]
        self.assertEqual(srcUrl, "root:getTransportURL")

        res = self.seE.replicateFile({lfn: srcUrl},
                                     sourceSize=123,
                                     inputProtocol='root')

        self.assertTrue(res['OK'], res)
        self.assertTrue(lfn in res['Value']['Successful'], res)
        self.assertEqual(res['Value']['Successful'][lfn], "root:putFile")

    @mock.patch(
        'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
        return_value=S_OK(True))  # Pretend it's local
    @mock.patch(
        'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
        return_value=None)  # Don't send accounting
    def test_05_thirdPartyMix(self, _mk_isLocalSE, _mk_addAccounting):
        """
      Test case for storages with several protocols

      Here, we locally define the write protocol for the destination, so it should
      all go directly through the XROOT plugin

    """

        thirdPartyProtocols = ['root', 'srm']

        lfn = '/lhcb/fake/lfn'
        res = self.seE.negociateProtocolWithOtherSE(
            self.seD, protocols=thirdPartyProtocols)
        self.assertTrue(res['OK'], res)
        self.assertEqual(res['Value'], thirdPartyProtocols)

        res = self.seD.getURL(lfn, protocol=res['Value'])
        self.assertTrue(res['OK'], res)
        self.assertTrue(lfn in res['Value']['Successful'], res)

        srcUrl = res['Value']['Successful'][lfn]
        self.assertEqual(srcUrl, "root:getTransportURL")

        res = self.seE.replicateFile({lfn: srcUrl},
                                     sourceSize=123,
                                     inputProtocol='root')

        self.assertTrue(res['OK'], res)
        self.assertTrue(lfn in res['Value']['Successful'], res)
        self.assertEqual(res['Value']['Successful'][lfn], "root:putFile")

    @mock.patch(
        'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
        return_value=S_OK(True))  # Pretend it's local
    @mock.patch(
        'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
        return_value=None)  # Don't send accounting
    def test_06_thirdPartyMixOpposite(self, _mk_isLocalSE, _mk_addAccounting):
        """
      Test case for storages with several protocols

      Here, we locally define the write protocol for the source, so it should
      get the source directly using XROOT, and perform the put using SRM

    """

        thirdPartyProtocols = ['root', 'srm']

        lfn = '/lhcb/fake/lfn'
        res = self.seD.negociateProtocolWithOtherSE(
            self.seE, protocols=thirdPartyProtocols)
        self.assertTrue(res['OK'], res)
        self.assertEqual(res['Value'], thirdPartyProtocols)

        res = self.seE.getURL(lfn, protocol=res['Value'])
        self.assertTrue(res['OK'], res)
        self.assertTrue(lfn in res['Value']['Successful'], res)

        srcUrl = res['Value']['Successful'][lfn]
        self.assertEqual(srcUrl, "root:getTransportURL")

        res = self.seD.replicateFile({lfn: srcUrl},
                                     sourceSize=123,
                                     inputProtocol='root')

        self.assertTrue(res['OK'], res)
        self.assertTrue(lfn in res['Value']['Successful'], res)
        self.assertEqual(res['Value']['Successful'][lfn], "srm:putFile")

    @mock.patch(
        'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
        return_value=S_OK(True))  # Pretend it's local
    @mock.patch(
        'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
        return_value=None)  # Don't send accounting
    def test_07_multiProtocolSrmOnly(self, _mk_isLocalSE, _mk_addAccounting):
        """
      Test case for storages with several protocols

      Here comes the fun :-)
      Suppose we have endpoints that we can read in root, but cannot write
      If we have root in the accessProtocols and thirdPartyProtocols lists
      but not in the writeProtocols, we should get a root url to read,
      and write with SRM

      We reproduce here the behavior of DataManager.replicate

    """

        thirdPartyProtocols = ['srm']

        lfn = '/lhcb/fake/lfn'
        res = self.seD.negociateProtocolWithOtherSE(
            self.seD, protocols=thirdPartyProtocols)
        self.assertTrue(res['OK'], res)
        self.assertEqual(res['Value'], thirdPartyProtocols)

        res = self.seD.getURL(lfn, protocol=res['Value'])
        self.assertTrue(res['OK'], res)
        self.assertTrue(lfn in res['Value']['Successful'], res)

        srcUrl = res['Value']['Successful'][lfn]
        self.assertEqual(srcUrl, "srm:getTransportURL")

        res = self.seD.replicateFile({lfn: srcUrl},
                                     sourceSize=123,
                                     inputProtocol='srm')

        self.assertTrue(res['OK'], res)
        self.assertTrue(lfn in res['Value']['Successful'], res)
        self.assertEqual(res['Value']['Successful'][lfn], "srm:putFile")

    @mock.patch(
        'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
        return_value=S_OK(True))  # Pretend it's local
    @mock.patch(
        'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
        return_value=None)  # Don't send accounting
    def test_08_multiProtocolFTS(self, _mk_isLocalSE, _mk_addAccounting):
        """
      Test case FTS replication between storages with several protocols

      Here comes the fun :-)
      Suppose we have endpoints that we can read in root, but cannot write
      If we have root in the accessProtocols and thirdPartyProtocols lists
      but not in the writeProtocols, we should get a root url to read,
      and write with SRM.
      And we should get the proper URL for source and destination.

      Storages X, Y and Z represent the situation we could now have in LHCb:
        * X is RAL Echo: you read with root, write with gsiftp
        * Y is Gridka: you have gsiftp available for read only
        * Z is CERN EOS: you can do everything with EOS

      This makes it necessary to add gsiftp as a third-party option to write to Echo.

    """
        thirdPartyProtocols = ['root', 'gsiftp', 'srm']
        rankedProtocols = [
            'root', 'gsiftp', 'gsidcap', 'dcap', 'file', 'srm', 'rfio'
        ]

        lfn = '/lhcb/fake/lfn'

        # RAL -> GRIDKA
        # We should read using root and write through srm
        res = self.seY.generateTransferURLsBetweenSEs(
            lfn, self.seX, protocols=rankedProtocols)
        self.assertTrue(res['OK'], res)
        urlPair = res['Value']['Successful'].get(lfn)
        self.assertTupleEqual(urlPair, ('root:%s' % lfn, 'srm:%s' % lfn))
        protoPair = res['Value']['Protocols']
        self.assertTupleEqual(protoPair, ('root', 'srm'))

        # RAL -> CERN
        # We should read using root and write directly with it
        res = self.seZ.generateTransferURLsBetweenSEs(
            lfn, self.seX, protocols=rankedProtocols)
        self.assertTrue(res['OK'], res)
        urlPair = res['Value']['Successful'].get(lfn)
        self.assertTupleEqual(urlPair, ('root:%s' % lfn, 'root:%s' % lfn))
        protoPair = res['Value']['Protocols']
        self.assertTupleEqual(protoPair, ('root', 'root'))

        # GRIDKA -> RAL
        # We should read using gsiftp and write directly with it
        res = self.seX.generateTransferURLsBetweenSEs(
            lfn, self.seY, protocols=rankedProtocols)
        self.assertTrue(res['OK'], res)
        urlPair = res['Value']['Successful'].get(lfn)
        self.assertTupleEqual(urlPair, ('gsiftp:%s' % lfn, 'gsiftp:%s' % lfn))
        protoPair = res['Value']['Protocols']
        self.assertTupleEqual(protoPair, ('gsiftp', 'gsiftp'))

        # GRIDKA -> CERN
        # We should read using srm and write with root
        res = self.seZ.generateTransferURLsBetweenSEs(
            lfn, self.seY, protocols=rankedProtocols)
        self.assertTrue(res['OK'], res)
        urlPair = res['Value']['Successful'].get(lfn)
        self.assertTupleEqual(urlPair, ('srm:%s' % lfn, 'root:%s' % lfn))
        protoPair = res['Value']['Protocols']
        self.assertTupleEqual(protoPair, ('srm', 'root'))

        # CERN -> RAL
        # We should read using srm and write with gsiftp
        res = self.seX.generateTransferURLsBetweenSEs(
            lfn, self.seZ, protocols=rankedProtocols)
        self.assertTrue(res['OK'], res)
        urlPair = res['Value']['Successful'].get(lfn)
        self.assertTupleEqual(urlPair, ('srm:%s' % lfn, 'gsiftp:%s' % lfn))
        protoPair = res['Value']['Protocols']
        self.assertTupleEqual(protoPair, ('srm', 'gsiftp'))

        # CERN -> GRIDKA
        # We should read using root and write directly with srm
        res = self.seY.generateTransferURLsBetweenSEs(
            lfn, self.seZ, protocols=rankedProtocols)
        self.assertTrue(res['OK'], res)
        urlPair = res['Value']['Successful'].get(lfn)
        self.assertTupleEqual(urlPair, ('root:%s' % lfn, 'srm:%s' % lfn))
        protoPair = res['Value']['Protocols']
        self.assertTupleEqual(protoPair, ('root', 'srm'))
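
generateTransferURLsBetweenSEs, exercised above, returns for each LFN a (sourceURL, destinationURL) pair plus the protocol pair actually chosen, which is what an FTS-style third-party transfer needs. A hedged sketch of consuming that result; the submission step is only a placeholder comment, since the real transfer client is outside the scope of these tests.

def buildTransferPair(destSE, sourceSE, lfn, rankedProtocols):
    """Return S_OK with the source/destination URLs and protocols for lfn."""
    res = destSE.generateTransferURLsBetweenSEs(lfn, sourceSE,
                                                protocols=rankedProtocols)
    if not res['OK']:
        return res
    if lfn not in res['Value']['Successful']:
        return S_ERROR('Could not build transfer URLs for %s' % lfn)
    srcUrl, destUrl = res['Value']['Successful'][lfn]
    readProto, writeProto = res['Value']['Protocols']
    # here one would submit (srcUrl, destUrl) to the transfer system
    return S_OK({'source': srcUrl, 'destination': destUrl,
                 'protocols': (readProto, writeProto)})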
Exemple #20
0
    def setUp(self, _mk_generateStorage, _mk_isLocalSE,
              _mk_addAccountingOperation):

        # Creating test configuration file
        self.testCfgFileName = os.path.join(tempfile.gettempdir(),
                                            'test_StorageElement.cfg')
        cfgContent = '''
    DIRAC
    {
      Setup=TestSetup
    }
    Resources{
      StorageElements{
        StorageA
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          AccessProtocol.0
          {
            Host =
            PluginName = File
            Protocol = file
            Path =
          }
        }
        StorageB
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          AccessProtocol.0
          {
            Host =
            PluginName = SRM2
            Protocol = srm
            Path =
          }
        }
        StorageC
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          AccessProtocol.0
          {
            Host =
            PluginName = XROOT
            Protocol = root
            Path =
          }
        }
        StorageD
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          AccessProtocol.0
          {
            Host =
            PluginName = SRM2
            Protocol = srm
            Path =
          }
          AccessProtocol.1
          {
            Host =
            PluginName = XROOT
            Protocol = root
            Path =
          }
        }
        StorageE
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          WriteProtocols = root
          WriteProtocols += srm
          AccessProtocol.0
          {
            Host =
            PluginName = SRM2
            Protocol = srm
            Path =
          }
          AccessProtocol.1
          {
            Host =
            PluginName = XROOT
            Protocol = root
            Path =
          }
        }
        StorageX
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          WriteProtocols = gsiftp
          AccessProtocols = root
          AccessProtocol.0
          {
            Host =
            PluginName = GSIFTP
            Protocol = gsiftp
            Path =
          }
          AccessProtocol.1
          {
            Host =
            PluginName = XROOT
            Protocol = root
            Path =
          }
        }
        StorageY
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          AccessProtocols = gsiftp
          AccessProtocols += srm
          AccessProtocol.0
          {
            Host =
            PluginName = GSIFTP
            Protocol = gsiftp
            Path =
          }
          AccessProtocol.1
          {
            Host =
            PluginName = SRM2
            Protocol = srm
            Path =
          }
        }
        StorageZ
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          AccessProtocols = root
          AccessProtocols += srm
          WriteProtocols = root
          WriteProtocols += srm
          AccessProtocol.0
          {
            Host =
            PluginName = ROOT
            Protocol = root
            Path =
          }
          AccessProtocol.1
          {
            Host =
            PluginName = SRM2
            Protocol = srm
            Path =
          }
        }
      }

    }
    Operations{
      Defaults
      {
        DataManagement{
          AccessProtocols = fakeProto
          AccessProtocols += root
          WriteProtocols = srm
        }
      }
    }
    '''

        with open(self.testCfgFileName, 'w') as f:
            f.write(cfgContent)

        # SUPER UGLY: one must recreate the CFG objects of gConfigurationData
        # not to conflict with other tests that might be using a local dirac.cfg
        gConfigurationData.localCFG = CFG()
        gConfigurationData.remoteCFG = CFG()
        gConfigurationData.mergedCFG = CFG()
        gConfigurationData.generateNewVersion()

        gConfig = ConfigurationClient(fileToLoadList=[
            self.testCfgFileName
        ])  # we replace the configuration by our own one.

        self.seA = StorageElementItem('StorageA')
        self.seA.vo = 'lhcb'
        self.seB = StorageElementItem('StorageB')
        self.seB.vo = 'lhcb'
        self.seC = StorageElementItem('StorageC')
        self.seC.vo = 'lhcb'
        self.seD = StorageElementItem('StorageD')
        self.seD.vo = 'lhcb'
        self.seE = StorageElementItem('StorageE')
        self.seE.vo = 'lhcb'

        self.seX = StorageElementItem('StorageX')
        self.seX.vo = 'lhcb'
        self.seY = StorageElementItem('StorageY')
        self.seY.vo = 'lhcb'
        self.seZ = StorageElementItem('StorageZ')
        self.seZ.vo = 'lhcb'
class TestBase( unittest.TestCase ):
  """ Base test class. Defines all the method to test
  """


  @mock.patch( 'DIRAC.Resources.Storage.StorageFactory.StorageFactory._StorageFactory__generateStorageObject',
                side_effect = mock_StorageFactory_generateStorageObject )
  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
                return_value = S_OK( True ) )  # Pretend it's local
  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
                return_value = None )  # Don't send accounting
  def setUp( self,
             _mk_generateStorage, _mk_isLocalSE, _mk_addAccountingOperation ):



    #Creating test configuration file
    self.testCfgFileName = os.path.join(tempfile.gettempdir(), 'test_StorageElement.cfg')
    cfgContent='''
    DIRAC
    {
      Setup=TestSetup
    }
    Resources{
      StorageElements{
        StorageA
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          AccessProtocol.0
          {
            Host =
            PluginName = File
            Protocol = file
            Path =
          }
        }
        StorageB
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          AccessProtocol.0
          {
            Host =
            PluginName = SRM2
            Protocol = srm
            Path =
          }
        }
        StorageC
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          AccessProtocol.0
          {
            Host =
            PluginName = XROOT
            Protocol = root
            Path =
          }
        }
        StorageD
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          AccessProtocol.0
          {
            Host =
            PluginName = SRM2
            Protocol = srm
            Path =
          }
          AccessProtocol.1
          {
            Host =
            PluginName = XROOT
            Protocol = root
            Path =
          }
        }
        StorageE
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          WriteProtocols = root
          WriteProtocols += srm
          AccessProtocol.0
          {
            Host =
            PluginName = SRM2
            Protocol = srm
            Path =
          }
          AccessProtocol.1
          {
            Host =
            PluginName = XROOT
            Protocol = root
            Path =
          }
        }
        StorageX
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          WriteProtocols = gsiftp
          AccessProtocols = root
          AccessProtocol.0
          {
            Host =
            PluginName = GSIFTP
            Protocol = gsiftp
            Path =
          }
          AccessProtocol.1
          {
            Host =
            PluginName = XROOT
            Protocol = root
            Path =
          }
        }
        StorageY
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          AccessProtocols = gsiftp
          AccessProtocols += srm
          AccessProtocol.0
          {
            Host =
            PluginName = GSIFTP
            Protocol = gsiftp
            Path =
          }
          AccessProtocol.1
          {
            Host =
            PluginName = SRM2
            Protocol = srm
            Path =
          }
        }
        StorageZ
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          AccessProtocols = root
          AccessProtocols += srm
          WriteProtocols = root
          WriteProtocols += srm
          AccessProtocol.0
          {
            Host =
            PluginName = ROOT
            Protocol = root
            Path =
          }
          AccessProtocol.1
          {
            Host =
            PluginName = SRM2
            Protocol = srm
            Path =
          }
        }
      }

    }
    Operations{
      Defaults
      {
        DataManagement{
          AccessProtocols = fakeProto
          AccessProtocols += root
          WriteProtocols = srm
        }
      }
    }
    '''

    with open(self.testCfgFileName, 'w') as f:
      f.write(cfgContent)

    # SUPER UGLY: one must recreate the CFG objects of gConfigurationData
    # not to conflict with other tests that might be using a local dirac.cfg
    gConfigurationData.localCFG=CFG()
    gConfigurationData.remoteCFG=CFG()
    gConfigurationData.mergedCFG=CFG()
    gConfigurationData.generateNewVersion()

    gConfig = ConfigurationClient(fileToLoadList = [self.testCfgFileName])  #we replace the configuration by our own one.

    self.seA = StorageElementItem( 'StorageA' )
    self.seA.vo = 'lhcb'
    self.seB = StorageElementItem( 'StorageB' )
    self.seB.vo = 'lhcb'
    self.seC = StorageElementItem( 'StorageC' )
    self.seC.vo = 'lhcb'
    self.seD = StorageElementItem( 'StorageD' )
    self.seD.vo = 'lhcb'
    self.seE = StorageElementItem( 'StorageE' )
    self.seE.vo = 'lhcb'

    self.seX = StorageElementItem( 'StorageX' )
    self.seX.vo = 'lhcb'
    self.seY = StorageElementItem( 'StorageY' )
    self.seY.vo = 'lhcb'
    self.seZ = StorageElementItem( 'StorageZ' )
    self.seZ.vo = 'lhcb'

  def tearDown( self ):
    try:
      os.remove(self.testCfgFileName)
    except OSError:
      pass
    # SUPER UGLY: one must recreate the CFG objects of gConfigurationData
    # not to conflict with other tests that might be using a local dirac.cfg
    gConfigurationData.localCFG=CFG()
    gConfigurationData.remoteCFG=CFG()
    gConfigurationData.mergedCFG=CFG()
    gConfigurationData.generateNewVersion()




  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
                return_value = S_OK( True ) )  # Pretend it's local
  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
                return_value = None )  # Don't send accounting
  def test_01_negociateProtocolWithOtherSE( self, mk_isLocalSE, mk_addAccounting ):
    """Testing negotiation algorithm"""

    # Find common protocol between SRM2 and File
    res = self.seA.negociateProtocolWithOtherSE( self.seB )
    self.assertTrue( res['OK'], res )
    self.assertEqual( res['Value'], ['file'] )

    # Find common protocol between File and SRM2
    res = self.seB.negociateProtocolWithOtherSE( self.seA )
    self.assertTrue( res['OK'], res )
    self.assertEqual( res['Value'], ['file'] )


    # Find common protocol between XROOT and File
    # Nothing goes from xroot to file
    res = self.seA.negociateProtocolWithOtherSE( self.seC )
    self.assertTrue( res['OK'], res )
    self.assertEqual( res['Value'], [] )

    # Find common protocol between File and XROOT
    res = self.seC.negociateProtocolWithOtherSE( self.seA )
    self.assertTrue( res['OK'], res )
    self.assertEqual( res['Value'], ['file'] )

    # Find common protocol between File and File
    res = self.seA.negociateProtocolWithOtherSE( self.seA )
    self.assertTrue( res['OK'], res )
    self.assertEqual( res['Value'], ['file'] )

    # Find common protocol between SRM and SRM
    res = self.seB.negociateProtocolWithOtherSE( self.seB )
    self.assertTrue( res['OK'], res )
    self.assertEqual( sorted( res['Value'] ), sorted( ['file', 'root', 'srm'] ) )


    # Find common protocol between SRM and XROOT
    res = self.seC.negociateProtocolWithOtherSE( self.seB )
    self.assertTrue( res['OK'], res )
    self.assertEqual( sorted( res['Value'] ), sorted( ['root', 'file'] ) )

    # Find common protocol between XROOT and SRM
    res = self.seC.negociateProtocolWithOtherSE( self.seB )
    self.assertTrue( res['OK'], res )
    self.assertEqual( sorted( res['Value'] ), sorted( ['root', 'file'] ) )

    # Testing restrictions
    res = self.seC.negociateProtocolWithOtherSE( self.seB, protocols = ['file'] )
    self.assertTrue( res['OK'], res )
    self.assertEqual( sorted( res['Value'] ), ['file'] )

    res = self.seC.negociateProtocolWithOtherSE( self.seB, protocols = ['nonexisting'] )
    self.assertTrue( res['OK'], res )
    self.assertEqual( res['Value'], [] )


  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
                return_value = S_OK( True ) )  # Pretend it's local
  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
                return_value = None )  # Don't send accounting
  def test_02_followOrder( self, _mk_isLocalSE, _mk_addAccounting ):
    """Testing If the order of preferred protocols is respected"""

    for permutation in itertools.permutations( ['srm', 'file', 'root', 'nonexisting'] ):
      permuList = list( permutation )
      # Don't get tricked ! remove cannot be put
      # after the conversion, because it is inplace modification
      permuList.remove( 'nonexisting' )
      res = self.seD.negociateProtocolWithOtherSE( self.seD, protocols = permutation )
      self.assertTrue( res['OK'], res )
      self.assertEqual( res['Value'], permuList )



  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
                return_value = S_OK( True ) )  # Pretend it's local
  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
                return_value = None )  # Don't send accounting
  def test_03_multiProtocolThirdParty( self, _mk_isLocalSE, _mk_addAccounting ):
    """
      Test case for storages with several protocols

      Here comes the fun :-)
      Suppose we have endpoints that we can read in root, but cannot write
      If we have root in the accessProtocols and thirdPartyProtocols lists
      but not in the writeProtocols, we should get a root url to read,
      and write with SRM

      We reproduce here the behavior of DataManager.replicate

    """

    thirdPartyProtocols = ['root', 'srm']

    lfn = '/lhcb/fake/lfn'
    res = self.seD.negociateProtocolWithOtherSE( self.seD, protocols = thirdPartyProtocols )
    self.assertTrue( res['OK'], res )
    self.assertEqual( res['Value'] , thirdPartyProtocols )


    # Only the XROOT plugin here implements the getTransportURL
    # that returns what we want, so we know that
    # if the return is successful, it is because of the XROOT plugin
    res = self.seD.getURL( lfn, protocol = res['Value'] )
    self.assertTrue( res['OK'], res )
    self.assertTrue( lfn in res['Value']['Successful'], res )

    srcUrl = res['Value']['Successful'][lfn]
    self.assertEqual( srcUrl, "root:getTransportURL" )

    # Only the SRM2 plugin here implements the putFile method
    # so if we get a success here, it means that we used the SRM plugin
    res = self.seD.replicateFile( {lfn:srcUrl},
                                   sourceSize = 123,
                                   inputProtocol = 'root' )

    self.assertTrue( res['OK'], res )
    self.assertTrue( lfn in res['Value']['Successful'], res )
    self.assertEqual( res['Value']['Successful'][lfn], "srm:putFile" )


  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
                return_value = S_OK( True ) )  # Pretend it's local
  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
                return_value = None )  # Don't send accounting
  def test_04_thirdPartyLocalWrite( self, _mk_isLocalSE, _mk_addAccounting ):
    """
      Test case for storages with several protocols

      Here, we locally define the write protocol to be root and srm
      So we should be able to do everything with XROOT plugin

    """

    thirdPartyProtocols = ['root', 'srm']

    lfn = '/lhcb/fake/lfn'
    res = self.seE.negociateProtocolWithOtherSE( self.seE, protocols = thirdPartyProtocols )
    self.assertTrue( res['OK'], res )
    self.assertEqual( res['Value'] , thirdPartyProtocols )

    res = self.seE.getURL( lfn, protocol = res['Value'] )
    self.assertTrue( res['OK'], res )
    self.assertTrue( lfn in res['Value']['Successful'], res )

    srcUrl = res['Value']['Successful'][lfn]
    self.assertEqual( srcUrl, "root:getTransportURL" )

    res = self.seE.replicateFile( {lfn:srcUrl},
                                   sourceSize = 123,
                                   inputProtocol = 'root' )

    self.assertTrue( res['OK'], res )
    self.assertTrue( lfn in res['Value']['Successful'], res )
    self.assertEqual( res['Value']['Successful'][lfn], "root:putFile" )


  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
                return_value = S_OK( True ) )  # Pretend it's local
  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
                return_value = None )  # Don't send accounting
  def test_05_thirdPartyMix( self, _mk_isLocalSE, _mk_addAccounting ):
    """
      Test case for storages with several protocols

      Here, we locally define the write protocol for the destination, so it should
      all go directly through the XROOT plugin

    """

    thirdPartyProtocols = ['root', 'srm']

    lfn = '/lhcb/fake/lfn'
    res = self.seE.negociateProtocolWithOtherSE( self.seD, protocols = thirdPartyProtocols )
    self.assertTrue( res['OK'], res )
    self.assertEqual( res['Value'] , thirdPartyProtocols )

    res = self.seD.getURL( lfn, protocol = res['Value'] )
    self.assertTrue( res['OK'], res )
    self.assertTrue( lfn in res['Value']['Successful'], res )

    srcUrl = res['Value']['Successful'][lfn]
    self.assertEqual( srcUrl, "root:getTransportURL" )

    res = self.seE.replicateFile( {lfn:srcUrl},
                                   sourceSize = 123,
                                   inputProtocol = 'root' )

    self.assertTrue( res['OK'], res )
    self.assertTrue( lfn in res['Value']['Successful'], res )
    self.assertEqual( res['Value']['Successful'][lfn], "root:putFile" )


  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
                return_value = S_OK( True ) )  # Pretend it's local
  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
                return_value = None )  # Don't send accounting
  def test_06_thirdPartyMixOpposite( self, _mk_isLocalSE, _mk_addAccounting ):
    """
      Test case for storages with several protocols

      Here, we locally define the write protocol for the source, so it should
      get the source directly using XROOT, and perform the put using SRM

    """

    thirdPartyProtocols = ['root', 'srm']

    lfn = '/lhcb/fake/lfn'
    res = self.seD.negociateProtocolWithOtherSE( self.seE, protocols = thirdPartyProtocols )
    self.assertTrue( res['OK'], res )
    self.assertEqual( res['Value'] , thirdPartyProtocols )

    res = self.seE.getURL( lfn, protocol = res['Value'] )
    self.assertTrue( res['OK'], res )
    self.assertTrue( lfn in res['Value']['Successful'], res )

    srcUrl = res['Value']['Successful'][lfn]
    self.assertEqual( srcUrl, "root:getTransportURL" )

    res = self.seD.replicateFile( {lfn:srcUrl},
                                   sourceSize = 123,
                                   inputProtocol = 'root' )

    self.assertTrue( res['OK'], res )
    self.assertTrue( lfn in res['Value']['Successful'], res )
    self.assertEqual( res['Value']['Successful'][lfn], "srm:putFile" )


  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
                return_value = S_OK( True ) )  # Pretend it's local
  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
                return_value = None )  # Don't send accounting
  def test_07_multiProtocolSrmOnly( self, _mk_isLocalSE, _mk_addAccounting ):
    """
      Test case for storages with several protocols

      Here comes the fun :-)
      Suppose we have endpoints that we can read in root, but cannot write
      If we have root in the accessProtocols and thirdPartyProtocols lists
      but not in the writeProtocols, we should get a root url to read,
      and write with SRM

      We reproduce here the behavior of DataManager.replicate

    """

    thirdPartyProtocols = [ 'srm']

    print "negociate"
    lfn = '/lhcb/fake/lfn'
    res = self.seD.negociateProtocolWithOtherSE( self.seD, protocols = thirdPartyProtocols )
    self.assertTrue( res['OK'], res )
    self.assertEqual( res['Value'] , thirdPartyProtocols )

    print "get source url"

    res = self.seD.getURL( lfn, protocol = res['Value'] )
    self.assertTrue( res['OK'], res )
    self.assertTrue( lfn in res['Value']['Successful'], res )

    srcUrl = res['Value']['Successful'][lfn]
    self.assertEqual( srcUrl, "srm:getTransportURL" )

    print "replicate"
    res = self.seD.replicateFile( {lfn:srcUrl},
                                   sourceSize = 123,
                                   inputProtocol = 'srm' )

    self.assertTrue( res['OK'], res )
    self.assertTrue( lfn in res['Value']['Successful'], res )
    self.assertEqual( res['Value']['Successful'][lfn], "srm:putFile" )


  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
                return_value = S_OK( True ) )  # Pretend it's local
  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
                return_value = None )  # Don't send accounting
  def test_08_multiProtocolFTS( self, _mk_isLocalSE, _mk_addAccounting ):
    """
      Test case FTS replication between storages with several protocols

      Here comes the fun :-)
      Suppose we have endpoints that we can read in root, but cannot write
      If we have root in the accessProtocols and thirdPartyProtocols lists
      but not in the writeProtocols, we should get a root url to read,
      and write with SRM.
      And we should get the proper URL for source and destination.

      Storage X, Y and Z represent the situation we could now have in LHCb:
        * X is RAL Echo: you read with root, write with gsiftp
        * Y is Gridka: you have gsiftp available for read only
        * Z is CERN EOS: you can do everything with EOS

      This makes it necessary to add gsiftp as a third-party option for writing to Echo

    """


    thirdPartyProtocols = ['root', 'gsiftp', 'srm']

    lfn = '/lhcb/fake/lfn'

    # RAL -> GRIDKA
    # We should read using root and write through srm
    res = self.seY.generateTransferURLsBetweenSEs(lfn, self.seX)
    self.assertTrue( res['OK'], res )
    urlPair = res['Value']['Successful'].get(lfn)
    self.assertTupleEqual(urlPair, ('root:%s'%lfn, 'srm:%s'%lfn))

    # RAL -> CERN
    # We should read using root and write directly with it
    res = self.seZ.generateTransferURLsBetweenSEs(lfn, self.seX)
    self.assertTrue( res['OK'], res )
    urlPair = res['Value']['Successful'].get(lfn)
    self.assertTupleEqual(urlPair, ('root:%s'%lfn, 'root:%s'%lfn))

    # GRIDKA -> RAL
    # We should read using gsiftp and write directly with it
    res = self.seX.generateTransferURLsBetweenSEs(lfn, self.seY)
    self.assertTrue( res['OK'], res )
    urlPair = res['Value']['Successful'].get(lfn)
    self.assertTupleEqual(urlPair, ('gsiftp:%s'%lfn, 'gsiftp:%s'%lfn))

    # GRIDKA -> CERN
    # We should read using srm and write with root
    res = self.seZ.generateTransferURLsBetweenSEs(lfn, self.seY)
    self.assertTrue( res['OK'], res )
    urlPair = res['Value']['Successful'].get(lfn)
    self.assertTupleEqual(urlPair, ('srm:%s'%lfn, 'root:%s'%lfn))

    # CERN -> RAL
    # We should read using srm and write with gsiftp
    res = self.seX.generateTransferURLsBetweenSEs(lfn, self.seZ)
    self.assertTrue( res['OK'], res )
    urlPair = res['Value']['Successful'].get(lfn)
    self.assertTupleEqual(urlPair, ('srm:%s'%lfn, 'gsiftp:%s'%lfn))

    # CERN -> GRIDKA
    # We should read using root and write directly with srm
    res = self.seY.generateTransferURLsBetweenSEs(lfn, self.seZ)
    self.assertTrue( res['OK'], res )
    urlPair = res['Value']['Successful'].get(lfn)

    self.assertTupleEqual(urlPair, ('root:%s'%lfn, 'srm:%s'%lfn))


  def setUp( self,
             _mk_generateStorage, _mk_isLocalSE, _mk_addAccountingOperation ):



    #Creating test configuration file
    self.testCfgFileName = os.path.join(tempfile.gettempdir(), 'test_StorageElement.cfg')
    cfgContent='''
    DIRAC
    {
      Setup=TestSetup
    }
    Resources{
      StorageElements{
        StorageA
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          AccessProtocol.0
          {
            Host =
            PluginName = File
            Protocol = file
            Path =
          }
        }
        StorageB
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          AccessProtocol.0
          {
            Host =
            PluginName = SRM2
            Protocol = srm
            Path =
          }
        }
        StorageC
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          AccessProtocol.0
          {
            Host =
            PluginName = XROOT
            Protocol = root
            Path =
          }
        }
        StorageD
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          AccessProtocol.0
          {
            Host =
            PluginName = SRM2
            Protocol = srm
            Path =
          }
          AccessProtocol.1
          {
            Host =
            PluginName = XROOT
            Protocol = root
            Path =
          }
        }
        StorageE
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          WriteProtocols = root
          WriteProtocols += srm
          AccessProtocol.0
          {
            Host =
            PluginName = SRM2
            Protocol = srm
            Path =
          }
          AccessProtocol.1
          {
            Host =
            PluginName = XROOT
            Protocol = root
            Path =
          }
        }
        StorageX
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          WriteProtocols = gsiftp
          AccessProtocols = root
          AccessProtocol.0
          {
            Host =
            PluginName = GSIFTP
            Protocol = gsiftp
            Path =
          }
          AccessProtocol.1
          {
            Host =
            PluginName = XROOT
            Protocol = root
            Path =
          }
        }
        StorageY
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          AccessProtocols = gsiftp
          AccessProtocols += srm
          AccessProtocol.0
          {
            Host =
            PluginName = GSIFTP
            Protocol = gsiftp
            Path =
          }
          AccessProtocol.1
          {
            Host =
            PluginName = SRM2
            Protocol = srm
            Path =
          }
        }
        StorageZ
        {
          BackendType = local
          ReadAccess = Active
          WriteAccess = Active
          AccessProtocols = root
          AccessProtocols += srm
          WriteProtocols = root
          WriteProtocols += srm
          AccessProtocol.0
          {
            Host =
            PluginName = ROOT
            Protocol = root
            Path =
          }
          AccessProtocol.1
          {
            Host =
            PluginName = SRM2
            Protocol = srm
            Path =
          }
        }
      }

    }
    Operations{
      Defaults
      {
        DataManagement{
          AccessProtocols = fakeProto
          AccessProtocols += root
          WriteProtocols = srm
        }
      }
    }
    '''

    with open(self.testCfgFileName, 'w') as f:
      f.write(cfgContent)

    # SUPER UGLY: one must recreate the CFG objects of gConfigurationData
    # not to conflict with other tests that might be using a local dirac.cfg
    gConfigurationData.localCFG=CFG()
    gConfigurationData.remoteCFG=CFG()
    gConfigurationData.mergedCFG=CFG()
    gConfigurationData.generateNewVersion()

    gConfig = ConfigurationClient(fileToLoadList = [self.testCfgFileName])  # we replace the configuration with our own one

    self.seA = StorageElementItem( 'StorageA' )
    self.seA.vo = 'lhcb'
    self.seB = StorageElementItem( 'StorageB' )
    self.seB.vo = 'lhcb'
    self.seC = StorageElementItem( 'StorageC' )
    self.seC.vo = 'lhcb'
    self.seD = StorageElementItem( 'StorageD' )
    self.seD.vo = 'lhcb'
    self.seE = StorageElementItem( 'StorageE' )
    self.seE.vo = 'lhcb'

    self.seX = StorageElementItem( 'StorageX' )
    self.seX.vo = 'lhcb'
    self.seY = StorageElementItem( 'StorageY' )
    self.seY.vo = 'lhcb'
    self.seZ = StorageElementItem( 'StorageZ' )
    self.seZ.vo = 'lhcb'
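
The configuration above is what drives the protocol negotiation exercised in the tests: per-SE AccessProtocols/WriteProtocols, with the Operations/DataManagement section as the global fallback. The following is a minimal sketch, not part of the scraped example, assuming that same test CFG is loaded and the storage plugins are mocked as in the setUp above (the SE names are the ones defined in the CFG):

from DIRAC.Resources.Storage.StorageElement import StorageElementItem

def commonProtocols(destName, srcName, protocols=None):
  """Return the protocols destName would accept for a transfer coming from srcName."""
  dest = StorageElementItem(destName)   # relies on the test CFG and mocked plugins above
  src = StorageElementItem(srcName)
  dest.vo = src.vo = 'lhcb'
  kwargs = {'protocols': list(protocols)} if protocols else {}
  res = dest.negociateProtocolWithOtherSE(src, **kwargs)
  return res['Value'] if res['OK'] else []

# With the CFG above, commonProtocols('StorageA', 'StorageB') should give ['file'],
# exactly what test_01 asserts for seA.negociateProtocolWithOtherSE(seB).
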
class UploadLogFile(ModuleBase):
  """ Handle log file uploads in the production jobs
  """
  #############################################################################
  def __init__(self):
    """Module initialization.
    """
    super(UploadLogFile, self).__init__()
    self.version = __RCSID__
    self.log = gLogger.getSubLogger('UploadLogFile')
    self.productionID = None
    self.jobID = None
    self.workflow_commons = None
    self.logFilePath = ""
    self.logLFNPath = ""
    self.logdir = ''
    self.logSE = StorageElement( self.ops.getValue('/LogStorage/LogSE', 'LogSE') )
    self.root = gConfig.getValue('/LocalSite/Root', os.getcwd())
    self.logSizeLimit = self.ops.getValue('/LogFiles/SizeLimit', 20 * 1024 * 1024)
    self.logExtensions = []
    self.failoverSEs = gConfig.getValue('/Resources/StorageElementGroups/Tier1-Failover', [])    
    self.experiment = 'CLIC'
    self.enable = True
    self.failoverTest = False #flag to put log files to failover by default
    self.jobID = ''

######################################################################
  def applicationSpecificInputs(self):
    """resolves the module parameters"""

    self.enable = self.step_commons.get('Enable', self.enable)
    if not isinstance(self.enable, bool):
      self.log.warn('Enable flag set to non-boolean value %s, setting to False' %self.enable)
      self.enable = False

    if not isinstance(self.failoverTest, bool):
      self.log.warn('Test failover flag set to non-boolean value %s, setting to False' % self.failoverTest)
      self.failoverTest = False

    self.jobID = os.environ.get('JOBID', self.jobID)
    if self.jobID:
      self.log.verbose('Found WMS JobID = %s' % self.jobID)
    else:
      self.log.info('No WMS JobID found, disabling module via control flag')
      self.enable = False

    self.logFilePath = self.workflow_commons.get('LogFilePath', self.logFilePath)
    self.logLFNPath = self.workflow_commons.get('LogTargetPath', self.logLFNPath)
    if not (self.logFilePath and self.logLFNPath):
      self.log.info('LogFilePath parameter not found, creating on the fly')
      result = getLogPath(self.workflow_commons)
      if not result['OK']:
        self.log.error('Could not create LogFilePath', result['Message'])
        return result
      self.logFilePath = result['Value']['LogFilePath'][0]
      self.logLFNPath = result['Value']['LogTargetPath'][0]

    if not isinstance(self.logFilePath, basestring):
      self.logFilePath = self.logFilePath[0]
    if not isinstance(self.logLFNPath, basestring):
      self.logLFNPath = self.logLFNPath[0]
      
    #FIXME: What if experiments change path, pick this up from the CS instead, or find a better way
    example_file = self.logFilePath
    if "/ilc/prod/clic" in example_file:
      self.experiment = "CLIC"
    elif "/ilc/prod/ilc/sid" in example_file:
      self.experiment = 'ILC_SID'
    elif "/ilc/prod/ilc/mc-dbd" in example_file:
      self.experiment = 'ILC_ILD' 
    else:
      self.log.warn("Failed to determine experiment, reverting to default: %s" % self.experiment)

    return S_OK('Parameters resolved')

######################################################################
  def execute(self):
    """ Main execution method
    """
    self.log.info('Initializing %s' % self.version)
    # Add global reporting tool
    res = self.resolveInputVariables()
    if not res['OK']:
      self.log.error("Failed to resolve input parameters:", res['Message'])
      
    self.logWorkingDirectory()
    self.log.info('Job root is found to be %s' % (self.root))
    self.log.info('PRODUCTION_ID = %s, JOB_ID = %s '  % (self.productionID, self.jobID))
    self.logdir = os.path.realpath('./job/log/%s/%s' % (self.productionID, self.jobID))
    self.log.info('Selected log files will be temporarily stored in %s' % self.logdir)

    res = self.finalize()
    return res

  #############################################################################
  def finalize(self):
    """ finalize method performs final operations after all the job
        steps were executed. Only production jobs are treated.
    """
    
    self.log.verbose('Starting UploadLogFile finalize')
    self.log.debug("LogFilePath: %s" % self.logFilePath)
    self.log.debug("LogLFNPath:  %s" % self.logLFNPath)

    ##########################################
    # First determine the files which should be saved
    self.log.info('Determining the files to be saved in the logs.')
    resRelevantFiles = self._determineRelevantFiles()
    if not resRelevantFiles['OK']:
      self.log.error('Completely failed to select relevant log files.', resRelevantFiles['Message'])
      return S_OK()#because if the logs are lost, it's not the end of the world.

    selectedFiles = resRelevantFiles['Value']
    if not selectedFiles:
      self.log.info("No log files selected")
      return S_OK()

    self.log.info('The following %s files were selected to be saved:\n%s' % (len(selectedFiles),
                                                                             "\n".join(selectedFiles)))

    #########################################
    # Create a temporary directory containing these files
    self.log.info('Populating a temporary directory for selected files.')
    resLogDir = self._populateLogDirectory(selectedFiles)
    if not resLogDir['OK']:
      self.log.error('Completely failed to populate temporary log file directory.', resLogDir['Message'])
      self.setApplicationStatus('Failed To Populate Log Dir')
      return S_OK()#because if the logs are lost, it's not the end of the world.
    self.log.info('%s populated with log files.' % self.logdir)

    if not self.enable:
      self.log.info('Module is disabled by control flag')
      return S_OK('Module is disabled by control flag')

    #########################################
    #Make sure all the files in the log directory have the correct permissions
    result = self._setLogFilePermissions(self.logdir)
    if not result['OK']:
      self.log.error('Could not set permissions of log files to 0755 with message:\n%s' % (result['Message']))

    #########################################
    #Tar all the files
    resTarLogs = self._tarTheLogFiles()
    if not resTarLogs['OK']:
      return S_OK()#because if the logs are lost, it's not the end of the world.
    tarFileName = resTarLogs['Value']['fileName']

    #########################################
    # Attempt to upload log tar ball to the LogSE
    self.log.info('Transferring log tarball to the %s' % self.logSE.name)
    resTransfer = S_ERROR("Skipping log upload, because failoverTest")
    tarFileLFN = '%s/%s' % (self.logFilePath, tarFileName)
    tarFileLocal = os.path.realpath(os.path.join(self.logdir, tarFileName))
    if not self.failoverTest:
      self.log.info('PutFile %s %s %s' % (tarFileLFN, tarFileLocal, self.logSE.name))
      resTransfer = self.logSE.putFile({ tarFileLFN : tarFileLocal })
      self.log.debug("putFile result: %s" % resTransfer)
      if not resTransfer['OK']:
        self.log.error('Completely failed to upload log files to %s, will attempt upload to failover SE' % self.logSE.name,
                       resTransfer['Message'])
      elif resTransfer['OK'] and len(resTransfer['Value']['Failed']) == 0:
        self.log.info('Successfully uploaded log tarball to %s' % self.logSE.name)
        self.setJobParameter('Log LFN', tarFileLFN)
        self.log.info('Logs for this job may be retrieved with dirac-ilc-get-prod-log -F %s' % tarFileLFN)
        return S_OK()
      else:
        self.log.error('Completely failed to upload log files to %s, will attempt upload to failover SE' % self.logSE.name,
                       resTransfer['Value'])

    #########################################
    # Recover the logs to a failover storage element

    
    #if res['Value'][0]: #i.e. non-zero status
    #  self.log.error('Failed to create tar file from directory','%s %s' % (self.logdir,res['Value']))
    #  self.setApplicationStatus('Failed To Create Log Tar Dir')
    #  return S_OK()#because if the logs are lost, it's not the end of the world.

    ############################################################
    #Instantiate the failover transfer client with the global request object
    resFailover = self._tryFailoverTransfer(tarFileName, self.logdir)
    if not resFailover['Value']:
      return resFailover ##log files lost, but who cares...
    
    self.workflow_commons['Request'] = resFailover['Value']['Request']
    uploadedSE = resFailover['Value']['uploadedSE']
    res = self._createLogUploadRequest(self.logSE.name, self.logLFNPath, uploadedSE)
    if not res['OK']:
      self.log.error('Failed to create failover request', res['Message'])
      self.setApplicationStatus('Failed To Upload Logs To Failover')
    else:
      self.log.info('Successfully created failover request')
      self.log.info("Request %s" % self._getRequestContainer())
    return S_OK()

  #############################################################################
  def _determineRelevantFiles(self):
    """ The files which are below a configurable size will be stored in the logs.
        This will typically pick up everything in the working directory minus the output data files.
    """
    logFileExtensions = ['*.txt', '*.log', '*.out', '*.output', '*.xml', '*.sh', '*.info', '*.err','*.root']
    self.logExtensions = self.ops.getValue('/LogFiles/%s/Extensions' % self.experiment, [])

    if self.logExtensions:
      self.log.info('Using list of log extensions from CS:\n%s' % (', '.join(self.logExtensions)))
      logFileExtensions = self.logExtensions
    else:
      self.log.info('Using default list of log extensions:\n%s' % (', '.join(logFileExtensions)))

    candidateFiles = []
    for ext in logFileExtensions:
      self.log.debug('Looking at log file wildcard: %s' % ext)
      globList = glob.glob(ext)
      for check in globList:
        if os.path.isfile(check):
          self.log.debug('Found locally existing log file: %s' % check)
          candidateFiles.append(check)

    selectedFiles = []
    try:
      for candidate in candidateFiles:
        fileSize = os.stat(candidate)[6]
        if fileSize < self.logSizeLimit:
          selectedFiles.append(candidate)
        else:
          self.log.error('Log file found to be greater than maximum of %s bytes' % self.logSizeLimit, candidate)
      return S_OK(selectedFiles)
    except OSError as x:
      self.log.exception('Exception while determining files to save.', '', str(x))
      return S_ERROR('Could not determine log files')

  #############################################################################
  def _populateLogDirectory(self, selectedFiles):
    """ A temporary directory is created for all the selected files.
        These files are then copied into this directory before being uploaded
    """
    # Create the temporary directory
    try:
      if not os.path.exists(self.logdir):
        os.makedirs(self.logdir)
    except OSError as x:
      self.log.exception('PopulateLogDir: Exception while trying to create directory.', self.logdir, str(x))
      return S_ERROR()
    # Set proper permissions
    self.log.info('PopulateLogDir: Changing log directory %s permissions to 0755' % self.logdir )
    try:
      os.chmod(self.logdir, 0755)
    except OSError as x:
      self.log.error('PopulateLogDir: Could not set logdir permissions to 0755:', '%s (%s)' % ( self.logdir, str(x) ) )
    # Populate the temporary directory
    try:
      for myfile in selectedFiles:
        destinationFile = '%s/%s' % (self.logdir, os.path.basename(myfile))
        shutil.copy(myfile, destinationFile)
    except OSError as x:
      self.log.exception('PopulateLogDir: Exception while trying to copy file.', myfile, str(x))
      self.log.info('PopulateLogDir: File %s will be skipped and can be considered lost.' % myfile)

    # Now verify the contents of our target log dir
    successfulFiles = os.listdir(self.logdir)
    self.log.info("PopulateLogDir: successfulFiles %s" % successfulFiles)
    if len(successfulFiles) == 0:
      self.log.info('PopulateLogDir: Failed to copy any files to the target directory.')
      return S_ERROR()
    else:
      self.log.info('PopulateLogDir: Prepared %s files in the temporary directory.' % len(successfulFiles))
      return S_OK()
    
  #############################################################################
  def _createLogUploadRequest(self, targetSE, logFileLFN, uploadedSE):
    """ Set a request to upload job log files from the output sandbox
        Changed to be similar to LHCb createLogUploadRequest
        using LHCb LogUpload Request and Removal Request
    """
    self.log.info('Setting log upload request for %s at %s' %(targetSE, logFileLFN))
    request = self._getRequestContainer()

    logUpload = Operation()
    logUpload.Type = "LogUpload"
    logUpload.TargetSE = targetSE

    upFile = File()
    upFile.LFN = logFileLFN
    logUpload.addFile( upFile )

    logRemoval = Operation()
    logRemoval.Type = 'RemoveFile'
    logRemoval.TargetSE = uploadedSE
    logRemoval.addFile( upFile )

    request.addOperation ( logUpload )
    request.addOperation ( logRemoval )

    self.workflow_commons['Request'] = request

    return S_OK()

  #############################################################################
  def _setLogFilePermissions(self, logDir):
    """ Sets the permissions of all the files in the log directory to ensure
        they are readable.
    """
    try:
      for toChange in os.listdir(logDir):
        if not os.path.islink('%s/%s' % (logDir, toChange)):
          self.log.debug('Changing permissions of %s/%s to 0755' % (logDir, toChange))
          os.chmod('%s/%s' % (logDir, toChange), 0755)
    except OSError as x:
      self.log.error('Problem changing shared area permissions', str(x))
      return S_ERROR(x)

    return S_OK()

  ################################################################################
  def _tryFailoverTransfer(self, tarFileName, tarFileDir):
    """tries to upload the log tarBall to the failoverSE and creates moving request"""
    failoverTransfer = FailoverTransfer(self._getRequestContainer())
    ##determine the experiment
    self.failoverSEs = self.ops.getValue("Production/%s/FailOverSE" % self.experiment, self.failoverSEs)
    catalogs = self.ops.getValue('Production/%s/Catalogs' % self.experiment, ['FileCatalog', 'LcgFileCatalog'])

    random.shuffle(self.failoverSEs)
    self.log.info("Attempting to store file %s to the following SE(s):\n%s" % (tarFileName,
                                                                               ', '.join(self.failoverSEs )))
    result = failoverTransfer.transferAndRegisterFile(tarFileName, '%s/%s' % (tarFileDir, tarFileName), self.logLFNPath,
                                                      self.failoverSEs, fileMetaDict = { "GUID": None },
                                                      fileCatalog = catalogs )
    if not result['OK']:
      self.log.error('Failed to upload logs to all destinations')
      self.setApplicationStatus('Failed To Upload Logs')
      return S_OK() #because if the logs are lost, it's not the end of the world.

    #Now after all operations, return potentially modified request object
    return S_OK( {'Request': failoverTransfer.request, 'uploadedSE': result['Value']['uploadedSE']})

  def _tarTheLogFiles(self):
    """returns S_OK/S_ERROR and puts all the relevantFiles into a tarball"""
    self.logLFNPath = '%s.gz' % self.logLFNPath
    tarFileName = os.path.basename(self.logLFNPath)

    self.log.debug("Log Directory: %s" % self.logdir )
    self.log.debug("Log LFNPath:   %s" % self.logLFNPath )
    self.log.debug("TarFileName:   %s" % tarFileName )
    start = os.getcwd()
    os.chdir(self.logdir)
    logTarFiles = os.listdir(self.logdir)
    #comm = 'tar czvf %s %s' % (tarFileName,string.join(logTarFiles,' '))
    tfile = tarfile.open(tarFileName, "w:gz")
    for item in logTarFiles:
      self.log.info("Adding file %s to tarfile %s" %(item, tarFileName))
      tfile.add(item)
    tfile.close()
    resExists = S_OK() if os.path.exists(tarFileName) else S_ERROR("File was not created")
    os.chdir(start)
    self.log.debug("TarFileName: %s" %(tarFileName,))
    if resExists['OK']:
      return S_OK(dict(fileName=tarFileName))
    else:
      self.log.error('Failed to create tar file from directory','%s %s' % (self.logdir, resExists['Message']))
      self.setApplicationStatus('Failed To Create Log Tar Dir')
      return S_ERROR()
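
The finalize() chain above boils down to: select the small log files, copy them into a scratch directory, tar that directory, and putFile the tarball to the LogSE (falling back to a failover SE if that fails). Below is a condensed, hypothetical sketch of the tar-and-upload step only; it reuses the same StorageElement.putFile call as the module, but the paths, the function name and the default SE name are placeholders, and the import is the usual DIRAC one that the excerpt above does not show:

import os
import tarfile
from DIRAC.Resources.Storage.StorageElement import StorageElement

def uploadLogTarball(logDir, logFilePath, tarFileName, seName='LogSE'):
  """Tar everything in logDir and put the resulting tarball to the given SE."""
  members = os.listdir(logDir)  # snapshot before the tarball itself appears in logDir
  tarLocal = os.path.join(logDir, tarFileName)
  tfile = tarfile.open(tarLocal, 'w:gz')
  for item in members:
    tfile.add(os.path.join(logDir, item), arcname=item)
  tfile.close()
  tarLFN = '%s/%s' % (logFilePath, tarFileName)
  return StorageElement(seName).putFile({tarLFN: tarLocal})

putFile returns the usual S_OK structure with 'Successful' and 'Failed' dictionaries, which is why finalize() checks both resTransfer['OK'] and resTransfer['Value']['Failed'] before declaring the upload successful.
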
Exemple #24
0
class UploadLogFile(ModuleBase):
    """ Handle log file uploads in the production jobs
  """

    #############################################################################
    def __init__(self):
        """Module initialization.
    """
        super(UploadLogFile, self).__init__()
        self.version = __RCSID__
        self.log = gLogger.getSubLogger('UploadLogFile')
        self.productionID = None
        self.jobID = None
        self.workflow_commons = None
        self.logFilePath = ""
        self.logLFNPath = ""
        self.logdir = ''
        self.logSE = StorageElement(
            self.ops.getValue('/LogStorage/LogSE', 'LogSE'))
        self.root = gConfig.getValue('/LocalSite/Root', os.getcwd())
        self.logSizeLimit = self.ops.getValue('/LogFiles/SizeLimit',
                                              20 * 1024 * 1024)
        self.logExtensions = []
        self.failoverSEs = gConfig.getValue(
            '/Resources/StorageElementGroups/Tier1-Failover', [])
        self.experiment = 'CLIC'
        self.enable = True
        self.failoverTest = False  #flag to put log files to failover by default
        self.jobID = ''

######################################################################

    def applicationSpecificInputs(self):
        """resolves the module parameters"""

        self.enable = self.step_commons.get('Enable', self.enable)
        if not isinstance(self.enable, bool):
            self.log.warn(
                'Enable flag set to non-boolean value %s, setting to False' %
                self.enable)
            self.enable = False

        if not isinstance(self.failoverTest, bool):
            self.log.warn(
                'Test failover flag set to non-boolean value %s, setting to False'
                % self.failoverTest)
            self.failoverTest = False

        self.jobID = os.environ.get('JOBID', self.jobID)
        if self.jobID:
            self.log.verbose('Found WMS JobID = %s' % self.jobID)
        else:
            self.log.info(
                'No WMS JobID found, disabling module via control flag')
            self.enable = False

        self.logFilePath = self.workflow_commons.get('LogFilePath',
                                                     self.logFilePath)
        self.logLFNPath = self.workflow_commons.get('LogTargetPath',
                                                    self.logLFNPath)
        if not (self.logFilePath and self.logLFNPath):
            self.log.info(
                'LogFilePath parameter not found, creating on the fly')
            result = getLogPath(self.workflow_commons)
            if not result['OK']:
                self.log.error('Could not create LogFilePath',
                               result['Message'])
                return result
            self.logFilePath = result['Value']['LogFilePath'][0]
            self.logLFNPath = result['Value']['LogTargetPath'][0]

        if not isinstance(self.logFilePath, basestring):
            self.logFilePath = self.logFilePath[0]
        if not isinstance(self.logLFNPath, basestring):
            self.logLFNPath = self.logLFNPath[0]

        #FIXME: What if experiments change path, pick this up from the CS instead, or find a better way
        example_file = self.logFilePath
        if "/ilc/prod/clic" in example_file:
            self.experiment = "CLIC"
        elif "/ilc/prod/ilc/sid" in example_file:
            self.experiment = 'ILC_SID'
        elif "/ilc/prod/ilc/mc-dbd" in example_file:
            self.experiment = 'ILC_ILD'
        else:
            self.log.warn(
                "Failed to determine experiment, reverting to default: %s" %
                self.experiment)

        return S_OK('Parameters resolved')

######################################################################

    def execute(self):
        """ Main execution method
    """
        self.log.info('Initializing %s' % self.version)
        # Add global reporting tool
        res = self.resolveInputVariables()
        if not res['OK']:
            self.log.error("Failed to resolve input parameters:",
                           res['Message'])

        self.logWorkingDirectory()
        self.log.info('Job root is found to be %s' % (self.root))
        self.log.info('PRODUCTION_ID = %s, JOB_ID = %s ' %
                      (self.productionID, self.jobID))
        self.logdir = os.path.realpath('./job/log/%s/%s' %
                                       (self.productionID, self.jobID))
        self.log.info('Selected log files will be temporarily stored in %s' %
                      self.logdir)

        res = self.finalize()
        return res

    #############################################################################
    def finalize(self):
        """ finalize method performs final operations after all the job
        steps were executed. Only production jobs are treated.
    """

        self.log.verbose('Starting UploadLogFile finalize')
        self.log.debug("LogFilePath: %s" % self.logFilePath)
        self.log.debug("LogLFNPath:  %s" % self.logLFNPath)

        ##########################################
        # First determine the files which should be saved
        self.log.info('Determining the files to be saved in the logs.')
        resRelevantFiles = self._determineRelevantFiles()
        if not resRelevantFiles['OK']:
            self.log.error('Completely failed to select relevant log files.',
                           resRelevantFiles['Message'])
            return S_OK(
            )  #because if the logs are lost, it's not the end of the world.

        selectedFiles = resRelevantFiles['Value']
        if not selectedFiles:
            self.log.info("No log files selected")
            return S_OK()

        self.log.info('The following %s files were selected to be saved:\n%s' %
                      (len(selectedFiles), "\n".join(selectedFiles)))

        #########################################
        # Create a temporary directory containing these files
        self.log.info('Populating a temporary directory for selected files.')
        resLogDir = self._populateLogDirectory(selectedFiles)
        if not resLogDir['OK']:
            self.log.error(
                'Completely failed to populate temporary log file directory.',
                resLogDir['Message'])
            self.setApplicationStatus('Failed To Populate Log Dir')
            return S_OK(
            )  #because if the logs are lost, it's not the end of the world.
        self.log.info('%s populated with log files.' % self.logdir)

        if not self.enable:
            self.log.info('Module is disabled by control flag')
            return S_OK('Module is disabled by control flag')

        #########################################
        #Make sure all the files in the log directory have the correct permissions
        result = self._setLogFilePermissions(self.logdir)
        if not result['OK']:
            self.log.error(
                'Could not set permissions of log files to 0755 with message:\n%s'
                % (result['Message']))

        #########################################
        #Tar all the files
        resTarLogs = self._tarTheLogFiles()
        if not resTarLogs['OK']:
            return S_OK(
            )  #because if the logs are lost, it's not the end of the world.
        tarFileName = resTarLogs['Value']['fileName']

        #########################################
        # Attempt to upload log tar ball to the LogSE
        self.log.info('Transferring log tarball to the %s' % self.logSE.name)
        resTransfer = S_ERROR("Skipping log upload, because failoverTest")
        tarFileLFN = '%s/%s' % (self.logFilePath, tarFileName)
        tarFileLocal = os.path.realpath(os.path.join(self.logdir, tarFileName))
        if not self.failoverTest:
            self.log.info('PutFile %s %s %s' %
                          (tarFileLFN, tarFileLocal, self.logSE.name))
            resTransfer = self.logSE.putFile({tarFileLFN: tarFileLocal})
            self.log.debug("putFile result: %s" % resTransfer)
            if not resTransfer['OK']:
                self.log.error(
                    'Completely failed to upload log files to %s, will attempt upload to failover SE'
                    % self.logSE.name, resTransfer['Message'])
            elif resTransfer['OK'] and len(
                    resTransfer['Value']['Failed']) == 0:
                self.log.info('Successfully uploaded log tarball to %s' %
                              self.logSE.name)
                self.setJobParameter('Log LFN', tarFileLFN)
                self.log.info(
                    'Logs for this job may be retrieved with dirac-ilc-get-prod-log -F %s'
                    % tarFileLFN)
                return S_OK()
            else:
                self.log.error(
                    'Completely failed to upload log files to %s, will attempt upload to failover SE'
                    % self.logSE.name, resTransfer['Value'])

        #########################################
        # Recover the logs to a failover storage element

        #if res['Value'][0]: #i.e. non-zero status
        #  self.log.error('Failed to create tar file from directory','%s %s' % (self.logdir,res['Value']))
        #  self.setApplicationStatus('Failed To Create Log Tar Dir')
        #  return S_OK()#because if the logs are lost, it's not the end of the world.

        ############################################################
        #Instantiate the failover transfer client with the global request object
        resFailover = self._tryFailoverTransfer(tarFileName, self.logdir)
        if not resFailover['Value']:
            return resFailover  ##log files lost, but who cares...

        self.workflow_commons['Request'] = resFailover['Value']['Request']
        uploadedSE = resFailover['Value']['uploadedSE']
        res = self._createLogUploadRequest(self.logSE.name, self.logLFNPath,
                                           uploadedSE)
        if not res['OK']:
            self.log.error('Failed to create failover request', res['Message'])
            self.setApplicationStatus('Failed To Upload Logs To Failover')
        else:
            self.log.info('Successfully created failover request')
            self.log.info("Request %s" % self._getRequestContainer())
        return S_OK()

    #############################################################################
    def _determineRelevantFiles(self):
        """ The files which are below a configurable size will be stored in the logs.
        This will typically pick up everything in the working directory minus the output data files.
    """
        logFileExtensions = [
            '*.txt', '*.log', '*.out', '*.output', '*.xml', '*.sh', '*.info',
            '*.err', '*.root'
        ]
        self.logExtensions = self.ops.getValue(
            '/LogFiles/%s/Extensions' % self.experiment, [])

        if self.logExtensions:
            self.log.info('Using list of log extensions from CS:\n%s' %
                          (', '.join(self.logExtensions)))
            logFileExtensions = self.logExtensions
        else:
            self.log.info('Using default list of log extensions:\n%s' %
                          (', '.join(logFileExtensions)))

        candidateFiles = []
        for ext in logFileExtensions:
            self.log.debug('Looking at log file wildcard: %s' % ext)
            globList = glob.glob(ext)
            for check in globList:
                if os.path.isfile(check):
                    self.log.debug('Found locally existing log file: %s' %
                                   check)
                    candidateFiles.append(check)

        selectedFiles = []
        try:
            for candidate in candidateFiles:
                fileSize = os.stat(candidate)[6]
                if fileSize < self.logSizeLimit:
                    selectedFiles.append(candidate)
                else:
                    self.log.error(
                        'Log file found to be greater than maximum of %s bytes'
                        % self.logSizeLimit, candidate)
            return S_OK(selectedFiles)
        except OSError as x:
            self.log.exception('Exception while determining files to save.',
                               '', str(x))
            return S_ERROR('Could not determine log files')

    #############################################################################
    def _populateLogDirectory(self, selectedFiles):
        """ A temporary directory is created for all the selected files.
        These files are then copied into this directory before being uploaded
    """
        # Create the temporary directory
        try:
            if not os.path.exists(self.logdir):
                os.makedirs(self.logdir)
        except OSError as x:
            self.log.exception(
                'PopulateLogDir: Exception while trying to create directory.',
                self.logdir, str(x))
            return S_ERROR()
        # Set proper permissions
        self.log.info(
            'PopulateLogDir: Changing log directory %s permissions to 0755' %
            self.logdir)
        try:
            os.chmod(self.logdir, 0755)
        except OSError as x:
            self.log.error(
                'PopulateLogDir: Could not set logdir permissions to 0755:',
                '%s (%s)' % (self.logdir, str(x)))
        # Populate the temporary directory
        try:
            for myfile in selectedFiles:
                destinationFile = '%s/%s' % (self.logdir,
                                             os.path.basename(myfile))
                shutil.copy(myfile, destinationFile)
        except OSError as x:
            self.log.exception(
                'PopulateLogDir: Exception while trying to copy file.', myfile,
                str(x))
            self.log.info(
                'PopulateLogDir: File %s will be skipped and can be considered lost.'
                % myfile)

        # Now verify the contents of our target log dir
        successfulFiles = os.listdir(self.logdir)
        self.log.info("PopulateLogDir: successfulFiles %s" % successfulFiles)
        if len(successfulFiles) == 0:
            self.log.info(
                'PopulateLogDir: Failed to copy any files to the target directory.'
            )
            return S_ERROR()
        else:
            self.log.info(
                'PopulateLogDir: Prepared %s files in the temporary directory.'
                % len(successfulFiles))
            return S_OK()

    #############################################################################
    def _createLogUploadRequest(self, targetSE, logFileLFN, uploadedSE):
        """ Set a request to upload job log files from the output sandbox
        Changed to be similar to LHCb createLogUploadRequest
        using LHCb LogUpload Request and Removal Request
    """
        self.log.info('Setting log upload request for %s at %s' %
                      (targetSE, logFileLFN))
        request = self._getRequestContainer()

        logUpload = Operation()
        logUpload.Type = "LogUpload"
        logUpload.TargetSE = targetSE

        upFile = File()
        upFile.LFN = logFileLFN
        logUpload.addFile(upFile)

        logRemoval = Operation()
        logRemoval.Type = 'RemoveFile'
        logRemoval.TargetSE = uploadedSE
        logRemoval.addFile(upFile)

        request.addOperation(logUpload)
        request.addOperation(logRemoval)

        self.workflow_commons['Request'] = request

        return S_OK()

    #############################################################################
    def _setLogFilePermissions(self, logDir):
        """ Sets the permissions of all the files in the log directory to ensure
        they are readable.
    """
        try:
            for toChange in os.listdir(logDir):
                if not os.path.islink('%s/%s' % (logDir, toChange)):
                    self.log.debug('Changing permissions of %s/%s to 0755' %
                                   (logDir, toChange))
                    os.chmod('%s/%s' % (logDir, toChange), 0755)
        except OSError as x:
            self.log.error('Problem changing shared area permissions', str(x))
            return S_ERROR(x)

        return S_OK()

    ################################################################################
    def _tryFailoverTransfer(self, tarFileName, tarFileDir):
        """tries to upload the log tarBall to the failoverSE and creates moving request"""
        failoverTransfer = FailoverTransfer(self._getRequestContainer())
        ##determine the experiment
        self.failoverSEs = self.ops.getValue(
            "Production/%s/FailOverSE" % self.experiment, self.failoverSEs)
        catalogs = self.ops.getValue(
            'Production/%s/Catalogs' % self.experiment,
            ['FileCatalog', 'LcgFileCatalog'])

        random.shuffle(self.failoverSEs)
        self.log.info(
            "Attempting to store file %s to the following SE(s):\n%s" %
            (tarFileName, ', '.join(self.failoverSEs)))
        result = failoverTransfer.transferAndRegisterFile(
            tarFileName,
            '%s/%s' % (tarFileDir, tarFileName),
            self.logLFNPath,
            self.failoverSEs,
            fileMetaDict={"GUID": None},
            fileCatalog=catalogs)
        if not result['OK']:
            self.log.error('Failed to upload logs to all destinations')
            self.setApplicationStatus('Failed To Upload Logs')
            return S_OK(
            )  #because if the logs are lost, it's not the end of the world.

        #Now after all operations, return potentially modified request object
        return S_OK({
            'Request': failoverTransfer.request,
            'uploadedSE': result['Value']['uploadedSE']
        })

    def _tarTheLogFiles(self):
        """returns S_OK/S_ERROR and puts all the relevantFiles into a tarball"""
        self.logLFNPath = '%s.gz' % self.logLFNPath
        tarFileName = os.path.basename(self.logLFNPath)

        self.log.debug("Log Directory: %s" % self.logdir)
        self.log.debug("Log LFNPath:   %s" % self.logLFNPath)
        self.log.debug("TarFileName:   %s" % tarFileName)
        start = os.getcwd()
        os.chdir(self.logdir)
        logTarFiles = os.listdir(self.logdir)
        #comm = 'tar czvf %s %s' % (tarFileName,string.join(logTarFiles,' '))
        tfile = tarfile.open(tarFileName, "w:gz")
        for item in logTarFiles:
            self.log.info("Adding file %s to tarfile %s" % (item, tarFileName))
            tfile.add(item)
        tfile.close()
        resExists = S_OK() if os.path.exists(tarFileName) else S_ERROR(
            "File was not created")
        os.chdir(start)
        self.log.debug("TarFileName: %s" % (tarFileName, ))
        if resExists['OK']:
            return S_OK(dict(fileName=tarFileName))
        else:
            self.log.error('Failed to create tar file from directory',
                           '%s %s' % (self.logdir, resExists['Message']))
            self.setApplicationStatus('Failed To Create Log Tar Dir')
            return S_ERROR()
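
When the LogSE upload fails, _tryFailoverTransfer() above hands the tarball to FailoverTransfer, which tries a shuffled list of failover SEs and registers the file in the configured catalogs. Below is a hedged sketch of that call only; it assumes a FailoverTransfer instance is already available as in the module (the excerpt omits its import), and the default SE names and the function name are placeholders:

import random

def pushToFailover(failoverTransfer, tarFileName, tarFileDir, logLFNPath,
                   failoverSEs=('SOME-FAILOVER-SE-1', 'SOME-FAILOVER-SE-2'),
                   catalogs=('FileCatalog',)):
    """Try the candidate failover SEs in random order; return the SE that accepted the file."""
    candidates = list(failoverSEs)
    random.shuffle(candidates)
    res = failoverTransfer.transferAndRegisterFile(
        tarFileName, '%s/%s' % (tarFileDir, tarFileName), logLFNPath,
        candidates, fileMetaDict={'GUID': None}, fileCatalog=list(catalogs))
    if not res['OK']:
        return None  # logs are lost, mirroring the module's "not the end of the world" policy
    return res['Value']['uploadedSE']

The SE returned here is what _createLogUploadRequest() then uses: it queues a LogUpload operation towards the LogSE and a RemoveFile operation against the failover SE, so the tarball eventually ends up in its final location and the temporary copy is cleaned up.
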
Exemple #25
0
class TestBase( unittest.TestCase ):
  """ Base test class. Defines all the method to test
  """


  @mock.patch( 'DIRAC.Resources.Storage.StorageFactory.StorageFactory._getConfigStorageName',
                side_effect = mock_StorageFactory_getConfigStorageName )
  @mock.patch( 'DIRAC.Resources.Storage.StorageFactory.StorageFactory._getConfigStorageOptions',
                side_effect = mock_StorageFactory_getConfigStorageOptions )
  @mock.patch( 'DIRAC.Resources.Storage.StorageFactory.StorageFactory._getConfigStorageProtocols',
                side_effect = mock_StorageFactory_getConfigStorageProtocols )
  @mock.patch( 'DIRAC.Resources.Storage.StorageFactory.StorageFactory._StorageFactory__generateStorageObject',
                side_effect = mock_StorageFactory_generateStorageObject )
  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
                return_value = S_OK( True ) )  # Pretend it's local
  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
                return_value = None )  # Don't send accounting
  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.DMSHelpers', side_effect = fake_DMSHelpers )
  def setUp( self, _mk_getConfigStorageName, _mk_getConfigStorageOptions, _mk_getConfigStorageProtocols,
             _mk_generateStorage, _mk_isLocalSE, _mk_addAccountingOperation, _mk_dmsHelpers ):
    self.seA = StorageElementItem( 'StorageA' )
    self.seA.vo = 'lhcb'
    self.seB = StorageElementItem( 'StorageB' )
    self.seB.vo = 'lhcb'
    self.seC = StorageElementItem( 'StorageC' )
    self.seC.vo = 'lhcb'
    self.seD = StorageElementItem( 'StorageD' )
    self.seD.vo = 'lhcb'
    self.seE = StorageElementItem( 'StorageE' )
    self.seE.vo = 'lhcb'



  def tearDown( self ):
    pass



  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
                return_value = S_OK( True ) )  # Pretend it's local
  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
                return_value = None )  # Don't send accounting
  def test_01_negociateProtocolWithOtherSE( self, mk_isLocalSE, mk_addAccounting ):
    """Testing negotiation algorithm"""

    # Find common protocol between SRM2 and File
    res = self.seA.negociateProtocolWithOtherSE( self.seB )
    self.assertTrue( res['OK'], res )
    self.assertEqual( res['Value'], ['file'] )

    # Find common protocol between File and SRM
    res = self.seB.negociateProtocolWithOtherSE( self.seA )
    self.assertTrue( res['OK'], res )
    self.assertEqual( res['Value'], ['file'] )


    # Find common protocol between XROOT and File
    # Nothing goes from xroot to file
    res = self.seA.negociateProtocolWithOtherSE( self.seC )
    self.assertTrue( res['OK'], res )
    self.assertEqual( res['Value'], [] )

    # Find common protocol between File and XROOT
    res = self.seC.negociateProtocolWithOtherSE( self.seA )
    self.assertTrue( res['OK'], res )
    self.assertEqual( res['Value'], ['file'] )

    # Find common protocol between File and File
    res = self.seA.negociateProtocolWithOtherSE( self.seA )
    self.assertTrue( res['OK'], res )
    self.assertEqual( res['Value'], ['file'] )

    # Find common protocol between SRM and SRM
    res = self.seB.negociateProtocolWithOtherSE( self.seB )
    self.assertTrue( res['OK'], res )
    self.assertEqual( sorted( res['Value'] ), sorted( ['file', 'root', 'srm'] ) )


    # Find common protocol between SRM and XROOT
    res = self.seC.negociateProtocolWithOtherSE( self.seB )
    self.assertTrue( res['OK'], res )
    self.assertEqual( sorted( res['Value'] ), sorted( ['root', 'file'] ) )

    # Find common protocol between XROOT and SRM
    res = self.seC.negociateProtocolWithOtherSE( self.seB )
    self.assertTrue( res['OK'], res )
    self.assertEqual( sorted( res['Value'] ), sorted( ['root', 'file'] ) )

    # Testing restrictions
    res = self.seC.negociateProtocolWithOtherSE( self.seB, protocols = ['file'] )
    self.assertTrue( res['OK'], res )
    self.assertEqual( sorted( res['Value'] ), ['file'] )

    res = self.seC.negociateProtocolWithOtherSE( self.seB, protocols = ['nonexisting'] )
    self.assertTrue( res['OK'], res )
    self.assertEqual( res['Value'], [] )


  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
                return_value = S_OK( True ) )  # Pretend it's local
  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
                return_value = None )  # Don't send accounting
  def test_02_followOrder( self, _mk_isLocalSE, _mk_addAccounting ):
    """Testing If the order of preferred protocols is respected"""

    for permutation in itertools.permutations( ['srm', 'file', 'root', 'nonexisting'] ):
      permuList = list( permutation )
      # Note: remove() cannot be chained onto the list() conversion above,
      # because it modifies the list in place and returns None
      permuList.remove( 'nonexisting' )
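      # e.g. permutation = ( 'file', 'nonexisting', 'srm', 'root' )
      # should negotiate down to ['file', 'srm', 'root'], preserving the requested order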
      res = self.seD.negociateProtocolWithOtherSE( self.seD, protocols = permutation )
      self.assertTrue( res['OK'], res )
      self.assertEqual( res['Value'], permuList )



  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
                return_value = S_OK( True ) )  # Pretend it's local
  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
                return_value = None )  # Don't send accounting
  def test_03_multiProtocolThirdParty( self, _mk_isLocalSE, _mk_addAccounting ):
    """
      Test case for storages with several protocols

      Here comes the fun :-)
      Suppose we have endpoints that we can read with root, but cannot write to with root.
      If root is in the accessProtocols and thirdPartyProtocols lists
      but not in the writeProtocols list, we should get a root url to read,
      and write with SRM

      We reproduce here the behavior of DataManager.replicate

    """

    thirdPartyProtocols = ['root', 'srm']

    lfn = '/lhcb/fake/lfn'
    res = self.seD.negociateProtocolWithOtherSE( self.seD, protocols = thirdPartyProtocols )
    self.assertTrue( res['OK'], res )
    self.assertEqual( res['Value'] , thirdPartyProtocols )


    # Only the XROOT plugin here implements getTransportURL
    # in a way that returns what we want, so if the call is successful,
    # we know the XROOT plugin was used
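    # (the fake plugins are assumed to return strings of the form '<protocol>:<methodName>',
    # which is what lets the assertions identify which plugin actually handled the call)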
    res = self.seD.getURL( lfn, protocol = res['Value'] )
    self.assertTrue( res['OK'], res )
    self.assertTrue( lfn in res['Value']['Successful'], res )

    srcUrl = res['Value']['Successful'][lfn]
    self.assertEqual( srcUrl, "root:getTransportURL" )

    # Only the SRM2 plugin here implements the putFile method
    # so if we get a success here, it means that we used the SRM plugin
    res = self.seD.replicateFile( {lfn:srcUrl},
                                   sourceSize = 123,
                                   inputProtocol = 'root' )

    self.assertTrue( res['OK'], res )
    self.assertTrue( lfn in res['Value']['Successful'], res )
    self.assertEqual( res['Value']['Successful'][lfn], "srm:putFile" )


  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
                return_value = S_OK( True ) )  # Pretend it's local
  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
                return_value = None )  # Don't send accounting
  def test_04_thirdPartyLocalWrite( self, _mk_isLocalSE, _mk_addAccounting ):
    """
      Test case for storages with several protocols

      Here, we locally define the write protocols to be root and srm,
      so we should be able to do everything with the XROOT plugin

    """

    thirdPartyProtocols = ['root', 'srm']

    lfn = '/lhcb/fake/lfn'
    res = self.seE.negociateProtocolWithOtherSE( self.seE, protocols = thirdPartyProtocols )
    self.assertTrue( res['OK'], res )
    self.assertEqual( res['Value'] , thirdPartyProtocols )

    res = self.seE.getURL( lfn, protocol = res['Value'] )
    self.assertTrue( res['OK'], res )
    self.assertTrue( lfn in res['Value']['Successful'], res )

    srcUrl = res['Value']['Successful'][lfn]
    self.assertEqual( srcUrl, "root:getTransportURL" )

    res = self.seE.replicateFile( {lfn:srcUrl},
                                   sourceSize = 123,
                                   inputProtocol = 'root' )

    self.assertTrue( res['OK'], res )
    self.assertTrue( lfn in res['Value']['Successful'], res )
    self.assertEqual( res['Value']['Successful'][lfn], "root:putFile" )


  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
                return_value = S_OK( True ) )  # Pretend it's local
  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
                return_value = None )  # Don't send accounting
  def test_05_thirdPartyMix( self, _mk_isLocalSE, _mk_addAccounting ):
    """
      Test case for storages with several protocols

      Here, we locally define the write protocol for the destination, so it should
      all go directly through the XROOT plugin

    """

    thirdPartyProtocols = ['root', 'srm']

    lfn = '/lhcb/fake/lfn'
    res = self.seE.negociateProtocolWithOtherSE( self.seD, protocols = thirdPartyProtocols )
    self.assertTrue( res['OK'], res )
    self.assertEqual( res['Value'] , thirdPartyProtocols )

    res = self.seD.getURL( lfn, protocol = res['Value'] )
    self.assertTrue( res['OK'], res )
    self.assertTrue( lfn in res['Value']['Successful'], res )

    srcUrl = res['Value']['Successful'][lfn]
    self.assertEqual( srcUrl, "root:getTransportURL" )

    res = self.seE.replicateFile( {lfn:srcUrl},
                                   sourceSize = 123,
                                   inputProtocol = 'root' )

    self.assertTrue( res['OK'], res )
    self.assertTrue( lfn in res['Value']['Successful'], res )
    self.assertEqual( res['Value']['Successful'][lfn], "root:putFile" )


  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
                return_value = S_OK( True ) )  # Pretend it's local
  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
                return_value = None )  # Don't send accounting
  def test_06_thirdPartyMixOpposite( self, _mk_isLocalSE, _mk_addAccounting ):
    """
      Test case for storages with several protocols

      Here, we locally define the write protocol for the source, so it should
      get the source directly using XROOT, and perform the put using SRM

    """

    thirdPartyProtocols = ['root', 'srm']

    lfn = '/lhcb/fake/lfn'
    res = self.seD.negociateProtocolWithOtherSE( self.seE, protocols = thirdPartyProtocols )
    self.assertTrue( res['OK'], res )
    self.assertEqual( res['Value'] , thirdPartyProtocols )

    res = self.seE.getURL( lfn, protocol = res['Value'] )
    self.assertTrue( res['OK'], res )
    self.assertTrue( lfn in res['Value']['Successful'], res )

    srcUrl = res['Value']['Successful'][lfn]
    self.assertEqual( srcUrl, "root:getTransportURL" )

    res = self.seD.replicateFile( {lfn:srcUrl},
                                   sourceSize = 123,
                                   inputProtocol = 'root' )

    self.assertTrue( res['OK'], res )
    self.assertTrue( lfn in res['Value']['Successful'], res )
    self.assertEqual( res['Value']['Successful'][lfn], "srm:putFile" )


  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
                return_value = S_OK( True ) )  # Pretend it's local
  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
                return_value = None )  # Don't send accounting
  def test_07_multiProtocolSrmOnly( self, _mk_isLocalSE, _mk_addAccounting ):
    """
      Test case for storages with several protocols

      Here we restrict the negotiation to SRM only, so both the source
      url and the put should go through the SRM plugin

      We reproduce here the behavior of DataManager.replicate

    """

    thirdPartyProtocols = ['srm']

    print( "negociate" )
    lfn = '/lhcb/fake/lfn'
    res = self.seD.negociateProtocolWithOtherSE( self.seD, protocols = thirdPartyProtocols )
    self.assertTrue( res['OK'], res )
    self.assertEqual( res['Value'] , thirdPartyProtocols )

    print "get source url"

    res = self.seD.getURL( lfn, protocol = res['Value'] )
    self.assertTrue( res['OK'], res )
    self.assertTrue( lfn in res['Value']['Successful'], res )

    srcUrl = res['Value']['Successful'][lfn]
    self.assertEqual( srcUrl, "srm:getTransportURL" )

    print "replicate"
    res = self.seD.replicateFile( {lfn:srcUrl},
                                   sourceSize = 123,
                                   inputProtocol = 'srm' )

    self.assertTrue( res['OK'], res )
    self.assertTrue( lfn in res['Value']['Successful'], res )
    self.assertEqual( res['Value']['Successful'][lfn], "srm:putFile" )