Пример #1
0
  def resolveLFNZeroReplicas( self, problematicDict ):
    """ This takes the problematic dictionary returned by the integrity DB and resolves the LFNZeroReplicas prognosis

    If the file has regained catalog replicas the problem is considered
    solved. Otherwise the Tier1 storage elements are scanned for
    unregistered physical copies, which are reported as 'PFNNotRegistered';
    if no copy exists anywhere the orphan LFN is removed from the catalog.

    :param dict problematicDict: integrity DB record with 'LFN' and 'FileID'
    """
    lfn = problematicDict['LFN']
    fileID = problematicDict['FileID']

    res = Utils.executeSingleFileOrDirWrapper( self.fc.getReplicas( lfn, allStatus = True ) )
    if res['OK'] and res['Value']:
      gLogger.info( "LFNZeroReplicas file (%d) found to have replicas" % fileID )
    else:
      gLogger.info( "LFNZeroReplicas file (%d) does not have replicas. Checking storage..." % fileID )
      pfnsFound = False
      for storageElementName in sortList( gConfig.getValue( 'Resources/StorageElementGroups/Tier1_MC_M-DST', [] ) ):
        res = self.__getStoragePathExists( [lfn], storageElementName )
        # 'in' replaces the Python-3-removed dict.has_key; the res['OK'] guard
        # skips failed SE queries instead of raising KeyError on res['Value']
        if res['OK'] and lfn in res['Value']:
          gLogger.info( "LFNZeroReplicas file (%d) found storage file at %s" % ( fileID, storageElementName ) )
          pfn = res['Value'][lfn]
          self.__reportProblematicReplicas( [( lfn, pfn, storageElementName, 'PFNNotRegistered' )], storageElementName, 'PFNNotRegistered' )
          pfnsFound = True
      if not pfnsFound:
        gLogger.info( "LFNZeroReplicas file (%d) did not have storage files. Removing..." % fileID )
        res = Utils.executeSingleFileOrDirWrapper( self.fc.removeFile( lfn ) )
        if not res['OK']:
          gLogger.error( res['Message'] )
          # Increment the number of retries for this file
          self.server.incrementProblematicRetry( fileID )
          return res
        gLogger.info( "LFNZeroReplicas file (%d) removed from catalog" % fileID )
    # If we get here the problem is solved so we can update the integrityDB
    return self.__updateCompletedFiles( 'LFNZeroReplicas', fileID )
Пример #2
0
  def resolvePFNMissing( self, problematicDict ):
    """ This takes the problematic dictionary returned by the integrity DB and resolved the PFNMissing prognosis

    Checks whether the file still exists in the catalog and on the storage;
    if the replica is really gone it is removed from the catalog and, when
    another replica at the same site survives, re-replicated from it.

    :param dict problematicDict: integrity DB record with 'LFN', 'PFN',
                                 'SE' and 'FileID'
    """
    pfn = problematicDict['PFN']
    se = problematicDict['SE']
    lfn = problematicDict['LFN']
    fileID = problematicDict['FileID']

    res = Utils.executeSingleFileOrDirWrapper( self.fc.exists( lfn ) )
    if not res['OK']:
      return self.__returnProblematicError( fileID, res )
    if not res['Value']:
      gLogger.info( "PFNMissing file (%d) no longer exists in catalog" % fileID )
      return self.__updateCompletedFiles( 'PFNMissing', fileID )

    res = Utils.executeSingleFileOrDirWrapper( StorageElement( se ).exists( pfn ) )
    if not res['OK']:
      return self.__returnProblematicError( fileID, res )
    if res['Value']:
      gLogger.info( "PFNMissing replica (%d) is no longer missing" % fileID )
      return self.__updateReplicaToChecked( problematicDict )
    gLogger.info( "PFNMissing replica (%d) does not exist" % fileID )
    res = Utils.executeSingleFileOrDirWrapper( self.fc.getReplicas( lfn, allStatus = True ) )
    if not res['OK']:
      return self.__returnProblematicError( fileID, res )
    replicas = res['Value']
    # seSite: strip storage suffixes, e.g. 'CERN-RAW' / 'CERN_MC-DST' -> 'CERN'
    seSite = se.split( '_' )[0].split( '-' )[0]
    found = False
    # replaced stray debug statement "print replicas" with a proper log call
    gLogger.debug( "PFNMissing replica (%d) registered replicas: %s" % ( fileID, replicas ) )
    for replicaSE in replicas:
      if re.search( seSite, replicaSE ):
        found = True
        problematicDict['SE'] = replicaSE
        se = replicaSE
    if not found:
      gLogger.info( "PFNMissing replica (%d) is no longer registered at SE. Resolved." % fileID )
      return self.__updateCompletedFiles( 'PFNMissing', fileID )
    gLogger.info( "PFNMissing replica (%d) does not exist. Removing from catalog..." % fileID )
    res = Utils.executeSingleFileOrDirWrapper( self.fc.removeReplica( {lfn:problematicDict} ) )
    if not res['OK']:
      return self.__returnProblematicError( fileID, res )
    if len( replicas ) == 1:
      gLogger.info( "PFNMissing replica (%d) had a single replica. Updating prognosis" % fileID )
      return self.changeProblematicPrognosis( fileID, 'LFNZeroReplicas' )
    res = self.dm.replicateAndRegister( problematicDict['LFN'], se )
    if not res['OK']:
      return self.__returnProblematicError( fileID, res )
    # If we get here the problem is solved so we can update the integrityDB
    return self.__updateCompletedFiles( 'PFNMissing', fileID )
Пример #3
0
  def __registerSuccessful( self, transLFNs ):
    """ register successfully transferred files to the catalogs,
    fill failedRegistrations dict for files that failed to register

    :param self: self reference
    :param list transLFNs: LFNs in FTS job
    :return: S_OK( ( number registered, number attempted ) )
    """
    self.failedRegistrations = {}
    toRegister = {}
    for lfn in transLFNs:
      res = Utils.executeSingleFileOrDirWrapper( self.oTargetSE.getPfnForProtocol( self.fileDict[lfn].get( 'Target' ), protocol = 'SRM2', withPort = False ) )
      if not res['OK']:
        self.__setFileParameter( lfn, 'Reason', res['Message'] )
        self.__setFileParameter( lfn, 'Status', 'Failed' )
      else:
        toRegister[lfn] = { 'PFN' : res['Value'], 'SE' : self.targetSE }
    if not toRegister:
      return S_OK( ( 0, 0 ) )
    res = self.__getCatalogObject()
    if not res['OK']:
      # without a catalog nothing can be registered: flag everything as failed
      # (previously this was wrapped in a pointless single-iteration loop)
      self.failedRegistrations = toRegister
      self.log.error( 'Failed to get Catalog Object', res['Message'] )
      return S_OK( ( 0, len( toRegister ) ) )
    res = self.oCatalog.addReplica( toRegister )
    if not res['OK']:
      self.failedRegistrations = toRegister
      # was logging 'Failed to get Catalog Object' here - copy-paste error
      self.log.error( 'Failed to add replicas to Catalog', res['Message'] )
      return S_OK( ( 0, len( toRegister ) ) )
    for lfn, error in res['Value']['Failed'].items():
      self.failedRegistrations[lfn] = toRegister[lfn]
      self.log.error( 'Registration of Replica failed', '%s : %s' % ( lfn, str( error ) ) )
    return S_OK( ( len( res['Value']['Successful'] ), len( toRegister ) ) )
Пример #4
0
  def test_getAccessUrl( self ):
    print '\n\n#########################################################################\n\n\t\tGet access url test\n'
    destinationFilePath = '%s/testFile.%s' % ( self.destDirectory, time.time() )
    pfnForLfnRes = Utils.executeSingleFileOrDirWrapper( self.storageElement.getPfnForLfn( destinationFilePath ) )
    destinationPfn = pfnForLfnRes['Value']
    fileDict = {destinationPfn:self.localSourceFile}
    putFileRes = self.storageElement.putFile( fileDict, singleFile = True )
    # Get a transfer url for the file
    getTurlRes = self.storageElement.getAccessUrl( destinationPfn, singleFile = True )
    # Remove the destination file
    removeFileRes = self.storageElement.removeFile( destinationPfn, singleFile = True )
    # Get missing turl res
    getMissingTurlRes = self.storageElement.getAccessUrl( destinationPfn, singleFile = True )

    # Check that the put was done correctly
    self.assert_( putFileRes['OK'] )
    self.assert_( putFileRes['Value'] )
    self.assertEqual( putFileRes['Value'], self.localFileSize )
    # Check that we can get the tURL properly
    self.assert_( getTurlRes['OK'] )
    self.assert_( getTurlRes['Value'] )
    self.assert_( type( getTurlRes['Value'] ) in types.StringTypes )
    # Check that the removal was done correctly
    self.assert_( removeFileRes['OK'] )
    self.assert_( removeFileRes['Value'] )
Пример #5
0
  def test_getFileSize( self ):
    print '\n\n#########################################################################\n\n\t\t\tGet file size test\n'
    destinationFilePath = '%s/testFile.%s' % ( self.destDirectory, time.time() )
    pfnForLfnRes = Utils.executeSingleFileOrDirWrapper( self.storageElement.getPfnForLfn( destinationFilePath ) )
    destinationPfn = pfnForLfnRes['Value']
    fileDict = {destinationPfn:self.localSourceFile}
    putFileRes = self.storageElement.putFile( fileDict, singleFile = True )
    # Get the file metadata
    getFileSizeRes = self.storageElement.getFileSize( destinationPfn, singleFile = True )
    # Now remove the destination file
    removeFileRes = self.storageElement.removeFile( destinationPfn, singleFile = True )
    # Get metadata for a removed file
    getMissingFileSizeRes = self.storageElement.getFileSize( destinationPfn, singleFile = True )
    # Check directories are handled properly
    destinationDir = os.path.dirname( destinationPfn )
    directorySizeRes = self.storageElement.getFileSize( destinationDir, singleFile = True )

    # Check that the put was done correctly
    self.assert_( putFileRes['OK'] )
    self.assert_( putFileRes['Value'] )
    self.assertEqual( putFileRes['Value'], self.localFileSize )
    # Check that the metadata was done correctly
    self.assert_( getFileSizeRes['OK'] )
    self.assertEqual( getFileSizeRes['Value'], self.localFileSize )
    # Check that the removal was done correctly
    self.assert_( removeFileRes['OK'] )
    self.assert_( removeFileRes['Value'] )
    # Check the get metadata for non existant file
    self.assertFalse( getMissingFileSizeRes['OK'] )
    expectedError = "File does not exist"
    self.assert_( expectedError in getMissingFileSizeRes['Message'] )
    # Check that metadata operation with a directory
    self.assertFalse( directorySizeRes['OK'] )
    expectedError = "Supplied path is not a file"
    self.assert_( expectedError in directorySizeRes['Message'] )
  def __getCatalogDirectoryContents( self, directories ):
    """ get catalog contents under paths :directories:

    Performs a breadth-first walk of the catalog starting from the supplied
    directories and collects every file found underneath them.

    :param self: self reference
    :param list directories: list of paths in catalog
    :return: S_OK( list of file LFNs )
    """
    self.log.info( 'Obtaining the catalog contents for %d directories:' % len( directories ) )
    for directory in directories:
      self.log.info( directory )
    # work on a copy: the original code aliased the caller's list and
    # mutated it (remove/extend) while walking
    activeDirs = list( directories )
    allFiles = {}
    fc = FileCatalog()
    while activeDirs:
      # pop(0) replaces the O(n) remove-by-value of the same element
      currentDir = activeDirs.pop( 0 )
      res = Utils.executeSingleFileOrDirWrapper( fc.listDirectory( currentDir ) )
      if not res['OK'] and res['Message'].endswith( 'The supplied path does not exist' ):
        self.log.info( "The supplied directory %s does not exist" % currentDir )
      elif not res['OK']:
        if "No such file or directory" in res['Message']:
          self.log.info( "%s: %s" % ( currentDir, res['Message'] ) )
        else:
          self.log.error( "Failed to get directory %s content: %s" % ( currentDir, res['Message'] ) )
      else:
        dirContents = res['Value']
        # queue subdirectories for the next rounds, accumulate files
        activeDirs.extend( dirContents['SubDirs'] )
        allFiles.update( dirContents['Files'] )
    self.log.info( "Found %d files" % len( allFiles ) )
    return S_OK( allFiles.keys() )
Пример #7
0
  def __insertRegisterOperation( self, request, operation, toRegister ):
    """ add RegisterReplica operation

    :param Request request: request instance
    :param Operation operation: 'ReplicateAndRegister' operation for this FTSJob
    :param list toRegister: [ FTSDB.FTSFile, ... ] - files that failed to register
    """
    log = self.log.getSubLogger( "%s/registerFiles" % request.RequestName )

    # group the files by their target SE; setdefault makes the redundant
    # membership check of the original unnecessary
    byTarget = {}
    for ftsFile in toRegister:
      byTarget.setdefault( ftsFile.TargetSE, [] ).append( ftsFile )
    log.info( "will create %s 'RegisterReplica' operations" % len( byTarget ) )

    for target, ftsFileList in byTarget.items():
      log.info( "creating 'RegisterReplica' operation for targetSE %s with %s files..." % ( target,
                                                                                            len( ftsFileList ) ) )
      registerOperation = Operation()
      registerOperation.Type = "RegisterReplica"
      registerOperation.Status = "Waiting"
      registerOperation.TargetSE = target
      targetSE = self.getSE( target )
      for ftsFile in ftsFileList:
        opFile = File()
        opFile.LFN = ftsFile.LFN
        pfn = Utils.executeSingleFileOrDirWrapper( targetSE.getPfnForProtocol( ftsFile.TargetSURL, protocol = "SRM2", withPort = False ) )
        if not pfn["OK"]:
          # was a silent skip - log it so dropped registrations are traceable
          log.warn( "unable to get PFN for %s at %s: %s" % ( ftsFile.LFN, target, pfn["Message"] ) )
          continue
        opFile.PFN = pfn["Value"]
        registerOperation.addFile( opFile )
      # schedule the registration before the originating transfer operation
      request.insertBefore( registerOperation, operation )

    return S_OK()
Пример #8
0
  def resolveCatalogPFNSizeMismatch( self, problematicDict ):
    """ This takes the problematic dictionary returned by the integrity DB and resolved the CatalogPFNSizeMismatch prognosis

    Compares three sizes for the file - file catalog, storage element and
    bookkeeping catalog - and acts on whichever pair agrees:

      * all three equal         -> replica is fine, mark it Checked
      * catalog == bookkeeping  -> storage copy is bad; remove the replica
                                   unless it is the only one
      * bookkeeping == storage  -> catalog entry is bad; mark the replica
                                   Checked and reclassify as BKCatalogSizeMismatch
      * no agreement            -> just bump the retry counter

    :param dict problematicDict: integrity DB record with 'LFN', 'PFN',
                                 'SE' and 'FileID'
    """
    lfn = problematicDict['LFN']
    pfn = problematicDict['PFN']
    se = problematicDict['SE']
    fileID = problematicDict['FileID']


    # size registered in the file catalog
    res = Utils.executeSingleFileOrDirWrapper( self.fc.getFileSize( lfn ) )
    if not res['OK']:
      return self.__returnProblematicError( fileID, res )
    catalogSize = res['Value']
    # physical size on the storage element
    res = Utils.executeSingleFileOrDirWrapper( StorageElement( se ).getFileSize( pfn ) )
    if not res['OK']:
      return self.__returnProblematicError( fileID, res )
    storageSize = res['Value']
    # size recorded in the bookkeeping catalog
    bkKCatalog = FileCatalog( ['BookkeepingDB'] )
    res = Utils.executeSingleFileOrDirWrapper( bkKCatalog.getFileSize( lfn ) )
    if not res['OK']:
      return self.__returnProblematicError( fileID, res )
    bookkeepingSize = res['Value']
    if bookkeepingSize == catalogSize == storageSize:
      gLogger.info( "CatalogPFNSizeMismatch replica (%d) matched all registered sizes." % fileID )
      return self.__updateReplicaToChecked( problematicDict )
    if ( catalogSize == bookkeepingSize ):
      gLogger.info( "CatalogPFNSizeMismatch replica (%d) found to mismatch the bookkeeping also" % fileID )
      res = Utils.executeSingleFileOrDirWrapper( self.fc.getReplicas( lfn ) )
      if not res['OK']:
        return self.__returnProblematicError( fileID, res )
      if len( res['Value'] ) <= 1:
        # never remove the last copy of the data, even if it looks corrupt
        gLogger.info( "CatalogPFNSizeMismatch replica (%d) has no other replicas." % fileID )
        return S_ERROR( "Not removing catalog file mismatch since the only replica" )
      else:
        gLogger.info( "CatalogPFNSizeMismatch replica (%d) has other replicas. Removing..." % fileID )
        res = self.dm.removeReplica( se, lfn )
        if not res['OK']:
          return self.__returnProblematicError( fileID, res )
        return self.__updateCompletedFiles( 'CatalogPFNSizeMismatch', fileID )
    if ( catalogSize != bookkeepingSize ) and ( bookkeepingSize == storageSize ):
      gLogger.info( "CatalogPFNSizeMismatch replica (%d) found to match the bookkeeping size" % fileID )
      res = self.__updateReplicaToChecked( problematicDict )
      if not res['OK']:
        return self.__returnProblematicError( fileID, res )
      return self.changeProblematicPrognosis( fileID, 'BKCatalogSizeMismatch' )
    gLogger.info( "CatalogPFNSizeMismatch replica (%d) all sizes found mismatch. Updating retry count" % fileID )
    # NOTE(review): another resolver in this file calls
    # self.server.incrementProblematicRetry - confirm which receiver is intended
    return self.incrementProblematicRetry( fileID )
Пример #9
0
  def resolveLFNCatalogMissing( self, problematicDict ):
    """ Resolve the LFNCatalogMissing prognosis: if the file reappeared in the
    catalog the problem is solved, otherwise the LFN is purged from all
    catalogs (safe here since it is a single file, not a directory tree).
    """
    lfn = problematicDict['LFN']
    fileID = problematicDict['FileID']

    existsRes = Utils.executeSingleFileOrDirWrapper( self.fc.exists( lfn ) )
    if not existsRes['OK']:
      return self.__returnProblematicError( fileID, existsRes )
    if existsRes['Value']:
      # the file is back in the catalog - nothing to clean up
      return self.__updateCompletedFiles( 'LFNCatalogMissing', fileID )
    removeRes = Utils.executeSingleFileOrDirWrapper( self.fc.removeFile( lfn ) )
    if not removeRes['OK']:
      return self.__returnProblematicError( fileID, removeRes )
    return self.__updateCompletedFiles( 'LFNCatalogMissing', fileID )
Пример #10
0
 def setUp( self ):
   """ Prepare the test fixture: resolve the destination directory's PFN and
   create it on the storage element under test.
   """
   self.numberOfFiles = 1
   self.storageElement = StorageElement( storageElementToTest )
   self.localSourceFile = "/etc/group"
   self.localFileSize = getSize( self.localSourceFile )
   self.destDirectory = "/lhcb/test/unit-test/TestStorageElement"
   res = Utils.executeSingleFileOrDirWrapper( self.storageElement.getPfnForLfn( self.destDirectory ) )
   # pass the resolved PFN string, not the whole result dict, to createDirectory
   # (the original handed the S_OK structure straight through)
   destinationDir = res['Value']
   res = self.storageElement.createDirectory( destinationDir, singleDirectory = True )
   self.assert_( res['OK'] )
Пример #11
0
 def __populateMetadata(self):
   """ Fetch the catalog metadata for self.lfn and cache checksum, GUID,
   size and status on this object. Returns the failed result on error. """
   metaRes = Utils.executeSingleFileOrDirWrapper( self.fc.getFileMetadata( self.lfn ) )
   if not metaRes['OK']:
     return metaRes
   meta = metaRes['Value']
   # apply each metadata field through its setter, in the original order
   for key, setter in ( ( 'Checksum', self.setChecksum ),
                        ( 'GUID', self.setGUID ),
                        ( 'Size', self.setSize ),
                        ( 'Status', self.setStatus ) ):
     setter( meta[key] )
   return S_OK()
Пример #12
0
  def __updateReplicaToChecked( self, problematicDict ):
    """ Flip the replica's catalog status to 'Checked' and close the
    problematic entry in the integrity DB. """
    replicaLFN = problematicDict['LFN']
    problemID = problematicDict['FileID']
    diagnosis = problematicDict['Prognosis']
    # the catalog call expects the full record with its status field flipped
    problematicDict['Status'] = 'Checked'

    statusRes = Utils.executeSingleFileOrDirWrapper( self.fc.setReplicaStatus( {replicaLFN: problematicDict} ) )

    if not statusRes['OK']:
      return self.__returnProblematicError( problemID, statusRes )
    gLogger.info( "%s replica (%d) is updated to Checked status" % ( diagnosis, problemID ) )
    return self.__updateCompletedFiles( diagnosis, problemID )
  def cleanTransformationLogFiles( self, directory ):
    """ clean up transformation logs from directory :directory:

    :param self: self reference
    :param str directory: folder name
    """
    self.log.info( "Removing log files found in the directory %s" % directory )
    removal = Utils.executeSingleFileOrDirWrapper( StorageElement( self.logSE ).removeDirectory( directory ) )
    if removal['OK']:
      self.log.info( "Successfully removed transformation log directory" )
      return S_OK()
    # propagate the failed result to the caller after logging it
    self.log.error( "Failed to remove log files", removal['Message'] )
    return removal
Пример #14
0
  def __getRegisteredPFNLFN( self, pfn, storageElement ):
    """ Resolve the LFN registered in the catalog for a physical file.

    :param str pfn: physical file name
    :param str storageElement: SE name
    :return: S_OK( lfn ), S_OK( False ) when the PFN is not registered,
             or an error result
    """
    res = StorageElement( storageElement ).getPfnForProtocol( pfn, withPort = False )
    if not res['OK']:
      gLogger.error( "Failed to get registered PFN for physical files", res['Message'] )
      return res
    # renamed the loop variable: it used to shadow the pfn parameter
    for failedPFN, error in res['Value']['Failed'].items():
      gLogger.error( 'Failed to obtain registered PFN for physical file', '%s %s' % ( failedPFN, error ) )
      return S_ERROR( 'Failed to obtain registered PFNs from physical file' )
    registeredPFN = res['Value']['Successful'][pfn]
    res = Utils.executeSingleFileOrDirWrapper( self.fc.getLFNForPFN( registeredPFN ) )
    if not res['OK']:
      if re.search( 'No such file or directory', res['Message'] ):
        # PFN not registered in the catalog: report "no LFN", not an error
        return S_OK( False )
      # any other failure: previously fell through to res['Value'] and
      # raised KeyError - return the failed result instead
      return res
    return S_OK( res['Value'] )
Пример #15
0
  def resolvePFNZeroSize( self, problematicDict ):
    """ This takes the problematic dictionary returned by the integrity DB and resolves the PFNZeroSize prognosis

    Checks the replica's size on storage: a missing file becomes PFNMissing,
    a zero-size file is removed and becomes PFNMissing, a file absent from
    the catalog becomes PFNNotRegistered, and a size differing from the
    catalog becomes CatalogPFNSizeMismatch.

    :param dict problematicDict: integrity DB record with 'PFN', 'SE' and 'FileID'
    """
    pfn = problematicDict['PFN']
    seName = problematicDict['SE']
    fileID = problematicDict['FileID']

    se = StorageElement( seName )

    res = Utils.executeSingleFileOrDirWrapper( se.getFileSize( pfn ) )
    if ( not res['OK'] ) and ( re.search( 'File does not exist', res['Message'] ) ):
      gLogger.info( "PFNZeroSize replica (%d) found to be missing. Updating prognosis" % problematicDict['FileID'] )
      return self.changeProblematicPrognosis( fileID, 'PFNMissing' )
    if not res['OK']:
      # any other failure: previously fell through to res['Value'] and
      # raised KeyError - report the error instead
      return self.__returnProblematicError( fileID, res )
    storageSize = res['Value']
    if storageSize == 0:
      res = Utils.executeSingleFileOrDirWrapper( se.removeFile( pfn ) )

      if not res['OK']:
        return self.__returnProblematicError( fileID, res )
      gLogger.info( "PFNZeroSize replica (%d) removed. Updating prognosis" % problematicDict['FileID'] )
      return self.changeProblematicPrognosis( fileID, 'PFNMissing' )
    res = self.__getRegisteredPFNLFN( pfn, seName )
    if not res['OK']:
      return self.__returnProblematicError( fileID, res )
    lfn = res['Value']
    if not lfn:
      gLogger.info( "PFNZeroSize replica (%d) not registered in catalog. Updating prognosis" % problematicDict['FileID'] )
      return self.changeProblematicPrognosis( fileID, 'PFNNotRegistered' )
    res = Utils.executeSingleFileOrDirWrapper( self.fc.getFileMetadata( lfn ) )

    if not res['OK']:
      return self.__returnProblematicError( fileID, res )
    catalogSize = res['Value']['Size']
    if catalogSize != storageSize:
      gLogger.info( "PFNZeroSize replica (%d) size found to differ from registered metadata. Updating prognosis" % problematicDict['FileID'] )
      return self.changeProblematicPrognosis( fileID, 'CatalogPFNSizeMismatch' )
    return self.__updateCompletedFiles( 'PFNZeroSize', fileID )
Пример #16
0
  def test_getDirectory( self ):
    """ Upload a locally created directory, download it again, and remove it,
    checking file counts and sizes for every operation.
    """
    print '\n\n#########################################################################\n\n\t\t\tGet directory test\n'
    directory = "%s/%s" % ( self.destDirectory, 'getDirectoryTest' )
    destDirectory = Utils.executeSingleFileOrDirWrapper( self.storageElement.getPfnForLfn( directory ) )['Value']
    # Create a local directory to upload
    localDir = '/tmp/unit-test'
    srcFile = '/etc/group'
    sizeOfLocalFile = getSize( srcFile )
    if not os.path.exists( localDir ):
      os.mkdir( localDir )
    # Populate it with numberOfFiles copies of the source file; the sleep
    # guarantees unique time.time()-based names
    for i in range( self.numberOfFiles ):
      shutil.copy( srcFile, '%s/testFile.%s' % ( localDir, time.time() ) )
      time.sleep( 1 )
    # Check that we can successfully upload the directory to the storage element
    dirDict = {destDirectory:localDir}
    putDirRes = self.storageElement.putDirectory( dirDict, singleDirectory = True )
    # Remove the local copy so the getDirectory below has to recreate it
    shutil.rmtree( localDir )
    getDirRes = self.storageElement.getDirectory( destDirectory, localPath = localDir, singleDirectory = True )
    # Now remove the remote directory
    removeDirRes = self.storageElement.removeDirectory( destDirectory, recursive = True, singleDirectory = True )
    # Clean up the locally downloaded directory
    shutil.rmtree( localDir )

    # Perform the checks for the put dir operation
    self.assert_( putDirRes['OK'] )
    self.assert_( putDirRes['Value'] )
    if putDirRes['Value']['Files']:
      self.assertEqual( putDirRes['Value']['Files'], self.numberOfFiles )
      self.assertEqual( putDirRes['Value']['Size'], self.numberOfFiles * sizeOfLocalFile )
    self.assert_( type( putDirRes['Value']['Files'] ) in [types.IntType, types.LongType] )
    self.assert_( type( putDirRes['Value']['Size'] ) in  [types.IntType, types.LongType] )
    # Perform the checks for the get directory operation
    self.assert_( getDirRes['OK'] )
    self.assert_( getDirRes['Value'] )
    if getDirRes['Value']['Files']:
      self.assertEqual( getDirRes['Value']['Files'], self.numberOfFiles )
      self.assertEqual( getDirRes['Value']['Size'], self.numberOfFiles * sizeOfLocalFile )
    self.assert_( type( getDirRes['Value']['Files'] ) in [types.IntType, types.LongType] )
    self.assert_( type( getDirRes['Value']['Size'] ) in [types.IntType, types.LongType] )
    # Perform the checks for the remove directory operation
    self.assert_( removeDirRes['OK'] )
    self.assert_( removeDirRes['Value'] )
    if removeDirRes['Value']['FilesRemoved']:
      self.assertEqual( removeDirRes['Value']['FilesRemoved'], self.numberOfFiles )
      self.assertEqual( removeDirRes['Value']['SizeRemoved'], self.numberOfFiles * sizeOfLocalFile )
    self.assert_( type( removeDirRes['Value']['FilesRemoved'] ) in [types.IntType, types.LongType] )
    self.assert_( type( removeDirRes['Value']['SizeRemoved'] ) in [types.IntType, types.LongType] )
Пример #17
0
  def test_createDirectory( self ):
    print '\n\n#########################################################################\n\n\t\t\tCreate directory test\n'
    directory = "%s/%s" % ( self.destDirectory, 'createDirectoryTest' )
    pfnForLfnRes = Utils.executeSingleFileOrDirWrapper( self.storageElement.getPfnForLfn( directory ) )
    directoryPfn = pfnForLfnRes['Value']
    createDirRes = self.storageElement.createDirectory( directoryPfn, singleDirectory = True )
    # Remove the target dir
    removeDirRes = self.storageElement.removeDirectory( directoryPfn, recursive = True, singleDirectory = True )

    # Check that the creation was done correctly
    self.assert_( createDirRes['OK'] )
    self.assert_( createDirRes['Value'] )
    # Remove the directory
    self.assert_( removeDirRes['OK'] )
    self.assert_( removeDirRes['Value'] )
  def __removeStorageDirectory( self, directory, storageElement ):
    """ wipe out all contents from :directory: at :storageElement:

    :param self: self reference
    :param str directory: path
    :param str storageElement: SE name
    :return: S_OK() on success or when the directory is already absent,
             otherwise the failed result
    """
    self.log.info( 'Removing the contents of %s at %s' % ( directory, storageElement ) )

    se = StorageElement( storageElement )

    # translate the catalog path into a storage (physical) path
    res = se.getPfnForLfn( [directory] )
    if not res['OK']:
      self.log.error( "Failed to get PFN for directory", res['Message'] )
      return res
    if directory in res['Value']['Failed']:
      self.log.verbose( 'Failed to obtain directory PFN from LFN', '%s %s' % ( directory, res['Value']['Failed'][directory] ) )
      return S_ERROR( 'Failed to obtain directory PFN from LFNs' )
    storageDirectory = res['Value']['Successful'][directory]

    # nothing to remove if the directory does not exist on the SE
    res = Utils.executeSingleFileOrDirWrapper( se.exists( storageDirectory ) )
    if not res['OK']:
      self.log.error( "Failed to obtain existance of directory", res['Message'] )
      return res
    exists = res['Value']
    if not exists:
      self.log.info( "The directory %s does not exist at %s " % ( directory, storageElement ) )
      return S_OK()
    # recursively wipe everything under the directory
    res = Utils.executeSingleFileOrDirWrapper( se.removeDirectory( storageDirectory, recursive = True ) )
    if not res['OK']:
      self.log.error( "Failed to remove storage directory", res['Message'] )
      return res
    self.log.info( "Successfully removed %d files from %s at %s" % ( res['Value']['FilesRemoved'],
                                                                     directory,
                                                                     storageElement ) )
    return S_OK()
Пример #19
0
  def test_isDirectory( self ):
    print '\n\n#########################################################################\n\n\t\t\tIs directory test\n'
    destDirectory = Utils.executeSingleFileOrDirWrapper( self.storageElement.getPfnForLfn( self.destDirectory ) )['Value']
    # Test that it is a directory
    isDirectoryRes = self.storageElement.isDirectory( destDirectory, singleDirectory = True )
    # Test that no existant dirs are handled correctly
    nonExistantDir = "%s/%s" % ( destDirectory, 'NonExistant' )
    nonExistantDirRes = self.storageElement.isDirectory( nonExistantDir, singleDirectory = True )

    # Check that it works with the existing dir
    self.assert_( isDirectoryRes['OK'] )
    self.assert_( isDirectoryRes['Value'] )
    # Check that we handle non existant correctly
    self.assertFalse( nonExistantDirRes['OK'] )
    expectedError = 'Directory does not exist'
    self.assert_( expectedError in nonExistantDirRes['Message'] )
Пример #20
0
 def getReplicas(self):
   """ Return the replicas of this file as an {SE: PFN} mapping, serving the
   cached catalog replicas when available and populating the cache from the
   catalog otherwise. """
   if not self.lfn:
     return S_ERROR('No LFN is known')
   # serve from the local cache if it has already been populated
   if self.catalogReplicas:
     cached = dict( ( replica.se, replica.pfn ) for replica in self.catalogReplicas )
     return S_OK(cached)
   res = Utils.executeSingleFileOrDirWrapper( self.fc.getCatalogReplicas( self.lfn ) )
   if not res['OK']:
     return res
   replicas = res['Value']
   # populate the cache with 'U' (unchecked) status entries
   for se, pfn in replicas.items():
     self.catalogReplicas.append( CatalogReplica( pfn = pfn, storageElement = se, status = 'U' ) )
   return S_OK(replicas)
Пример #21
0
  def test_putFile( self ):
    print '\n\n#########################################################################\n\n\t\t\tPut file test\n'
    destinationFilePath = '%s/testFile.%s' % ( self.destDirectory, time.time() )
    pfnForLfnRes = Utils.executeSingleFileOrDirWrapper( self.storageElement.getPfnForLfn( destinationFilePath ) )
    destinationPfn = pfnForLfnRes['Value']
    fileDict = {destinationPfn:self.localSourceFile}
    putFileRes = self.storageElement.putFile( fileDict, singleFile = True )
    # Now remove the destination file
    removeFileRes = self.storageElement.removeFile( destinationPfn, singleFile = True )

    # Check that the put was done correctly
    self.assert_( putFileRes['OK'] )
    self.assert_( putFileRes['Value'] )
    self.assertEqual( putFileRes['Value'], self.localFileSize )
    # Check that the removal was done correctly
    self.assert_( removeFileRes['OK'] )
    self.assert_( removeFileRes['Value'] )
Пример #22
0
  def getProxyForLFN( self, lfn ):
    """ get proxy for LFN

    Downloads a limited VOMS proxy for the owner of the LFN's directory,
    dumps it to a file and points X509_USER_PROXY at it.

    :param self: self reference
    :param str lfn: LFN
    :return: S_OK() on success, otherwise the failed result
    """
    dirMeta = Utils.executeSingleFileOrDirWrapper( self.fc.getDirectoryMetadata( lfn ) )
    if not dirMeta["OK"]:
      return dirMeta
    dirMeta = dirMeta["Value"]

    # VOMS roles are expected with a leading slash
    ownerRole = "/%s" % dirMeta["OwnerRole"] if not dirMeta["OwnerRole"].startswith( "/" ) else dirMeta["OwnerRole"]
    ownerDN = dirMeta["OwnerDN"]

    ownerProxy = None
    # Execute with server certificate; try/finally guarantees the flag is
    # restored even if the proxy download raises (the original could leave
    # UseServerCertificate set to 'true')
    gConfigurationData.setOptionInCFG( '/DIRAC/Security/UseServerCertificate', 'true' )
    try:
      for ownerGroup in getGroupsWithVOMSAttribute( ownerRole ):
        vomsProxy = gProxyManager.downloadVOMSProxy( ownerDN, ownerGroup, limited = True,
                                                     requiredVOMSAttribute = ownerRole )
        if not vomsProxy["OK"]:
          self.debug( "getProxyForLFN: failed to get VOMS proxy for %s role=%s: %s" % ( ownerDN,
                                                                                        ownerRole,
                                                                                        vomsProxy["Message"] ) )
          continue
        ownerProxy = vomsProxy["Value"]
        self.debug( "getProxyForLFN: got proxy for %s@%s [%s]" % ( ownerDN, ownerGroup, ownerRole ) )
        break
    finally:
      gConfigurationData.setOptionInCFG( '/DIRAC/Security/UseServerCertificate', 'false' )

    if not ownerProxy:
      return S_ERROR( "Unable to get owner proxy" )

    dumpToFile = ownerProxy.dumpAllToFile()
    if not dumpToFile["OK"]:
      self.error( "getProxyForLFN: error dumping proxy to file: %s" % dumpToFile["Message"] )
      return dumpToFile
    dumpToFile = dumpToFile["Value"]
    os.environ["X509_USER_PROXY"] = dumpToFile

    return S_OK()
Пример #23
0
  def resolvePFNUnavailable( self, problematicDict ):
    """ Resolve the PFNUnavailable prognosis: re-check the replica's storage
    metadata and reclassify it as PFNMissing, PFNLost, still Unavailable,
    or back to healthy (Checked).
    """
    pfn = problematicDict['PFN']
    se = problematicDict['SE']
    fileID = problematicDict['FileID']

    metaRes = Utils.executeSingleFileOrDirWrapper( StorageElement( se ).getFileMetadata( pfn ) )
    if ( not metaRes['OK'] ) and ( re.search( 'File does not exist', metaRes['Message'] ) ):
      # no longer merely Unavailable - the file has now vanished completely
      gLogger.info( "PFNUnavailable replica (%d) found to be missing. Updating prognosis" % fileID )
      return self.changeProblematicPrognosis( fileID, 'PFNMissing' )
    if ( not metaRes['OK'] ) or metaRes['Value']['Unavailable']:
      # either the metadata query failed for another reason or the replica
      # is still flagged Unavailable - try again later
      gLogger.info( "PFNUnavailable replica (%d) found to still be Unavailable" % fileID )
      return self.incrementProblematicRetry( fileID )
    if metaRes['Value']['Lost']:
      gLogger.info( "PFNUnavailable replica (%d) is now found to be Lost. Updating prognosis" % fileID )
      return self.changeProblematicPrognosis( fileID, 'PFNLost' )
    gLogger.info( "PFNUnavailable replica (%d) is no longer Unavailable" % fileID )
    # flip the replica back to a healthy status in the catalog
    return self.__updateReplicaToChecked( problematicDict )
Пример #24
0
    def finalize(self):
        """ register successfully transferred  files

        Once the FTS job reaches a terminal state, builds the LFN -> PFN/SE
        mapping for every file that finished and registers the replicas in
        the catalog with one bulk addReplica call, updating the registration
        counters and flagging files whose registration failed.
        """

        # only act once the FTS job is in a terminal state
        if self.Status not in FTSJob.FINALSTATES:
            return S_OK()

        startTime = time.time()
        targetSE = StorageElement(self.TargetSE)
        toRegister = [ftsFile for ftsFile in self if ftsFile.Status == "Finished"]
        toRegisterDict = {}
        for ftsFile in toRegister:
            pfn = Utils.executeSingleFileOrDirWrapper(
                targetSE.getPfnForProtocol(ftsFile.TargetSURL, protocol="SRM2", withPort=False)
            )
            # files whose PFN cannot be resolved are silently skipped here;
            # they are not counted in the registration totals
            if not pfn["OK"]:
                continue
            pfn = pfn["Value"]
            toRegisterDict[ftsFile.LFN] = {"PFN": pfn, "SE": self.TargetSE}

        if toRegisterDict:
            self._regTotal += len(toRegisterDict)
            register = self._fc.addReplica(toRegisterDict)
            # registration time includes the PFN resolution loop above
            self._regTime += time.time() - startTime
            if not register["OK"]:
                # FIXME: shouldn't be a print!
                for ftsFile in toRegister:
                    ftsFile.Error = "AddCatalogReplicaFailed"
                    print ftsFile.Error
                return register
            register = register["Value"]
            self._regSuccess += len(register.get("Successful", {}))
            failedFiles = register.get("Failed", {})
            # FIXME
            for ftsFile in toRegister:
                if ftsFile.LFN in failedFiles:
                    ftsFile.Error = "AddCatalogReplicaFailed"
                    print ftsFile.Error

        return S_OK()
Пример #25
0
  def getProxyForLFN( self, lfn ):
    """ get proxy for lfn

    Looks up the owner (DN and VOMS role) of the directory holding ``lfn``,
    downloads a limited VOMS proxy for that owner, dumps it to a file and
    points X509_USER_PROXY at it, making it the active credential.

    :param str lfn: LFN
    :return: S_ERROR or S_OK( "/path/to/proxy/file" )
    """
    dirMeta = Utils.executeSingleFileOrDirWrapper( self.fc.getDirectoryMetadata( lfn ) )
    if not dirMeta["OK"]:
      return dirMeta
    dirMeta = dirMeta["Value"]

    # VOMS roles are absolute, i.e. they carry a leading slash
    ownerRole = "/%s" % dirMeta["OwnerRole"] if not dirMeta["OwnerRole"].startswith( "/" ) else dirMeta["OwnerRole"]
    ownerDN = dirMeta["OwnerDN"]

    ownerProxy = None
    # Try every group carrying the required VOMS attribute until one yields a proxy
    for ownerGroup in getGroupsWithVOMSAttribute( ownerRole ):
      vomsProxy = gProxyManager.downloadVOMSProxy( ownerDN, ownerGroup, limited = True,
                                                   requiredVOMSAttribute = ownerRole )
      if not vomsProxy["OK"]:
        self.log.debug( "getProxyForLFN: failed to get VOMS proxy for %s role=%s: %s" % ( ownerDN,
                                                                                          ownerRole,
                                                                                          vomsProxy["Message"] ) )
        continue
      ownerProxy = vomsProxy["Value"]
      self.log.debug( "getProxyForLFN: got proxy for %s@%s [%s]" % ( ownerDN, ownerGroup, ownerRole ) )
      break

    if not ownerProxy:
      return S_ERROR( "Unable to get owner proxy" )

    dumpToFile = ownerProxy.dumpAllToFile()
    if not dumpToFile["OK"]:
      self.log.error( "getProxyForLFN: error dumping proxy to file: %s" % dumpToFile["Message"] )
      return dumpToFile
    dumpToFile = dumpToFile["Value"]
    # Make the freshly dumped proxy the active credential
    os.environ["X509_USER_PROXY"] = dumpToFile
    # BUGFIX: return the path wrapped in S_OK as documented above -- callers
    # test the result with ["OK"], which would fail on a bare string
    return S_OK( dumpToFile )
Пример #26
0
  def resolveSource( self ):
    """ resolve source SE eligible for submission

    For every non-failed file in self.fileDict: assigns the SRM2 source SURL,
    validates the source replica against the catalogue (existence, availability,
    size, checksum) and prestages replicas that are not yet cached on the
    source SE.  Per-file problems are recorded on the file entries; the method
    itself only fails when no file is eligible or metadata cannot be fetched.

    :param self: self reference
    :return: S_OK() or S_ERROR
    """

    # Avoid resolving sources twice
    if self.sourceResolved:
      return S_OK()
    # Only resolve files that need a transfer
    toResolve = [ lfn for lfn in self.fileDict if self.fileDict[lfn].get( "Status", "" ) != "Failed" ]
    if not toResolve:
      return S_OK()
    # Refresh the catalogue metadata and replica caches for the candidates
    res = self.__updateMetadataCache( toResolve )
    if not res['OK']:
      return res
    res = self.__updateReplicaCache( toResolve )
    if not res['OK']:
      return res

    # Define the source URLs
    for lfn in toResolve:
      replicas = self.catalogReplicas.get( lfn, {} )
      if self.sourceSE not in replicas:
        gLogger.warn( "resolveSource: skipping %s - not replicas at SourceSE %s" % ( lfn, self.sourceSE ) )
        self.__setFileParameter( lfn, 'Reason', "No replica at SourceSE" )
        self.__setFileParameter( lfn, 'Status', 'Failed' )
        continue
      # Fix first the PFN (falling back to the catalogued replica PFN on failure)
      pfn = self.oSourceSE.getPfnForLfn( lfn ).get( 'Value', {} ).get( 'Successful', {} ).get( lfn, replicas[self.sourceSE] )
      res = Utils.executeSingleFileOrDirWrapper( self.oSourceSE.getPfnForProtocol( pfn, protocol = 'SRM2', withPort = True ) )
      if not res['OK']:
        gLogger.warn( "resolveSource: skipping %s - %s" % ( lfn, res["Message"] ) )
        self.__setFileParameter( lfn, 'Reason', res['Message'] )
        self.__setFileParameter( lfn, 'Status', 'Failed' )
        continue
      res = self.setSourceSURL( lfn, res['Value'] )
      if not res['OK']:
        gLogger.warn( "resolveSource: skipping %s - %s" % ( lfn, res["Message"] ) )
        self.__setFileParameter( lfn, 'Reason', res['Message'] )
        self.__setFileParameter( lfn, 'Status', 'Failed' )
        continue

    # Build a source SURL -> LFN map for the files that got a source assigned
    toResolve = {}
    for lfn in self.fileDict:
      if "Source" in self.fileDict[lfn]:
        toResolve[self.fileDict[lfn]['Source']] = lfn
    if not toResolve:
      return S_ERROR( "No eligible Source files" )

    # Get metadata of the sources, to check for existance, availability and caching
    res = self.oSourceSE.getFileMetadata( toResolve.keys() )
    if not res['OK']:
      return S_ERROR( "Failed to check source file metadata" )

    # Files the SE could not stat: distinguish 'missing' from other errors
    for pfn, error in res['Value']['Failed'].items():
      lfn = toResolve[pfn]
      if re.search( 'File does not exist', error ):
        gLogger.warn( "resolveSource: skipping %s - source file does not exists" % lfn )
        self.__setFileParameter( lfn, 'Reason', "Source file does not exist" )
        self.__setFileParameter( lfn, 'Status', 'Failed' )
      else:
        gLogger.warn( "resolveSource: skipping %s - failed to get source metadata" % lfn )
        self.__setFileParameter( lfn, 'Reason', "Failed to get Source metadata" )
        self.__setFileParameter( lfn, 'Status', 'Failed' )
    toStage = []

    nbStagedFiles = 0
    # Validate each stat-able source replica against the catalogue record
    for pfn, metadata in res['Value']['Successful'].items():
      lfn = toResolve[pfn]
      lfnStatus = self.fileDict.get( lfn, {} ).get( 'Status' )
      if metadata['Unavailable']:
        gLogger.warn( "resolveSource: skipping %s - source file unavailable" % lfn )
        self.__setFileParameter( lfn, 'Reason', "Source file Unavailable" )
        self.__setFileParameter( lfn, 'Status', 'Failed' )
      elif metadata['Lost']:
        gLogger.warn( "resolveSource: skipping %s - source file lost" % lfn )
        self.__setFileParameter( lfn, 'Reason', "Source file Lost" )
        self.__setFileParameter( lfn, 'Status', 'Failed' )
      elif not metadata['Cached']:
        # Not on disk cache: queue for prestaging unless already staging
        if lfnStatus != 'Staging':
          toStage.append( pfn )
      elif metadata['Size'] != self.catalogMetadata[lfn]['Size']:
        gLogger.warn( "resolveSource: skipping %s - source file size mismatch" % lfn )
        self.__setFileParameter( lfn, 'Reason', "Source size mismatch" )
        self.__setFileParameter( lfn, 'Status', 'Failed' )
      elif self.catalogMetadata[lfn]['Checksum'] and metadata['Checksum'] and \
            not ( compareAdler( metadata['Checksum'], self.catalogMetadata[lfn]['Checksum'] ) ):
        gLogger.warn( "resolveSource: skipping %s - source file checksum mismatch" % lfn )
        self.__setFileParameter( lfn, 'Reason', "Source checksum mismatch" )
        self.__setFileParameter( lfn, 'Status', 'Failed' )
      elif lfnStatus == 'Staging':
        # file that was staging is now cached
        self.__setFileParameter( lfn, 'Status', 'Waiting' )
        nbStagedFiles += 1

    # Some files were being staged
    if nbStagedFiles:
      self.log.info( 'resolveSource: %d files have been staged' % nbStagedFiles )

    # Launching staging of files not in cache
    if toStage:
      gLogger.warn( "resolveSource: %s source files not cached, prestaging..." % len( toStage ) )
      stage = self.oSourceSE.prestageFile( toStage )
      if not stage["OK"]:
        # Bulk prestage request failed: fail every queued file
        gLogger.error( "resolveSource: error is prestaging - %s" % stage["Message"] )
        for pfn in toStage:
          lfn = toResolve[pfn]
          self.__setFileParameter( lfn, 'Reason', stage["Message"] )
          self.__setFileParameter( lfn, 'Status', 'Failed' )
      else:
        for pfn in toStage:
          lfn = toResolve[pfn]
          if pfn in stage['Value']['Successful']:
            self.__setFileParameter( lfn, 'Status', 'Staging' )
          elif pfn in stage['Value']['Failed']:
            self.__setFileParameter( lfn, 'Reason', stage['Value']['Failed'][pfn] )
            self.__setFileParameter( lfn, 'Status', 'Failed' )

    self.sourceResolved = True
    return S_OK()
Пример #27
0
  def __executeMethod( self, lfn, *args, **kwargs ):
    """ Forward the call to each storage in turn until one works.
        The method to be executed is stored in self.methodName
        :param lfn : string, list or dictionnary
        :param *args : variable amount of non-keyword arguments. SHOULD BE EMPTY
        :param **kwargs : keyword arguments
        :returns S_OK( { 'Failed': {lfn : reason} , 'Successful': {lfn : value} } )
                The Failed dict contains the lfn only if the operation failed on all the storages
                The Successful dict contains the value returned by the successful storages.
    """

    removedArgs = {}
    self.log.verbose( "StorageElement.__executeMethod : preparing the execution of %s" % ( self.methodName ) )

    # args should normaly be empty to avoid problem...
    if len( args ):
      self.log.verbose( "StorageElement.__executeMethod: args should be empty!%s" % args )
      # because there is normaly only one kw argument, it can be moved from args to kwargs
      methDefaultArgs = StorageElement.__defaultsArguments.get( self.methodName, {} ).keys()
      if len( methDefaultArgs ):
        kwargs[methDefaultArgs[0] ] = args[0]
        args = args[1:]
      self.log.verbose( "StorageElement.__executeMethod: put it in kwargs, but dirty and might be dangerous!args %s kwargs %s" % ( args, kwargs ) )


    # We check the deprecated arguments: they are stripped from kwargs but kept
    # in removedArgs for the backward-compatibility handling at the bottom
    for depArg in StorageElement.__deprecatedArguments:
      if depArg in kwargs:
        self.log.verbose( "StorageElement.__executeMethod: %s is not an allowed argument anymore. Please change your code!" % depArg )
        removedArgs[depArg] = kwargs[depArg]
        del kwargs[depArg]



    # Set default argument if any
    methDefaultArgs = StorageElement.__defaultsArguments.get( self.methodName, {} )
    for argName in methDefaultArgs:
      if argName not in kwargs:
        self.log.debug( "StorageElement.__executeMethod : default argument %s for %s not present.\
         Setting value %s." % ( argName, self.methodName, methDefaultArgs[argName] ) )
        kwargs[argName] = methDefaultArgs[argName]


    # Normalise the lfn argument into a dict { lfn : value }
    if type( lfn ) in StringTypes:
      lfnDict = {lfn:False}
    elif type( lfn ) == ListType:
      lfnDict = {}
      for url in lfn:
        lfnDict[url] = False
    elif type( lfn ) == DictType:
      lfnDict = lfn.copy()
    else:
      errStr = "StorageElement.__executeMethod: Supplied lfns must be string, list of strings or a dictionary."
      self.log.debug( errStr )
      return S_ERROR( errStr )

    self.log.verbose( "StorageElement.__executeMethod: Attempting to perform '%s' operation with %s lfns." % ( self.methodName,
                                                                                                  len( lfnDict ) ) )

    res = self.isValid( operation = self.methodName )
    if not res['OK']:
      return res
    else:
      if not self.valid:
        return S_ERROR( self.errorReason )

    successful = {}
    failed = {}
    localSE = self.isLocalSE()['Value']
    # Try all of the storages one by one
    for storage in self.storages:
      # Determine whether to use this storage object
      res = storage.getParameters()
      useProtocol = True
      if not res['OK']:
        self.log.debug( "StorageElement.__executeMethod: Failed to get storage parameters.", "%s %s" % ( self.name,
                                                                                            res['Message'] ) )
        useProtocol = False
      else:
        protocolName = res['Value']['ProtocolName']
        if not lfnDict:
          useProtocol = False
          self.log.debug( "StorageElement.__executeMethod: No lfns to be attempted for %s protocol." % protocolName )
        elif not ( protocolName in self.remoteProtocols ) and not localSE:
          # If the SE is not local then we can't use local protocols
          useProtocol = False
          self.log.debug( "StorageElement.__executeMethod: Local protocol not appropriate for remote use: %s." % protocolName )
      if useProtocol:
        self.log.verbose( "StorageElement.__executeMethod: Generating %s protocol PFNs for %s." % ( len( lfnDict ),
                                                                                       protocolName ) )
        res = self.__generatePfnDict( lfnDict, storage )
        pfnDict = res['Value']  # pfn : lfn
        failed.update( res['Failed'] )
        if not len( pfnDict ):
          self.log.verbose( "StorageElement.__executeMethod No pfns generated for protocol %s." % protocolName )
        else:
          self.log.verbose( "StorageElement.__executeMethod: Attempting to perform '%s' for %s physical files" % ( self.methodName,
                                                                                                      len( pfnDict ) ) )
          # Resolve the method to call on the storage plug-in
          fcn = None
          if hasattr( storage, self.methodName ) and callable( getattr( storage, self.methodName ) ):
            fcn = getattr( storage, self.methodName )
          if not fcn:
            # BUGFIX: the method name was previously left as a literal '%s' in the message
            return S_ERROR( "StorageElement.__executeMethod: unable to invoke %s, it isn't a member function of storage" % self.methodName )

          pfnsToUse = {}  # pfn : the value of the lfn dictionary for the lfn of this pfn
          for pfn in pfnDict:
            pfnsToUse[pfn] = lfnDict[pfnDict[pfn]]

          res = fcn( pfnsToUse, *args, **kwargs )

          if not res['OK']:
            errStr = "StorageElement.__executeMethod: Completely failed to perform %s." % self.methodName
            self.log.debug( errStr, '%s for protocol %s: %s' % ( self.name, protocolName, res['Message'] ) )
            for lfn in pfnDict.values():
              if lfn not in failed:
                failed[lfn] = ''
              failed[lfn] += " %s" % ( res['Message'] )  # Concatenate! Not '=' :-)
          else:
            for pfn, lfn in pfnDict.items():
              if pfn not in res['Value']['Successful']:
                if lfn not in failed:
                  failed[lfn] = ''
                if pfn in res['Value']['Failed']:
                  failed[lfn] = "%s %s" % ( failed[lfn], res['Value']['Failed'][pfn] )
                else:
                  failed[lfn] = "%s %s" % ( failed[lfn], 'No error returned from plug-in' )
              else:
                # Success on this storage: record the value and stop retrying this lfn
                successful[lfn] = res['Value']['Successful'][pfn]
                if lfn in failed:
                  failed.pop( lfn )
                lfnDict.pop( lfn )


    # Ensure backward compatibility for singleFile and singleDirectory for the time of a version
    singleFileOrDir = removedArgs.get( "singleFile", False ) or removedArgs.get( "singleDirectory", False )

    retValue = S_OK( { 'Failed': failed, 'Successful': successful } )

    if singleFileOrDir:
      self.log.verbose( "StorageElement.__executeMethod : use Utils.executeSingleFileOrDirWrapper for backward compatibility. You should fix your code " )
      retValue = Utils.executeSingleFileOrDirWrapper( retValue )

    return retValue
Пример #28
0
  def removeFile( self, index, requestObj, subRequestAttrs, subRequestFiles ):
    """ action for 'removeFile' operation

    Attempts a bulk removal of all 'Waiting' files first; files that are
    physically gone get their catalogue entry cleaned up, the rest are retried
    one by one (switching to the file owner's proxy when a permission problem
    suggests the request belongs to a DataManager).  File statuses and the
    subrequest status are updated according to the outcome.

    :param self: self reference
    :param int index: subRequest index in execution order
    :param RequestContainer requestObj: request
    :param dict subRequestAttrs: subRequest's attributes
    :param dict subRequestFiles: subRequest's files
    """
    self.info( "removeFile: processing subrequest %s" % index )
    if requestObj.isSubRequestEmpty( index, "removal" )["Value"]:
      self.info( "removeFile: subrequest %s is empty, setting its status to 'Done'" % index )
      requestObj.setSubRequestStatus( index, "removal", "Done" )
      return S_OK( requestObj )

    # # only 'Waiting' files with a non-empty LFN are processed
    lfns = [ str( subRequestFile["LFN"] ) for subRequestFile in subRequestFiles
             if subRequestFile["Status"] == "Waiting" and  str( subRequestFile["LFN"] ) ]
    self.debug( "removeFile: about to remove %d files" % len( lfns ) )
    # # keep removal status for each file
    removalStatus = dict.fromkeys( lfns, "" )
    self.addMark( "RemoveFileAtt", len( lfns ) )

    # # bulk removal 1st
    exists = self.fc.exists( lfns )
    if not exists['OK']:
      self.error( "removeFile: unable to check existence of files", exists['Message'] )
      return exists
    # # only files still known to the catalogue need removal
    exists = exists['Value']['Successful']
    lfns = [lfn for lfn in exists if exists[lfn]]
    toRemove = []
    if lfns:
      bulkRemoval = self.dm.removeFile( lfns )
      if not bulkRemoval["OK"]:
        # # total failure: attribute the same error to every file
        bulkRemoval = { 'Failed' : dict.fromkeys( lfns, bulkRemoval['Message'] )}
      else:
        bulkRemoval = bulkRemoval["Value"]
      failedLfns = bulkRemoval["Failed"] if "Failed" in bulkRemoval else []
      for lfn in removalStatus:
        if lfn in failedLfns and "no such file or directory" in str( bulkRemoval["Failed"][lfn] ).lower():
          # # physically gone already: just remove the stale catalogue entry
          removalStatus[lfn] = bulkRemoval["Failed"][lfn]
          removeCatalog = Utils.executeSingleFileOrDirWrapper(self.fc.removeFile( lfn ) )
          if not removeCatalog["OK"]:
            removalStatus[lfn] = removeCatalog["Message"]
            continue
        else:
          toRemove.append( lfn )

    # # loop over LFNs to remove
    for lfn in toRemove:
      self.debug( "removeFile: processing file %s" % lfn )
      try:
        # # try to remove using proxy already defined in os.environ
        removal = self.dm.removeFile( lfn )
        # # file is not existing?
        if not removal["OK"] and "no such file or directory" in str( removal["Message"] ).lower():
          removalStatus[lfn] = removal["Message"]
          continue
        # # not OK but request belongs to DataManager?
        # NOTE(review): operator precedence makes this condition
        # (not requestOwnerDN and writeNotPermitted) or permissionDeniedInFailed;
        # presumably 'not requestOwnerDN and (writeNotPermitted or permissionDenied)'
        # was intended -- confirm before changing
        if not self.requestOwnerDN and \
           ( not removal["OK"] and "Write access not permitted for this credential." in removal["Message"] ) or \
           ( removal["OK"] and "Failed" in removal["Value"] and
             lfn in removal["Value"]["Failed"] and
             "permission denied" in str( removal["Value"]["Failed"][lfn] ).lower() ):
          self.debug( "removeFile: retrieving proxy for %s" % lfn )
          getProxyForLFN = self.getProxyForLFN( lfn )
          # # can't get correct proxy? continue...
          if not getProxyForLFN["OK"]:
            self.warn( "removeFile: unable to get proxy for file %s: %s" % ( lfn, getProxyForLFN["Message"] ) )
            removal = getProxyForLFN
          else:
            # # you're a DataManager, retry with the new one proxy
            removal = self.dm.removeFile( lfn )
      finally:
        # # make sure DataManager proxy is set back in place
        if not self.requestOwnerDN and self.dataManagerProxy():
          # # remove temp proxy
          if os.environ["X509_USER_PROXY"] != self.dataManagerProxy():
            os.unlink( os.environ["X509_USER_PROXY"] )
          # # put back DataManager proxy
          os.environ["X509_USER_PROXY"] = self.dataManagerProxy()

      # # save error
      if not removal["OK"]:
        removalStatus[lfn] = removal["Message"]
        continue
      # # check fail reason, filter out missing files
      removal = removal["Value"]
      if lfn in removal["Failed"]:
        removalStatus[lfn] = removal["Failed"][lfn]

    # # counters
    filesRemoved = 0
    filesFailed = 0
    subRequestError = []
    # # update File statuses and errors
    for lfn, error in removalStatus.items():

      # # set file error if any
      if error:
        self.debug( "removeFile: %s: %s" % ( lfn, str( error ) ) )
        # # error strings are quote-stripped and truncated to fit the DB column
        fileError = str( error ).replace( "'", "" )[:255]
        fileError = requestObj.setSubRequestFileAttributeValue( index, "removal", lfn,
                                                                "Error", fileError )
        if not fileError["OK"]:
          self.error( "removeFile: unable to set Error for %s: %s" % ( lfn, fileError["Message"] ) )
      # # no error? file not exists? - we are able to recover
      if not error or "no such file or directory" in str( error ).lower() or \
            "file does not exist" in str( error ).lower():
        filesRemoved += 1
        self.info( "removeFile: successfully removed %s" % lfn )
        updateStatus = requestObj.setSubRequestFileAttributeValue( index, "removal", lfn, "Status", "Done" )
        if not updateStatus["OK"]:
          self.error( "removeFile: unable to change status to 'Done' for %s" % lfn )
      else:
        filesFailed += 1
        self.warn( "removeFile: unable to remove file %s : %s" % ( lfn, error ) )
        errorStr = str( error )
        if type( error ) == type( {} ):
          # # dict errors are flattened to 'key:value' pairs for the summary
          errorStr = ";".join( [ "%s:%s" % ( key, value ) for key, value in error.items() ] )
        errorStr = errorStr.replace( "'", "" )
        subRequestError.append( "%s:%s" % ( lfn, errorStr ) )

    self.addMark( "RemoveFileDone", filesRemoved )
    self.addMark( "RemoveFileFail", filesFailed )

    # # all 'Done'?
    if requestObj.isSubRequestDone( index, "removal" )["Value"]:
      self.info( "removeFile: all files processed, setting subrequest status to 'Done'" )
      requestObj.setSubRequestStatus( index, "removal", "Done" )
    elif filesFailed:
      self.info( "removeFile: all files processed, %s files failed to remove" % filesFailed )
      subRequestError = ";".join( subRequestError )[:255]
      subRequestError = requestObj.setSubRequestAttributeValue( index, "removal", "Error", subRequestError )
    return S_OK( requestObj )
Пример #29
0
 def tearDown( self ):
   """ Clean up after a test: recursively remove the destination directory from the SE. """
   targetPfn = Utils.executeSingleFileOrDirWrapper( self.storageElement.getPfnForLfn( self.destDirectory ) )
   removalRes = self.storageElement.removeDirectory( targetPfn, recursive = True, singleDirectory = True )
   self.assert_( removalRes['OK'] )
Пример #30
0
  def resolvePFNNotRegistered( self, problematicDict ):
    """ This takes the problematic dictionary returned by the integrity DB and resolved the PFNNotRegistered prognosis

    If the LFN is gone from the catalogue the storage file is deleted; if the
    storage file is gone the problem is closed; a Lost/Unavailable replica is
    reclassified/retried.  Otherwise the replica is registered in the
    catalogue (after remapping the SE name -- see HACK below) and the size is
    cross-checked against the catalogue.
    """
    lfn = problematicDict['LFN']
    pfn = problematicDict['PFN']
    seName = problematicDict['SE']
    fileID = problematicDict['FileID']

    se = StorageElement( seName )
    res = Utils.executeSingleFileOrDirWrapper( self.fc.exists( lfn ) )
    if not res['OK']:
      return self.__returnProblematicError( fileID, res )
    if not res['Value']:
      # The file does not exist in the catalog
      # so the orphan storage file is removed and the problem closed
      res = Utils.executeSingleFileOrDirWrapper( se.removeFile( pfn ) )
      if not res['OK']:
        return self.__returnProblematicError( fileID, res )
      return self.__updateCompletedFiles( 'PFNNotRegistered', fileID )
    res = Utils.executeSingleFileOrDirWrapper( se.getFileMetadata( pfn ) )
    if ( not res['OK'] ) and ( re.search( 'File does not exist', res['Message'] ) ):
      # Storage file gone as well: nothing left to register
      gLogger.info( "PFNNotRegistered replica (%d) found to be missing." % fileID )
      return self.__updateCompletedFiles( 'PFNNotRegistered', fileID )
    elif not res['OK']:
      return self.__returnProblematicError( fileID, res )
    storageMetadata = res['Value']
    if storageMetadata['Lost']:
      gLogger.info( "PFNNotRegistered replica (%d) found to be Lost. Updating prognosis" % fileID )
      return self.changeProblematicPrognosis( fileID, 'PFNLost' )
    if storageMetadata['Unavailable']:
      gLogger.info( "PFNNotRegistered replica (%d) found to be Unavailable. Updating retry count" % fileID )
      return self.incrementProblematicRetry( fileID )

    # HACK until we can obtain the space token descriptions through GFAL
    # NOTE(review): LHCb-specific SE-name remapping derived from the file's
    # cached/migrated state and LFN path -- confirm the conventions still hold
    site = seName.split( '_' )[0].split( '-' )[0]
    if not storageMetadata['Cached']:
      if lfn.endswith( '.raw' ):
        seName = '%s-RAW' % site
      else:
        seName = '%s-RDST' % site
    elif storageMetadata['Migrated']:
      if lfn.startswith( '/lhcb/data' ):
        seName = '%s_M-DST' % site
      else:
        seName = '%s_MC_M-DST' % site
    else:
      if lfn.startswith( '/lhcb/data' ):
        seName = '%s-DST' % site
      else:
        seName = '%s_MC-DST' % site

    problematicDict['SE'] = seName
    res = se.getPfnForProtocol( pfn, withPort = False )
    if not res['OK']:
      return self.__returnProblematicError( fileID, res )
    # Any per-PFN failure aborts the resolution (the loop returns on the first entry)
    for pfn, error in res['Value']['Failed'].items():
      gLogger.error( 'Failed to obtain registered PFN for physical file', '%s %s' % ( pfn, error ) )
      return S_ERROR( 'Failed to obtain registered PFNs from physical file' )
    problematicDict['PFN'] = res['Value']['Successful'][pfn]

    # Register the replica, then cross-check the catalogue size against storage
    res = Utils.executeSingleFileOrDirWrapper( self.fc.addReplica( {lfn:problematicDict} ) )
    if not res['OK']:
      return self.__returnProblematicError( fileID, res )
    res = Utils.executeSingleFileOrDirWrapper( self.fc.getFileMetadata( lfn ) )
    if not res['OK']:
      return self.__returnProblematicError( fileID, res )
    if res['Value']['Size'] != storageMetadata['Size']:
      gLogger.info( "PFNNotRegistered replica (%d) found with catalog size mismatch. Updating prognosis" % fileID )
      return self.changeProblematicPrognosis( fileID, 'CatalogPFNSizeMismatch' )
    return self.__updateCompletedFiles( 'PFNNotRegistered', fileID )