Example 1
 def __copyToExternalSE(self, localFilePath, sbPath):
     """
 Copy uploaded file to external SE
 """
     try:
         rm = ReplicaManager()
         result = rm.put(sbPath, localFilePath, self.__externalSEName)
         if not result['OK']:
             return result
         if 'Successful' not in result['Value']:
             gLogger.verbose("Oops, no successful transfers there",
                             str(result))
             return S_ERROR(
                 "RM returned OK to the action but no successful transfers were there"
             )
         okTrans = result['Value']['Successful']
         if sbPath not in okTrans:
             gLogger.verbose(
                 "Ooops, SB transfer wasn't in the successful ones",
                 str(result))
             return S_ERROR(
                 "RM returned OK to the action but SB transfer wasn't in the successful ones"
             )
         return S_OK((self.__externalSEName, okTrans[sbPath]))
     except Exception, e:
         return S_ERROR("Error while moving sandbox to SE: %s" % str(e))
Example 2
  def __init__( self, *args, **kwargs ):
    ''' c'tor
    '''
    AgentModule.__init__( self, *args, **kwargs )
    # # replica manager
    self.replicaManager = ReplicaManager()
    # # transformation client
    self.transClient = TransformationClient()
    # # wms client
    self.wmsClient = WMSClient()
    # # request client
    self.requestClient = RequestClient()
    # # file catalog client
    self.metadataClient = FileCatalogClient()

    # # placeholders for CS options

    # # transformations types
    self.transformationTypes = None
    # # directory locations
    self.directoryLocations = None
    # # transformation metadata
    self.transfidmeta = None
    # # archive period in days
    self.archiveAfter = None
    # # active SEs
    self.activeStorages = None
    # # transformation log SEs
    self.logSE = None
    # # enable/disable execution
    self.enableFlag = None
Example 3
  def setReplicaProblematic(self,lfn,se,pfn='',reason='Access failure'):
    """ Set replica status to Problematic in the File Catalog
    @param lfn: lfn of the problematic file
    @param se: storage element
    @param pfn: physical file name
    @param reason: as name suggests...
    @return: S_OK()
    """

    rm = ReplicaManager()
    source = "Job %d at %s" % (self.jobID,DIRAC.siteName())
    result = rm.setReplicaProblematic((lfn,pfn,se,reason),source)
    if not result['OK'] or result['Value']['Failed']:
      # We have failed the report, let's attempt the Integrity DB failover
      integrityDB = RPCClient('DataManagement/DataIntegrity',timeout=120)
      fileMetadata = {'Prognosis':reason,'LFN':lfn,'PFN':pfn,'StorageElement':se}
      result = integrityDB.insertProblematic(source,fileMetadata)
      if not result['OK']:
        # Add it to the request
        if self.workflow_commons.has_key('Request'):
          request  = self.workflow_commons['Request']
          subrequest = DISETSubRequest(result['rpcStub']).getDictionary()
          request.addSubRequest(subrequest,'integrity')

    return S_OK()
Example 4
    def __init__(self, operation=None, csPath=None):
        """c'tor

    :param self: self reference
    :param Operation operation: Operation instance
    :param str csPath: CS path for this handler
    """
        # # base classes ctor
        super(PutAndRegister, self).__init__(operation, csPath)
        # # gMonitor stuff
        gMonitor.registerActivity("PutAtt", "File put attempts",
                                  "RequestExecutingAgent", "Files/min",
                                  gMonitor.OP_SUM)
        gMonitor.registerActivity("PutFail", "Failed file puts",
                                  "RequestExecutingAgent", "Files/min",
                                  gMonitor.OP_SUM)
        gMonitor.registerActivity("PutOK", "Successful file puts",
                                  "RequestExecutingAgent", "Files/min",
                                  gMonitor.OP_SUM)
        gMonitor.registerActivity("RegisterOK",
                                  "Successful file registrations",
                                  "RequestExecutingAgent", "Files/min",
                                  gMonitor.OP_SUM)
        gMonitor.registerActivity("RegisterFail", "Failed file registrations",
                                  "RequestExecutingAgent", "Files/min",
                                  gMonitor.OP_SUM)

        self.rm = ReplicaManager()
Example 5
def registerInputData(filepath, size, prefix='/cepc/lustre-ro'):
    infoDict = {}
    infoDict['PFN'] = ''
    infoDict['Size'] = size
    infoDict['SE'] = 'IHEP-STORM'
    infoDict['GUID'] = commands.getoutput('uuidgen')
    infoDict['Checksum'] = ''
    fileDict = {}
    lfn = prefix + filepath
    fileDict[lfn] = infoDict
    fcc = FileCatalogClient('DataManagement/FileCatalog')
    rm = ReplicaManager()
    result = {}
    result['lfn'] = lfn
    result['is_registered'] = False

    # query the DFC
    for repeatTimes in range(10):
        is_registered = fcc.isFile(lfn)
        if (is_registered['OK']
                and is_registered['Value']['Successful'].has_key(lfn)):
            break
#         else:
#             continue
    if not is_registered['OK']:  # query failed
        result['is_registered'] = 'query error. unknown'
        print 'Failed to query %s in DFC. Error message is %s' % (
            lfn, is_registered['Message'])
        return result

    if is_registered['Value']['Successful'][lfn]:  # already registered
        result['is_registered'] = True
        for repeatTimes in range(10):
            is_removed = rm.removeCatalogFile(lfn)  # remove from catalog
            if (is_removed['OK']
                    and is_removed['Value']['Successful'][lfn]['FileCatalog']):
                result['is_removed'] = True
                break
#             else:
#                 continue
        if not is_removed['OK']:  # removal failed
            result['is_removed'] = 'remove error'
            print 'Failed to remove %s from DFC.' % lfn
    #add
    for repeatTimes in range(10):
        is_added = fcc.addFile(fileDict)  #add/register
        if (is_added['OK'] and is_added['Value']['Successful'][lfn]):
            result['OK'] = True
            return result
#         else:
#             continue
    if not is_added['OK']:  # add failed
        result['OK'] = False
        result['Message'] = is_added['Message']
    elif is_added['Value']['Failed']:
        result['OK'] = False
        result['Message'] = 'Failed to add file ' + lfn
    return result
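A possible invocation of the registerInputData() helper above; the file path and size are placeholder values, and the DIRAC clients imported by the surrounding module (FileCatalogClient, ReplicaManager, commands) plus a valid grid proxy are assumed.

# Hypothetical call with made-up arguments.
res = registerInputData('/ihepfs/data/user/sample.root', 1024)
if res.get('OK'):
    print 'registered as', res['lfn']
else:
    print 'registration failed:', res.get('Message', 'unknown')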
Example 6
 def __init__(self, argumentsDict):
     """ Standard constructor
 """
     self.name = COMPONENT_NAME
     self.log = gLogger.getSubLogger(self.name)
     self.inputData = argumentsDict['InputData']
     self.configuration = argumentsDict['Configuration']
     self.fileCatalogResult = argumentsDict['FileCatalog']
     self.jobID = None
     self.replicaManager = ReplicaManager()
Example 7
    def initialize(self):

        self.RequestDBClient = RequestClient()
        self.ReplicaManager = ReplicaManager()
        # This sets the Default Proxy to used as that defined under
        # /Operations/Shifter/DataManager
        # the shifterProxy option in the Configuration can be used to change this default.
        self.am_setOption('shifterProxy', 'DataManager')

        return S_OK()
Example 8
  def setUp( self ):
    super( IntegrationTest, self ).setUp()

    rm = ReplicaManager()
    res = rm.removeFile( ['/lhcb/testCfg/testVer/LOG/00012345/0006/00012345_00067890.tar',
                          '/lhcb/testCfg/testVer/SIM/00012345/0006/00012345_00067890_1.sim'],
                        force = True )
    if not res['OK']:
      print "Could not remove files", res['Message']
      exit( 1 )
Example 9
  def initialize( self ):
    self.replicaManager = ReplicaManager()
    #self.stagerClient = StorageManagerClient()
    self.dataIntegrityClient = DataIntegrityClient()
    self.storageDB = StorageManagementDB()
    # This sets the Default Proxy to used as that defined under
    # /Operations/Shifter/DataManager
    # the shifterProxy option in the Configuration can be used to change this default.
    self.am_setOption( 'shifterProxy', 'DataManager' )

    return S_OK()
Example 10
def registerInputData(filepath, size, prefix = '/cepc/lustre-ro'):
    infoDict = {}
    infoDict['PFN'] = ''
    infoDict['Size'] = size
    infoDict['SE'] = 'IHEP-STORM'
    infoDict['GUID'] = commands.getoutput('uuidgen')
    infoDict['Checksum'] = ''
    fileDict = {}
    lfn =  prefix + filepath
    fileDict[lfn] = infoDict
    fcc = FileCatalogClient('DataManagement/FileCatalog')
    rm = ReplicaManager()
    result = {}
    result['lfn'] = lfn
    result['is_registered'] = False
    
    # query the DFC
    for repeatTimes in range(10):
        is_registered = fcc.isFile(lfn)
        if (is_registered['OK'] and is_registered['Value']['Successful'].has_key(lfn)):
            break
#         else:
#             continue
    if not is_registered['OK']:  # query failed
        result['is_registered'] = 'query error. unknown'
        print 'Failed to query %s in DFC. Error message is %s' % (lfn, is_registered['Message'])
        return result

    if is_registered['Value']['Successful'][lfn]:  # already registered
        result['is_registered'] = True
        for repeatTimes in range(10):
            is_removed = rm.removeCatalogFile(lfn)  # remove from catalog
            if (is_removed['OK'] and is_removed['Value']['Successful'][lfn]['FileCatalog']):
                result['is_removed'] = True
                break
#             else:
#                 continue
        if not is_removed['OK']:  # removal failed
            result['is_removed'] = 'remove error'
            print 'Failed to remove %s from DFC.' % lfn
    #add       
    for repeatTimes in range(10):
        is_added = fcc.addFile(fileDict)#add/register
        if (is_added['OK'] and is_added['Value']['Successful'][lfn]):
            result['OK'] = True
            return result
#         else:
#             continue
    if not is_added['OK']:  # add failed
        result['OK'] = False
        result['Message'] = is_added['Message']
    elif is_added['Value']['Failed']:
        result['OK'] = False
        result['Message'] = 'Failed to add file ' + lfn
    return result
Example 11
  def initialize( self ):
    self.replicaManager = ReplicaManager()
    self.stagerClient = StorageManagerClient()
    self.pinLifeTime = 60 * 60 * 24 * 7 # 7 days

    # This sets the Default Proxy to used as that defined under 
    # /Operations/Shifter/DataManager
    # the shifterProxy option in the Configuration can be used to change this default.
    self.am_setOption( 'shifterProxy', 'DataManager' )

    return S_OK()
Example 12
    def __init__(self, requestObject=False):
        """ Constructor function, can specify request object to instantiate 
        FailoverTransfer or a new request object is created.
    """
        self.log = gLogger.getSubLogger("FailoverTransfer")
        self.rm = ReplicaManager()
        self.request = requestObject

        if not self.request:
            self.request = RequestContainer()
            self.request.setRequestName('default_request.xml')
            self.request.setSourceComponent('FailoverTransfer')
Example 13
  def __init__( self, requestObject = None ):
    """ Constructor function, can specify request object to instantiate
        FailoverTransfer or a new request object is created.
    """
    self.log = gLogger.getSubLogger( "FailoverTransfer" )
    self.replicaMgr = ReplicaManager()
    self.request = requestObject

    if not self.request:
      self.request = Request()
      self.request.RequestName = 'default_request.xml'
      self.request.SourceComponent = 'FailoverTransfer'
Example 14
 def __init__(self, argumentsDict):
     """ Standard constructor
 """
     self.name = COMPONENT_NAME
     self.log = gLogger.getSubLogger(self.name)
     self.inputData = argumentsDict['InputData']
     self.configuration = argumentsDict['Configuration']
     self.fileCatalogResult = argumentsDict['FileCatalog']
     # By default put each input data file into a separate directory
     self.inputDataDirectory = argumentsDict.get('InputDataDirectory',
                                                 'PerFile')
     self.jobID = None
     self.replicaManager = ReplicaManager()
     self.counter = 1
Example 15
    def initialize(self):

        self.section = PathFinder.getAgentSection(AGENT_NAME)
        self.RequestDB = RequestDBMySQL()
        self.TransferDB = TransferDB()
        self.DataLog = DataLoggingClient()
        self.factory = StorageFactory()
        self.rm = ReplicaManager()

        # This sets the Default Proxy to used as that defined under
        # /Operations/Shifter/DataManager
        # the shifterProxy option in the Configuration can be used to change this default.
        self.am_setOption('shifterProxy', 'DataManager')

        return S_OK()
Example 16
    def initialize(self):

        self.RequestDBClient = RequestClient()
        self.ReplicaManager = ReplicaManager()
        self.DataLog = DataLoggingClient()

        self.maxNumberOfThreads = self.am_getOption('NumberOfThreads', 1)
        self.threadPoolDepth = self.am_getOption('ThreadPoolDepth', 1)
        self.threadPool = ThreadPool(1, self.maxNumberOfThreads)

        # This sets the Default Proxy to used as that defined under
        # /Operations/Shifter/DataManager
        # the shifterProxy option in the Configuration can be used to change this default.
        self.am_setOption('shifterProxy', 'DataManager')

        return S_OK()
Example 17
    def initialize(self):
        self.replicaManager = ReplicaManager()
        #self.stagerClient = StorageManagerClient()
        self.dataIntegrityClient = DataIntegrityClient()
        self.storageDB = StorageManagementDB()
        # pin lifetime = 1 day
        self.pinLifetime = self.am_getOption('PinLifetime', THROTTLING_TIME)
        # Resources helper
        self.resources = Resources()

        # This sets the Default Proxy to used as that defined under
        # /Operations/Shifter/DataManager
        # the shifterProxy option in the Configuration can be used to change this default.
        self.am_setOption('shifterProxy', 'DataManager')

        return S_OK()
Example 18
    def __init__(self):
        """Module initialization.
    """
        super(UploadLogFile, self).__init__()
        self.version = __RCSID__
        self.log = gLogger.getSubLogger("UploadLogFile")
        self.PRODUCTION_ID = None
        self.JOB_ID = None
        self.workflow_commons = None
        self.request = None
        self.logFilePath = ""
        self.logLFNPath = ""
        self.logdir = ""
        self.logSE = self.ops.getValue("/LogStorage/LogSE", "LogSE")
        self.root = gConfig.getValue("/LocalSite/Root", os.getcwd())
        self.logSizeLimit = self.ops.getValue("/LogFiles/SizeLimit", 20 * 1024 * 1024)
        self.logExtensions = []
        self.failoverSEs = gConfig.getValue("/Resources/StorageElementGroups/Tier1-Failover", [])
        self.diracLogo = self.ops.getValue(
            "/SAM/LogoURL", "https://lhcbweb.pic.es/DIRAC/images/logos/DIRAC-logo-transp.png"
        )
        self.rm = ReplicaManager()

        self.experiment = "CLIC"
        self.enable = True
        self.failoverTest = False  # flag to put log files to failover by default
        self.jobID = ""
Example 19
 def replicaManager(cls):
     """ ReplicaManager getter 
 :param cls: class reference
 """
     if not cls.__replicaManager:
         cls.__replicaManager = ReplicaManager()
     return cls.__replicaManager
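The getter above (and its near twin in Example 26) caches a single ReplicaManager on the class so that every caller reuses one instance. The same lazy-getter idea in a library-free form; the Holder class and its payload are hypothetical stand-ins:

class Holder(object):
    __client = None  # shared instance, created on first use

    @classmethod
    def client(cls):
        # stands in for ReplicaManager(); constructed only once per process
        if not cls.__client:
            cls.__client = {'created': True}
        return cls.__client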
Example 20
  def __init__( self, *args, **kwargs ):
    ''' c'tor
    '''
    AgentModule.__init__( self, *args, **kwargs )
    # # replica manager
    self.replicaManager = ReplicaManager()
    # # transformation client
    self.transClient = TransformationClient()
    # # wms client
    self.wmsClient = WMSClient()
    # # request client
    self.requestClient = RequestClient()
    # # file catalog client
    self.metadataClient = FileCatalogClient()

    # # placeholders for CS options

    # # transformations types
    self.transformationTypes = None
    # # directory locations
    self.directoryLocations = None
    # # transformation metadata
    self.transfidmeta = None
    # # archive period in days
    self.archiveAfter = None
    # # active SEs
    self.activeStorages = None
    # # transformation log SEs
    self.logSE = None
    # # enable/disable execution
    self.enableFlag = None
Example 21
  def __init__(self):
    super(OverlayInput, self).__init__()
    self.enable = True
    self.STEP_NUMBER = ''
    self.log = gLogger.getSubLogger( "OverlayInput" )
    self.applicationName = 'OverlayInput'
    self.curdir = os.getcwd()
    self.applicationLog = ''
    self.printoutflag = ''
    self.prodid = 0
    self.detector = '' ##needed for backward compatibility
    self.detectormodel = ""
    self.energytouse = ''
    self.energy = 0
    self.nbofeventsperfile = 100
    self.lfns = []
    self.nbfilestoget = 0
    self.BkgEvtType = 'gghad'
    self.BXOverlay = 0
    self.ggtohadint = 3.2
    self.nbsigeventsperfile = 0
    self.nbinputsigfile = 1
    self.NbSigEvtsPerJob = 0
    self.rm = ReplicaManager()
    self.fc = FileCatalogClient()
    self.site = DIRAC.siteName()

    self.machine = 'clic_cdr'
Example 22
    def __init__(self, loggerIn=None):
        """ Initialization of module base.

        loggerIn is a logger object that can be passed so that the logging will be more clear.
    """

        if not loggerIn:
            self.log = gLogger.getSubLogger('ModuleBase')
        else:
            self.log = loggerIn

        # These 2 are used in many places, so it's good to have them available here.
        self.opsH = Operations()
        self.rm = ReplicaManager()

        # Some job parameters
        self.production_id = 0
        self.prod_job_id = 0
        self.jobID = 0
        self.step_number = 0
        self.step_id = 0
        self.jobType = ''
        self.executable = ''
        self.command = None

        self.workflowStatus = None
        self.stepStatus = None
        self.workflow_commons = None
        self.step_commons = None

        # These are useful objects (see the getFileReporter(), getJobReporter() and getRequestContainer() functions)
        self.fileReport = None
        self.jobReport = None
        self.request = None
Example 23
  def initialize( self ):
    """Sets defaults """
    self.replicaManager = ReplicaManager()
    self.transClient = TransformationClient()
    self.wmsClient = WMSClient()
    self.requestClient = RequestClient()
    self.metadataClient = FileCatalogClient()
    self.storageUsageClient = StorageUsageClient()

    # This sets the Default Proxy to used as that defined under 
    # /Operations/Shifter/DataManager
    # the shifterProxy option in the Configuration can be used to change this default.
    self.am_setOption( 'shifterProxy', 'DataManager' )

    self.transformationTypes = sortList( self.am_getOption( 'TransformationTypes', ['MCSimulation', 'DataReconstruction', 'DataStripping', 'MCStripping', 'Merge', 'Replication'] ) )
    gLogger.info( "Will consider the following transformation types: %s" % str( self.transformationTypes ) )
    self.directoryLocations = sortList( self.am_getOption( 'DirectoryLocations', ['TransformationDB', 'StorageUsage', 'MetadataCatalog'] ) )
    gLogger.info( "Will search for directories in the following locations: %s" % str( self.directoryLocations ) )
    self.transfidmeta = self.am_getOption( 'TransfIDMeta', "TransformationID" )
    gLogger.info( "Will use %s as metadata tag name for TransformationID" % self.transfidmeta )
    self.archiveAfter = self.am_getOption( 'ArchiveAfter', 7 ) # days
    gLogger.info( "Will archive Completed transformations after %d days" % self.archiveAfter )
    self.activeStorages = sortList( self.am_getOption( 'ActiveSEs', [] ) )
    gLogger.info( "Will check the following storage elements: %s" % str( self.activeStorages ) )
    self.logSE = self.am_getOption( 'TransformationLogSE', 'LogSE' )
    gLogger.info( "Will remove logs found on storage element: %s" % self.logSE )
    return S_OK()
Example 24
 def __init__(self):
   super(WhizardAnalysis, self).__init__()
   self.enable = True
   self.STEP_NUMBER = ''
   self.debug = True
   self.log = gLogger.getSubLogger( "WhizardAnalysis" )
   self.SteeringFile = ''
   self.OutputFile = ''
   self.NumberOfEvents = 1
   self.Lumi = 0
   self.applicationName = 'whizard'
   self.evttype = ""
   self.RandomSeed = 0
   self.energy = 3000.
   self.getProcessInFile = False
   self.rm = ReplicaManager()
   self.processlist = None
   self.jobindex = None
   self.parameters = {}
   self.susymodel = 0
   self.Model = ''
   self.genmodel = GeneratorModels()
   self.eventstring = ['! ', 'Fatal error:', 'PYSTOP', 'No matrix element available',
                       'Floating point exception', 'Event generation finished.', " n_events","luminosity", "  sum            "]
   self.excludeAllButEventString = False
   self.steeringparameters = ''
   self.options = None
   self.optionsdict = {}
   self.OptionsDictStr = ''
   self.GenLevelCutDictStr = ''
   self.genlevelcuts = {}
   self.willCut = False
   self.useGridFiles = False
Example 25
    def finish(self):
        """
        after having set all the files, this one does all the job
        @return:
        """
        rc = 0
        rm = ReplicaManager()
        for item in self.listFileStaged:
            #print("SE '"+self.SE+"' == : '"+str(self.SE == "False")+"'")
            if not self.SE:
                self.log.info("No SE available for '" + item[0] + "'")
                rc += 1
                continue
            else:
                self.log.info("Trying to store '" + item[0] + "' in SE : '" +
                              self.SE + "' ...")
                result = rm.putAndRegister(item[1], item[0], self.SE)
                if not result['OK']:
                    self.log.info('ERROR %s' % (result['Message']))

                    self.log.info("Wait 5sec before trying again...")
                    time.sleep(5)
                    result = rm.putAndRegister(item[1], item[0], self.SE)
                    if not result['OK']:
                        self.log.info('ERROR %s' % (result['Message']))
                        while not result['OK']:
                            self.listSEs.remove(
                                self.SE
                            )  # make sure not to pick the same SE again.
                            self.__pickRandomSE()
                            if not self.SE:
                                break
                            self.log.info("Trying with another SE : '" +
                                          self.SE + "' . In 5sec...")
                            time.sleep(5)
                            result = rm.putAndRegister(item[1], item[0],
                                                       self.SE)
                            if result['OK']:
                                self.log.info("file stored : '" + item[1] +
                                              "' in '" + self.SE + "'")
                            else:
                                self.log.error(
                                    "ERROR : failed to store the file '" +
                                    item[1] + "' ...")
                                rc += 1

        return rc
Example 26
 def replicaManager( cls ):
   """ ReplicaManager getter 
   :param cls: class reference
   """
   if not cls.__replicaManager:
     from DIRAC.DataManagementSystem.Client.ReplicaManager import ReplicaManager
     cls.__replicaManager = ReplicaManager()
   return cls.__replicaManager
Example 27
def removeOutputData(baseDir):
    res = getProxyInfo(False, False)
    if not res["OK"]:
        gLogger.error("Failed to get client proxy information.", res["Message"])
        return S_ERROR("Failed to get client proxy information.", res["Message"])

    # ######################################################################################################## #
    rm = ReplicaManager()

    result = rm.cleanLogicalDirectory(baseDir)
    print "Ignore the message about the file '" + baseDir + "dirac_directory'"

    if not result["OK"]:
        print "ERROR: %s" % (result["Message"])
        return S_ERROR("Failed to Suppress the directory : '" + baseDir + "'")

    return S_OK(baseDir + " has been supressed")
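A possible call of the removeOutputData() helper above; the directory is a placeholder and a valid proxy with removal rights is assumed:

# Hypothetical base directory.
res = removeOutputData('/some.vo/user/s/someuser/test_outputs/')
if not res['OK']:
    print res['Message']
else:
    print res['Value']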
Example 28
  def _getClients( self ):
    """ returns the clients used in the threads
    """
    threadTransformationClient = TransformationClient()
    threadReplicaManager = ReplicaManager()

    return {'TransformationClient': threadTransformationClient,
            'ReplicaManager': threadReplicaManager}
Example 29
 def __prepareFileForHTTP( self, lfn, key ):
   """ proxied preapre file for HTTP """
   global HTTP_PATH
   
   res = self.__prepareSecurityDetails()
   if not res['OK']:
     return res
 
   # Clear the local cache
   getFileDir = "%s/%s" % ( HTTP_PATH, key )
   os.makedirs(getFileDir)
  
   # Get the file to the cache 
   from DIRAC.DataManagementSystem.Client.ReplicaManager import ReplicaManager
   replicaMgr = ReplicaManager()
   result = replicaMgr.getFile( lfn, destinationDir=getFileDir )
   result['CachePath'] = getFileDir
   return result  
Example 30
    def __prepareFileForHTTP(self, lfn, key):

        global httpPath

        res = self.__prepareSecurityDetails()
        if not res['OK']:
            return res

        # Clear the local cache
        getFileDir = "%s/%s" % (httpPath, key)
        os.makedirs(getFileDir)

        # Get the file to the cache
        from DIRAC.DataManagementSystem.Client.ReplicaManager import ReplicaManager
        rm = ReplicaManager()
        result = rm.getFile(lfn, destinationDir=getFileDir)
        result['CachePath'] = getFileDir
        return result
Example 31
 def __copyToExternalSE( self, localFilePath, sbPath ):
   """
   Copy uploaded file to external SE
   """
   try:
     rm = ReplicaManager()
     result = rm.put( sbPath, localFilePath, self.__externalSEName )
     if not result[ 'OK' ]:
       return result
     if 'Successful' not in result[ 'Value' ]:
       gLogger.verbose( "Oops, no successful transfers there", str( result ) )
       return S_ERROR( "RM returned OK to the action but no successful transfers were there" )
     okTrans = result[ 'Value' ][ 'Successful' ]
     if sbPath not in okTrans:
       gLogger.verbose( "Ooops, SB transfer wasn't in the successful ones", str( result ) )
       return S_ERROR( "RM returned OK to the action but SB transfer wasn't in the successful ones" )
     return S_OK( ( self.__externalSEName, okTrans[ sbPath ] ) )
   except Exception, e:
     return S_ERROR( "Error while moving sandbox to SE: %s" % str( e ) )
Example 32
  def initialize( self ):

    self.RequestDBClient = RequestClient()
    self.ReplicaManager = ReplicaManager()
    # This sets the Default Proxy to used as that defined under 
    # /Operations/Shifter/DataManager
    # the shifterProxy option in the Configuration can be used to change this default.
    self.am_setOption( 'shifterProxy', 'DataManager' )

    return S_OK()
Example 33
def removeOutputData(baseDir):
    res = getProxyInfo(False, False)
    if not res['OK']:
        gLogger.error("Failed to get client proxy information.",
                      res['Message'])
        return S_ERROR("Failed to get client proxy information.",
                       res['Message'])

    # ######################################################################################################## #
    rm = ReplicaManager()

    result = rm.cleanLogicalDirectory(baseDir)
    print "Ignore the message about the file '" + baseDir + "dirac_directory'"

    if not result['OK']:
        print 'ERROR: %s' % (result['Message'])
        return S_ERROR("Failed to Suppress the directory : '" + baseDir + "'")

    return S_OK(baseDir + " has been supressed")
Example 34
 def __init__(self):
   """Module initialization.
   """
   super(GetSRMFile, self).__init__()
   self.version = __RCSID__
   self.log = gLogger.getSubLogger('GetSRMFile')
   self.repman = ReplicaManager()
   self.srmfiles = []
   self.files = []
   self.counter = 1
Example 35
 def __init__( self, argumentsDict ):
   """ Standard constructor
   """
   self.name = COMPONENT_NAME
   self.log = gLogger.getSubLogger( self.name )
   self.inputData = argumentsDict['InputData']
   self.configuration = argumentsDict['Configuration']
   self.fileCatalogResult = argumentsDict['FileCatalog']
   self.jobID = None
   self.rm = ReplicaManager()
Example 36
    def initialize(self):
        """Sets defaults """
        self.replicaManager = ReplicaManager()
        self.transClient = TransformationClient()
        self.wmsClient = WMSClient()
        self.requestClient = RequestClient()
        self.metadataClient = FileCatalogClient()
        self.storageUsageClient = StorageUsageClient()

        # This sets the Default Proxy to used as that defined under
        # /Operations/Shifter/DataManager
        # the shifterProxy option in the Configuration can be used to change this default.
        self.am_setOption('shifterProxy', 'DataManager')

        self.transformationTypes = sortList(
            self.am_getOption('TransformationTypes', [
                'MCSimulation', 'DataReconstruction', 'DataStripping',
                'MCStripping', 'Merge', 'Replication'
            ]))
        gLogger.info("Will consider the following transformation types: %s" %
                     str(self.transformationTypes))
        self.directoryLocations = sortList(
            self.am_getOption(
                'DirectoryLocations',
                ['TransformationDB', 'StorageUsage', 'MetadataCatalog']))
        gLogger.info(
            "Will search for directories in the following locations: %s" %
            str(self.directoryLocations))
        self.transfidmeta = self.am_getOption('TransfIDMeta',
                                              "TransformationID")
        gLogger.info("Will use %s as metadata tag name for TransformationID" %
                     self.transfidmeta)
        self.archiveAfter = self.am_getOption('ArchiveAfter', 7)  # days
        gLogger.info("Will archive Completed transformations after %d days" %
                     self.archiveAfter)
        self.activeStorages = sortList(self.am_getOption('ActiveSEs', []))
        gLogger.info("Will check the following storage elements: %s" %
                     str(self.activeStorages))
        self.logSE = self.am_getOption('TransformationLogSE', 'LogSE')
        gLogger.info("Will remove logs found on storage element: %s" %
                     self.logSE)
        return S_OK()
Example 37
 def getProcessList(self): 
   """ Get the L{ProcessList} needed by L{Whizard}.
   @return: process list object
   """   
   processlistpath = gConfig.getValue("/LocalSite/ProcessListPath", "")
   if not processlistpath:
     gLogger.info('Will download the process list locally. To gain time, please put it somewhere and add to \
     your dirac.cfg the entry /LocalSite/ProcessListPath pointing to the file')
     pathtofile = self.ops.getValue("/ProcessList/Location", "")
     if not pathtofile:
       gLogger.error("Could not get path to process list")
       processlist = ""
     else:
       rm = ReplicaManager()
       rm.getFile(pathtofile)
       processlist = os.path.basename(pathtofile)   
   else:
     processlist = processlistpath
   self.pl = ProcessList(processlist)
   return self.pl
Example 38
    def __init__(self, useCertificates=False):
        """c'tor

    :param self: self reference
    :param bool useCertificates: flag to enable/disable certificates
    """
        Client.__init__(self)
        self.log = gLogger.getSubLogger("DataManagement/FTSClient")
        self.setServer("DataManagement/FTSManager")

        # getting other clients
        self.ftsValidator = FTSValidator()
        self.replicaManager = ReplicaManager()
        self.storageFactory = StorageFactory()

        url = PathFinder.getServiceURL("DataManagement/FTSManager")
        if not url:
            raise RuntimeError(
                "CS option DataManagement/FTSManager URL is not set!")
        self.ftsManager = RPCClient(url)
Example 39
    def __init__(self, operation=None, csPath=None):
        """c'tor

    :param self: self reference
    :param Operation operation: Operation instance
    :param str csPath: CS path for this handler
    """
        super(ReplicateAndRegister, self).__init__(operation, csPath)
        # # own gMonitor stuff for files
        gMonitor.registerActivity("ReplicateAndRegisterAtt",
                                  "Replicate and register attempted",
                                  "RequestExecutingAgent", "Files/min",
                                  gMonitor.OP_SUM)
        gMonitor.registerActivity("ReplicateOK", "Replications successful",
                                  "RequestExecutingAgent", "Files/min",
                                  gMonitor.OP_SUM)
        gMonitor.registerActivity("ReplicateFail", "Replications failed",
                                  "RequestExecutingAgent", "Files/min",
                                  gMonitor.OP_SUM)
        gMonitor.registerActivity("RegisterOK", "Registrations successful",
                                  "RequestExecutingAgent", "Files/min",
                                  gMonitor.OP_SUM)
        gMonitor.registerActivity("RegisterFail", "Registrations failed",
                                  "RequestExecutingAgent", "Files/min",
                                  gMonitor.OP_SUM)
        # # for FTS
        gMonitor.registerActivity("FTSScheduleAtt", "Files schedule attempted",
                                  "RequestExecutingAgent", "Files/min",
                                  gMonitor.OP_SUM)
        gMonitor.registerActivity("FTSScheduleOK", "File schedule successful",
                                  "RequestExecutingAgent", "Files/min",
                                  gMonitor.OP_SUM)
        gMonitor.registerActivity("FTSScheduleFail", "File schedule failed",
                                  "RequestExecutingAgent", "Files/min",
                                  gMonitor.OP_SUM)
        # # SE cache
        self.seCache = {}

        # Clients
        self.rm = ReplicaManager()
        self.ftsClient = FTSClient()
Example 40
 def __init__(self):
   '''
   Constructor
   '''
   super(MoveInFC, self).__init__()
   self.enable = False
   self.STEP_NUMBER = ''
   self.log = gLogger.getSubLogger( "MoveInFC" )
   self.applicationName = 'MoveInFC'
   self.rm = ReplicaManager()
   self.listoutput = {}
   self.outputpath = ''
Example 41
  def __init__(self,requestObject=False):
    """ Constructor function, can specify request object to instantiate 
        FailoverTransfer or a new request object is created.
    """
    self.log = gLogger.getSubLogger( "FailoverTransfer" )    
    self.rm = ReplicaManager()
    self.request = requestObject    
 
    if not self.request:
      self.request = RequestContainer()
      self.request.setRequestName('default_request.xml')
      self.request.setSourceComponent('FailoverTransfer')
Example 42
  def __init__( self, *args, **kwargs ):
    """ c'tor
    """
    AgentModule.__init__( self, *args, **kwargs )

    self.integrityClient = DataIntegrityClient()
    self.replicaManager = ReplicaManager()
    self.transClient = TransformationClient()
    self.fileCatalogClient = FileCatalogClient()

    agentTSTypes = self.am_getOption( 'TransformationTypes', [] )
    if agentTSTypes:
      self.transformationTypes = agentTSTypes
    else:
      self.transformationTypes = Operations().getValue( 'Transformations/DataProcessing', ['MCSimulation', 'Merge'] )

    self.directoryLocations = sortList( self.am_getOption( 'DirectoryLocations', ['TransformationDB',
                                                                                  'MetadataCatalog'] ) )
    self.activeStorages = sortList( self.am_getOption( 'ActiveSEs', [] ) )
    self.transfidmeta = self.am_getOption( 'TransfIDMeta', "TransformationID" )
    self.enableFlag = True
Example 43
    def initializeOptimizer(cls):
        """Initialize specific parameters
    """
        cls.ex_setProperty('shifterProxy', 'DataManager')
        cls.__SEStatus = DictCache.DictCache()

        try:
            cls.__replicaMan = ReplicaManager()
        except Exception, e:
            msg = 'Failed to create ReplicaManager'
            cls.log.exception(msg)
            return S_ERROR(msg + str(e))
Example 44
 def __init__( self, argumentsDict ):
   """ Standard constructor
   """
   self.name = COMPONENT_NAME
   self.log = gLogger.getSubLogger( self.name )
   self.inputData = argumentsDict['InputData']
   self.configuration = argumentsDict['Configuration']
   self.fileCatalogResult = argumentsDict['FileCatalog']
   # By default put each input data file into a separate directory
   self.inputDataDirectory = argumentsDict.get( 'InputDataDirectory', 'PerFile' )
   self.jobID = None
   self.replicaManager = ReplicaManager()
   self.counter = 1
Example 45
    def finish(self):
        """
        after having set all the files, this one does all the job
        @return:
        """
        rc = 0
        rm = ReplicaManager()
        for item in self.listFileStaged:   
            #self.log.info("SE '"+self.SE+"' == : '"+str(self.SE == "False")+"'")
            if not self.SE:
                self.log.info("No SE available for '"+item[0]+"'")
                rc+=1
                continue
            else:
                self.log.info("Trying to store '"+item[0]+"' in SE : '"+self.SE+"' ...")
                result = rm.putAndRegister( item[1], item[0], self.SE)
                if not result['OK']:
                    self.log.warning('ERROR %s' % ( result['Message'] ))
                    self.listSEs.remove(self.SE) # make sure not to pick the same SE again.

                    self.log.info("Wait 5sec before trying again...")
                    time.sleep(5)
                    result = rm.putAndRegister( item[1], item[0], self.SE)
                    if not result['OK']:
                        self.log.warning('ERROR %s' % ( result['Message'] ))
                        while not result['OK']:
                            self.__pickRandomSE()
                            if not self.SE:
                                break
                            self.log.info("Trying with another SE : '"+self.SE+"' . In 5sec...")
                            time.sleep(5)
                            result = rm.putAndRegister( item[1], item[0], self.SE)
                            if result['OK']:
                                self.log.info("file stored : '"+item[1]+"' in '"+self.SE+"'")
                            else:
                                self.log.info("ERROR : failed to store the file '"+item[1]+"' ...")
                                self.listSEs.remove(self.SE) # make sure not to pick the same SE again.
                                rc += 1

        return rc
Example 46
  def initialize( self ):
    """
      Called by the framework upon startup, before any cycle (execute method below)
    """
    self.requestDBClient = RequestClient()
    # the RequestAgentMixIn needs the capitalized version, until it is fixed keep this.
    self.RequestDBClient = self.requestDBClient
    self.replicaManager = ReplicaManager()

    gMonitor.registerActivity( "Iteration", "Agent Loops", "RemovalAgent", "Loops/min", gMonitor.OP_SUM )
    gMonitor.registerActivity( "Execute", "Request Processed", "RemovalAgent", "Requests/min", gMonitor.OP_SUM )
    gMonitor.registerActivity( "Done", "Request Completed", "RemovalAgent", "Requests/min", gMonitor.OP_SUM )

    gMonitor.registerActivity( "PhysicalRemovalAtt", "Physical removals attempted",
                               "RemovalAgent", "Removal/min", gMonitor.OP_SUM )
    gMonitor.registerActivity( "PhysicalRemovalDone", "Successful physical removals",
                               "RemovalAgent", "Removal/min", gMonitor.OP_SUM )
    gMonitor.registerActivity( "PhysicalRemovalFail", "Failed physical removals",
                               "RemovalAgent", "Removal/min", gMonitor.OP_SUM )
    gMonitor.registerActivity( "PhysicalRemovalSize", "Physically removed size",
                               "RemovalAgent", "Bytes", gMonitor.OP_ACUM )

    gMonitor.registerActivity( "ReplicaRemovalAtt", "Replica removal attempted",
                               "RemovalAgent", "Removal/min", gMonitor.OP_SUM )
    gMonitor.registerActivity( "ReplicaRemovalDone", "Successful replica removals",
                               "RemovalAgent", "Removal/min", gMonitor.OP_SUM )
    gMonitor.registerActivity( "ReplicaRemovalFail", "Failed replica removals",
                               "RemovalAgent", "Removal/min", gMonitor.OP_SUM )

    gMonitor.registerActivity( "RemoveFileAtt", "File removal attempted",
                               "RemovalAgent", "Removal/min", gMonitor.OP_SUM )
    gMonitor.registerActivity( "RemoveFileDone", "File removal done",
                               "RemovalAgent", "Removal/min", gMonitor.OP_SUM )
    gMonitor.registerActivity( "RemoveFileFail", "File removal failed",
                               "RemovalAgent", "Removal/min", gMonitor.OP_SUM )

    self.maxNumberOfThreads = self.am_getOption( 'NumberOfThreads', self.maxNumberOfThreads )
    self.maxRequestsInQueue = self.am_getOption( 'RequestsInQueue', self.maxRequestsInQueue )
    self.threadPool = ThreadPool( 1, self.maxNumberOfThreads, self.maxRequestsInQueue )

    # Set the ThreadPool in daemon mode to process new ThreadedJobs as they are inserted
    self.threadPool.daemonize()

    self.maxRequests = self.am_getOption( 'MaxRequestsPerCycle', 1200. )

    # This sets the Default Proxy to used as that defined under
    # /Operations/Shifter/DataManager
    # the shifterProxy option in the Configuration can be used to change this default.
    self.am_setOption( 'shifterProxy', 'DataManager' )

    return S_OK()
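Once the activities above are registered, the agent is expected to tick them while processing requests. A single hypothetical mark is sketched below; gMonitor.addMark is the usual DIRAC counterpart of registerActivity, and the activity name must match one registered in initialize().

# Sketch: count one successful file removal against the "RemoveFileDone" activity.
gMonitor.addMark( "RemoveFileDone", 1 )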
Example 47
    def __init__(self, agentName, baseAgentName=False, properties=dict()):
        """ c'tor

    :param self: self reference
    :param str agentName: name of agent
    :param bool baseAgentName: whatever  
    :param dict properties: whatever else
    """
        AgentModule.__init__(self, agentName, baseAgentName, properties)
        ## replica manager
        self.replicaManager = ReplicaManager()
        ## transformation client
        self.transClient = TransformationClient()
        ## wms client
        self.wmsClient = WMSClient()
        ## request client
        self.requestClient = RequestClient()
        ## file catalog client
        self.metadataClient = FileCatalogClient()
        ## storage usage agent
        self.storageUsageClient = StorageUsageClient()

        ## placeholders for CS options

        ## transformations types
        self.transformationTypes = None
        ## directory locations
        self.directoryLocations = None
        ## transformation metadata
        self.transfidmeta = None
        ## archive period in days
        self.archiveAfter = None
        ## active SEs
        self.activeStorages = None
        ## transformation log SEs
        self.logSE = None
        ## enable/disable execution
        self.enableFlag = None
Example 48
  def initialize( self ):
    """Sets defaults
    """
    self.integrityClient = DataIntegrityClient()
    self.replicaManager = ReplicaManager()
    self.transClient = TransformationClient()
    self.storageUsageClient = StorageUsageClient()
    self.fileCatalogClient = FileCatalogClient()

    # This sets the Default Proxy to used as that defined under 
    # /Operations/Shifter/DataManager
    # the shifterProxy option in the Configuration can be used to change this default.
    self.am_setOption( 'shifterProxy', 'DataManager' )

    self.transformationTypes = sortList( self.am_getOption( 'TransformationTypes', ['MCSimulation', 'DataReconstruction', 'DataStripping', 'MCStripping', 'Merge'] ) )
    gLogger.info( "Will treat the following transformation types: %s" % str( self.transformationTypes ) )
    self.directoryLocations = sortList( self.am_getOption( 'DirectoryLocations', ['TransformationDB', 'StorageUsage', 'MetadataCatalog'] ) )
    gLogger.info( "Will search for directories in the following locations: %s" % str( self.directoryLocations ) )
    self.activeStorages = sortList( self.am_getOption( 'ActiveSEs', [] ) )
    gLogger.info( "Will check the following storage elements: %s" % str( self.activeStorages ) )
    self.transfidmeta = self.am_getOption( 'TransfIDMeta', "TransformationID" )
    gLogger.info( "Will use %s as metadata tag name for TransformationID" % self.transfidmeta )
    return S_OK()
Example 49
    def initialize(self):
        self.pluginLocation = self.am_getOption(
            'PluginLocation',
            'DIRAC.TransformationSystem.Agent.TransformationPlugin')
        self.checkCatalog = self.am_getOption('CheckCatalog', 'yes')

        # This sets the Default Proxy to used as that defined under
        # /Operations/Shifter/ProductionManager
        # the shifterProxy option in the Configuration can be used to change this default.
        self.am_setOption('shifterProxy', 'ProductionManager')

        self.transDB = TransformationClient('TransformationDB')
        self.rm = ReplicaManager()
        return S_OK()
Example 50
 def initialize(self):
   """Sets defaults
   """
   self.enableFlag = '' #defined below
   self.replicaManager = ReplicaManager()
   self.prodDB = TransformationClient()
   self.requestClient = RequestClient()
   self.taskIDName = 'TaskID' 
   self.externalStatus = 'ExternalStatus'
   self.externalID = 'ExternalID'
   self.am_setOption('PollingTime',2*60*60) #no stalled jobs are considered so can be frequent
   self.enableFlag = self.am_getOption('EnableFlag', False)
   self.am_setModuleParam("shifterProxy", "ProductionManager")
   self.ops = Operations()
   return S_OK()
Example 51
  def initialize( self ):

    self.section = PathFinder.getAgentSection( AGENT_NAME )
    self.RequestDB = RequestDBMySQL()
    self.TransferDB = TransferDB()
    self.DataLog = DataLoggingClient()
    self.factory = StorageFactory()
    self.rm = ReplicaManager()

    # This sets the Default Proxy to used as that defined under
    # /Operations/Shifter/DataManager
    # the shifterProxy option in the Configuration can be used to change this default.
    self.am_setOption( 'shifterProxy', 'DataManager' )

    return S_OK()
Example 52
def downloadFile(TarBallURL, app_tar, folder_name):
  """ Get the file locally.
  """
  #need to make sure the URL ends with /, otherwise concatenation below returns a bad URL
  if TarBallURL[-1] != "/":
    TarBallURL += "/"

  app_tar_base = os.path.basename(app_tar)
  if TarBallURL.find("http://")>-1:
    try :
      gLogger.debug("Downloading software", '%s' % (folder_name))
      #Copy the file locally, don't try to read from remote, soooo slow
      #Use string conversion %s%s to set the address, makes the system more stable
      urllib.urlretrieve("%s%s" % (TarBallURL, app_tar), app_tar_base)
    except:
      gLogger.exception()
      return S_ERROR('Exception during url retrieve')
  else:
    rm = ReplicaManager()
    resget = rm.getFile("%s%s" % (TarBallURL, app_tar))
    if not resget['OK']:
      gLogger.error("File could not be downloaded from the grid")
      return resget
  return S_OK()
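A hypothetical call of downloadFile(); the URL, tarball name and folder are placeholders:

res = downloadFile('http://www.example.org/tarballs/', 'myapp.tar.gz', 'myapp_v1')
if not res['OK']:
    print 'download failed'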
Example 53
  def initialize( self ):
    self.replicaManager = ReplicaManager()
    #self.stagerClient = StorageManagerClient()
    self.dataIntegrityClient = DataIntegrityClient()
    self.storageDB = StorageManagementDB()
    # pin lifetime = 1 day
    self.pinLifetime = self.am_getOption( 'PinLifetime', THROTTLING_TIME )
    # Resources helper
    self.resources = Resources()

    # This sets the Default Proxy to used as that defined under
    # /Operations/Shifter/DataManager
    # the shifterProxy option in the Configuration can be used to change this default.
    self.am_setOption( 'shifterProxy', 'DataManager' )

    return S_OK()
Example 54
  def initialize( self ):

    self.RequestDBClient = RequestClient()
    self.ReplicaManager = ReplicaManager()
    self.DataLog = DataLoggingClient()

    self.maxNumberOfThreads = self.am_getOption( 'NumberOfThreads', 1 )
    self.threadPoolDepth = self.am_getOption( 'ThreadPoolDepth', 1 )
    self.threadPool = ThreadPool( 1, self.maxNumberOfThreads )

    # This sets the Default Proxy to used as that defined under
    # /Operations/Shifter/DataManager
    # the shifterProxy option in the Configuration can be used to change this default.
    self.am_setOption( 'shifterProxy', 'DataManager' )

    return S_OK()
Example 55
  def __init__( self, *args, **kwargs ):
    """ c'tor
    """
    AgentModule.__init__( self, *args, **kwargs )

    self.integrityClient = DataIntegrityClient()
    self.replicaManager = ReplicaManager()
    self.transClient = TransformationClient()
    self.fileCatalogClient = FileCatalogClient()

    agentTSTypes = self.am_getOption( 'TransformationTypes', [] )
    if agentTSTypes:
      self.transformationTypes = agentTSTypes
    else:
      self.transformationTypes = Operations().getValue( 'Transformations/DataProcessing', ['MCSimulation', 'Merge'] )

    self.directoryLocations = sortList( self.am_getOption( 'DirectoryLocations', ['TransformationDB', 'MetadataCatalog'] ) )
    self.activeStorages = sortList( self.am_getOption( 'ActiveSEs', [] ) )
    self.transfidmeta = self.am_getOption( 'TransfIDMeta', "TransformationID" )
Example 56
  def __init__( self, useCertificates = False ):
    """c'tor

    :param self: self reference
    :param bool useCertificates: flag to enable/disable certificates
    """
    Client.__init__( self )
    self.log = gLogger.getSubLogger( "DataManagement/FTSClient" )
    self.setServer( "DataManagement/FTSManager" )

    # getting other clients
    self.ftsValidator = FTSValidator()
    self.replicaManager = ReplicaManager()
    self.storageFactory = StorageFactory()

    url = PathFinder.getServiceURL( "DataManagement/FTSManager" )
    if not url:
      raise RuntimeError( "CS option DataManagement/FTSManager URL is not set!" )
    self.ftsManager = RPCClient( url )
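Most of the agent snippets above repeat one skeleton: construct the clients in initialize(), force the DataManager shifter proxy, and return S_OK(). A condensed, hypothetical version of that pattern is sketched below; the agent class and its contents are illustrative, not an actual DIRAC agent, and the import paths follow the old DIRAC layout used throughout these examples.

from DIRAC import S_OK
from DIRAC.Core.Base.AgentModule import AgentModule
from DIRAC.DataManagementSystem.Client.ReplicaManager import ReplicaManager


class ExampleDataAgent(AgentModule):
  """Illustrative agent skeleton showing the recurring initialize() pattern."""

  def initialize(self):
    # clients shared by all execution cycles
    self.replicaManager = ReplicaManager()
    # Run with the proxy defined under /Operations/Shifter/DataManager;
    # the shifterProxy option in the Configuration can override this default.
    self.am_setOption('shifterProxy', 'DataManager')
    return S_OK()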