Example no. 1
    def __init__(self, operation=None, csPath=None):
        """c'tor

    :param Operation operation: Operation instance
    :param str csPath: config path in CS for this operation
    """
        # # placeholders for operation and request
        self.operation = None
        self.request = None

        self.dm = DataManager()
        self.fc = FileCatalog()

        self.csPath = csPath if csPath else ""
        # # get name
        name = self.__class__.__name__
        # # all options are r/o properties now
        csOptionsDict = gConfig.getOptionsDict(self.csPath)
        csOptionsDict = csOptionsDict.get("Value", {})

        for option, value in csOptionsDict.items():
            # # hack to set proper types
            try:
                value = eval(value)
            except NameError:
                pass
            self.makeProperty(option, value, True)  #pylint: disable=no-member

        # # pre setup logger
        self.log = gLogger.getSubLogger(name, True)
        # # set log level
        logLevel = getattr(self, "LogLevel") if hasattr(self,
                                                        "LogLevel") else "INFO"
        self.log.setLevel(logLevel)

        # # list properties
        for option in csOptionsDict:
            self.log.debug("%s = %s" % (option, getattr(self, option)))

        # # setup operation
        if operation:
            self.setOperation(operation)
        # # initialize at least
        if hasattr(self, "initialize") and callable(getattr(
                self, "initialize")):
            getattr(self, "initialize")()
Example no. 2
    def __init__(self,
                 plugin='Standard',
                 transClient=None,
                 dataManager=None,
                 fc=None,
                 debug=False,
                 transInThread=None,
                 transID=None):
        """
    c'tor

    Setting defaults
    """
        # clients
        if transClient is None:
            self.transClient = TransformationClient()
        else:
            self.transClient = transClient
        if dataManager is None:
            self.dm = DataManager()
        else:
            self.dm = dataManager
        if fc is None:
            self.fc = FileCatalog()
        else:
            self.fc = fc

        self.dmsHelper = DMSHelpers()

        self.plugin = plugin
        self.transID = transID
        self.params = {}
        self.groupSize = 0
        self.maxFiles = 0
        self.cachedLFNSize = {}
        self.transString = ''
        self.debug = debug
        if transInThread is None:
            self.transInThread = {}
        else:
            self.transInThread = transInThread

        self.log = gLogger.getSubLogger(self.plugin + self.transInThread.get(
            self.transID, ' [NoThread] [%s] ' % self.transID))
        # FIXME: This doesn't work (yet) but should soon, will allow scripts to get the context
        self.log.showHeaders(True)
Example no. 3
  def resolveCatalogPFNSizeMismatch( self, problematicDict ):
    """ This takes the problematic dictionary returned by the integrity DB and resolved the CatalogPFNSizeMismatch prognosis
    """
    lfn = problematicDict['LFN']
    se = problematicDict['SE']
    fileID = problematicDict['FileID']


    res = returnSingleResult( self.fc.getFileSize( lfn ) )
    if not res['OK']:
      return self.__returnProblematicError( fileID, res )
    catalogSize = res['Value']
    res = returnSingleResult( StorageElement( se ).getFileSize( lfn ) )
    if not res['OK']:
      return self.__returnProblematicError( fileID, res )
    storageSize = res['Value']
    bkKCatalog = FileCatalog( ['BookkeepingDB'] )
    res = returnSingleResult( bkKCatalog.getFileSize( lfn ) )
    if not res['OK']:
      return self.__returnProblematicError( fileID, res )
    bookkeepingSize = res['Value']
    if bookkeepingSize == catalogSize == storageSize:
      gLogger.info( "CatalogPFNSizeMismatch replica (%d) matched all registered sizes." % fileID )
      return self.__updateReplicaToChecked( problematicDict )
    if catalogSize == bookkeepingSize:
      gLogger.info( "CatalogPFNSizeMismatch replica (%d) found to mismatch the bookkeeping also" % fileID )
      res = returnSingleResult( self.fc.getReplicas( lfn ) )
      if not res['OK']:
        return self.__returnProblematicError( fileID, res )
      if len( res['Value'] ) <= 1:
        gLogger.info( "CatalogPFNSizeMismatch replica (%d) has no other replicas." % fileID )
        return S_ERROR( "Not removing catalog file mismatch since the only replica" )
      else:
        gLogger.info( "CatalogPFNSizeMismatch replica (%d) has other replicas. Removing..." % fileID )
        res = self.dm.removeReplica( se, lfn )
        if not res['OK']:
          return self.__returnProblematicError( fileID, res )
        return self.__updateCompletedFiles( 'CatalogPFNSizeMismatch', fileID )
    if ( catalogSize != bookkeepingSize ) and ( bookkeepingSize == storageSize ):
      gLogger.info( "CatalogPFNSizeMismatch replica (%d) found to match the bookkeeping size" % fileID )
      res = self.__updateReplicaToChecked( problematicDict )
      if not res['OK']:
        return self.__returnProblematicError( fileID, res )
      return self.changeProblematicPrognosis( fileID, 'BKCatalogSizeMismatch' )
    gLogger.info( "CatalogPFNSizeMismatch replica (%d) all sizes found mismatch. Updating retry count" % fileID )
    return self.incrementProblematicRetry( fileID )
Example no. 4
    def setUp(self):
        self.prodClient = ProductionClient()
        self.transClient = TransformationClient()
        self.fc = FileCatalog()

        # ## Add metadata fields to the DFC
        self.MDFieldDict = {
            'particle': 'VARCHAR(128)',
            'analysis_prog': 'VARCHAR(128)',
            'tel_sim_prog': 'VARCHAR(128)',
            'outputType': 'VARCHAR(128)',
            'zenith': 'int',
            'data_level': 'int'
        }
        for MDField in self.MDFieldDict:
            MDFieldType = self.MDFieldDict[MDField]
            res = self.fc.addMetadataField(MDField, MDFieldType)
            self.assert_(res['OK'])
Example no. 5
    def setUp(self):
        self.prodClient = ProductionClient()
        self.transClient = TransformationClient()
        self.fc = FileCatalog()

        # ## Add metadata fields to the DFC
        self.MDFieldDict = {
            "particle": "VARCHAR(128)",
            "analysis_prog": "VARCHAR(128)",
            "tel_sim_prog": "VARCHAR(128)",
            "outputType": "VARCHAR(128)",
            "zenith": "int",
            "data_level": "int",
        }
        for MDField in self.MDFieldDict:
            MDFieldType = self.MDFieldDict[MDField]
            res = self.fc.addMetadataField(MDField, MDFieldType)
            self.assert_(res["OK"])
Example no. 6
    def __init__(self,
                 plugin='Standard',
                 transClient=None,
                 dataManager=None,
                 fc=None,
                 debug=False,
                 transInThread=None,
                 transID=None):
        """
    c'tor

    Setting defaults
    """
        # clients
        if transClient is None:
            self.transClient = TransformationClient()
        else:
            self.transClient = transClient
        if dataManager is None:
            self.dm = DataManager()
        else:
            self.dm = dataManager
        if fc is None:
            self.fc = FileCatalog()
        else:
            self.fc = fc

        self.dmsHelper = DMSHelpers()

        self.plugin = plugin
        self.transID = transID
        self.params = {}
        self.groupSize = 0
        self.maxFiles = 0
        self.cachedLFNSize = {}
        self.transString = ''
        self.debug = debug
        self.seConfig = {}
        if transInThread is None:
            self.transInThread = {}
        else:
            self.transInThread = transInThread

        self.log = gLogger.getSubLogger("%s/PluginUtilities" % plugin)
Example no. 7
  def __proxyWrapper( self, name, args, kwargs ):
    """ The wrapper will obtain the client proxy and set it up in the environment.
        The required functionality is then executed and returned to the client.

    :param self: self reference
    :param str name: fcn name
    :param tuple args: fcn args
    :param dict kwargs: fcn keyword args 
    """
    res = self.__prepareSecurityDetails()
    if not res['OK']:
      return res
    try:
      fileCatalog = FileCatalog( ['LcgFileCatalogCombined'] )
      method = getattr( fileCatalog, name )
    except AttributeError as error:
      errStr = "LcgFileCatalogProxyHandler.__proxyWrapper: No method named %s" % name
      gLogger.exception( errStr, name, error )
      return S_ERROR( errStr )
Example no. 8
def main():
    # Registering arguments will automatically add their description to the help menu
    Script.registerArgument(("LocalFile: Path to local file containing LFNs", "LFN:       Logical File Name"))
    Script.registerArgument(" SE:        Storage Element")
    Script.registerArgument(" status:    status")
    Script.parseCommandLine()

    from DIRAC.Resources.Catalog.FileCatalog import FileCatalog

    catalog = FileCatalog()
    import os

    # parseCommandLine shows the help message when mandatory arguments are missing or invalid
    inputFileName, se, newStatus = Script.getPositionalArgs(group=True)

    if os.path.exists(inputFileName):
        inputFile = open(inputFileName, "r")
        string = inputFile.read()
        lfns = string.splitlines()
        inputFile.close()
    else:
        lfns = [inputFileName]

    res = catalog.getReplicas(lfns, True)
    if not res["OK"]:
        print(res["Message"])
        DIRACExit(-1)
    replicas = res["Value"]["Successful"]

    lfnDict = {}
    for lfn in lfns:
        lfnDict[lfn] = {}
        lfnDict[lfn]["SE"] = se
        lfnDict[lfn]["Status"] = newStatus
        lfnDict[lfn]["PFN"] = replicas[lfn][se]

    res = catalog.setReplicaStatus(lfnDict)
    if not res["OK"]:
        print("ERROR:", res["Message"])
    if res["Value"]["Failed"]:
        print("Failed to update %d replica status" % len(res["Value"]["Failed"]))
    if res["Value"]["Successful"]:
        print("Successfully updated %d replica status" % len(res["Value"]["Successful"]))
Example no. 9
    def __init__(self, plugin, transClient=None, dataManager=None):
        """ plugin name has to be passed in: it will then be executed as one of the functions below, e.g.
        plugin = 'BySize' will execute TransformationPlugin('BySize')._BySize()
    """
        self.params = {}
        self.data = {}
        self.plugin = plugin
        self.files = False
        if transClient is None:
            self.transClient = TransformationClient()
        else:
            self.transClient = transClient

        if dataManager is None:
            self.dm = DataManager()
        else:
            self.dm = dataManager

        self.fc = FileCatalog()
Example no. 10
def main():
  Script.parseCommandLine()

  from DIRAC.Resources.Catalog.FileCatalog import FileCatalog
  catalog = FileCatalog()
  import os
  args = Script.getPositionalArgs()
  if not len(args) == 3:
    Script.showHelp(exitCode=1)
  else:
    inputFileName = args[0]
    se = args[1]
    newStatus = args[2]

  if os.path.exists(inputFileName):
    inputFile = open(inputFileName, 'r')
    string = inputFile.read()
    lfns = string.splitlines()
    inputFile.close()
  else:
    lfns = [inputFileName]

  res = catalog.getReplicas(lfns, True)
  if not res['OK']:
    print(res['Message'])
    DIRACExit(-1)
  replicas = res['Value']['Successful']

  lfnDict = {}
  for lfn in lfns:
    lfnDict[lfn] = {}
    lfnDict[lfn]['SE'] = se
    lfnDict[lfn]['Status'] = newStatus
    lfnDict[lfn]['PFN'] = replicas[lfn][se]

  res = catalog.setReplicaStatus(lfnDict)
  if not res['OK']:
    print("ERROR:", res['Message'])
  if res['Value']['Failed']:
    print("Failed to update %d replica status" % len(res['Value']['Failed']))
  if res['Value']['Successful']:
    print("Successfully updated %d replica status" % len(res['Value']['Successful']))
Example no. 11
    def __init__(self, operation=None, csPath=None):
        """c'tor

    :param self: self reference
    :param Operation operation: Operation instance
    :param str csPath: CS path for this handler
    """
        super(ReplicateAndRegister, self).__init__(operation, csPath)
        # # own gMonitor stuff for files
        gMonitor.registerActivity("ReplicateAndRegisterAtt",
                                  "Replicate and register attempted",
                                  "RequestExecutingAgent", "Files/min",
                                  gMonitor.OP_SUM)
        gMonitor.registerActivity("ReplicateOK", "Replications successful",
                                  "RequestExecutingAgent", "Files/min",
                                  gMonitor.OP_SUM)
        gMonitor.registerActivity("ReplicateFail", "Replications failed",
                                  "RequestExecutingAgent", "Files/min",
                                  gMonitor.OP_SUM)
        gMonitor.registerActivity("RegisterOK", "Registrations successful",
                                  "RequestExecutingAgent", "Files/min",
                                  gMonitor.OP_SUM)
        gMonitor.registerActivity("RegisterFail", "Registrations failed",
                                  "RequestExecutingAgent", "Files/min",
                                  gMonitor.OP_SUM)
        # # for FTS
        gMonitor.registerActivity("FTSScheduleAtt", "Files schedule attempted",
                                  "RequestExecutingAgent", "Files/min",
                                  gMonitor.OP_SUM)
        gMonitor.registerActivity("FTSScheduleOK", "File schedule successful",
                                  "RequestExecutingAgent", "Files/min",
                                  gMonitor.OP_SUM)
        gMonitor.registerActivity("FTSScheduleFail", "File schedule failed",
                                  "RequestExecutingAgent", "Files/min",
                                  gMonitor.OP_SUM)
        # # SE cache

        # Clients
        self.fc = FileCatalog()
        if hasattr(self, "FTSMode") and getattr(self, "FTSMode"):
            from DIRAC.DataManagementSystem.Client.FTSClient import FTSClient
            self.ftsClient = FTSClient()
Example no. 12
  def __proxyWrapper( self, fcName, methodName, args, kwargs ):
    """ The wrapper will obtain the client proxy and set it up in the environment.
        The required functionality is then executed and returned to the client.

    :param self: self reference
    :param str fcName: file catalog name
    :param str methodName: name of the catalog method to call
    :param tuple args: method positional args
    :param dict kwargs: method keyword args
    """
    result = self.__prepareSecurityDetails()
    if not result['OK']:
      return result
    proxyLocation = result['Value']
    try:
      fileCatalog = FileCatalog( [fcName] )
      method = getattr( fileCatalog, methodName )
    except AttributeError as error:
      errStr = "%s proxy: no method named %s" % ( fcName, methodName )
      gLogger.exception( errStr, methodName, error )
      return S_ERROR( errStr )
Example no. 13
    def test_03_noLFN(self, mk_getSelectedCatalogs, mk_getEligibleCatalogs):
        """Test the no_lfn methods"""

        fc = FileCatalog(catalogs=["c1_True_True_True_2_0_2_1", "c2_False_True_True_3_0_2_1"])

        # all good
        res = fc.write2("/lhcb/toto")
        self.assertTrue(res["OK"])
        self.assertEqual(res["Value"], "yeah")

        # Fail in the master
        res = fc.write2("/lhcb/c1")
        self.assertTrue(not res["OK"])
        self.assertTrue("Value" not in res)

        # Fail in the non master
        res = fc.write2("/lhcb/c2")
        self.assertTrue(res["OK"])
        self.assertTrue("Value" in res)
        self.assertEqual(res["Value"], "yeah")
Example no. 14
  def test_03_noLFN( self, mk_getSelectedCatalogs, mk_getEligibleCatalogs ):
    """ Test the no_lfn methods """


    fc = FileCatalog( catalogs = ['c1_True_True_True_2_0_2_1', 'c2_False_True_True_3_0_2_1'] )

    # all good
    res = fc.write2( "/lhcb/toto" )
    self.assert_( res['OK'] )
    self.assertEqual( res['Value'], 'yeah' )

    # Fail in the master
    res = fc.write2( "/lhcb/c1" )
    self.assert_( not res['OK'] )
    self.assert_( not 'Value' in res )

    # Fail in the non master
    res = fc.write2( "/lhcb/c2" )
    self.assert_( res['OK'] )
    self.assert_( 'Value' in res )
    self.assertEqual( res['Value'], 'yeah' )
Example no. 15
    def setUp(self):
        self.dirac = Dirac()
        csAPI = CSAPI()

        self.lfn5 = os.path.join(DESTINATION_PATH, 'test_file_10MB_v5.bin')
        self.dir5 = os.path.dirname(self.lfn5)
        # local file, for now:
        self.fname = os.path.basename(self.lfn5)
        random_dd(self.fname, 10)
        self.diracSE = 'SE-1'
        try:
            self.fc = FileCatalog(['MultiVOFileCatalog'])
        except Exception:
            self.fail(
                " FileCatalog(['MultiVOFileCatalog']) raised Exception unexpectedly!\n"
                + traceback.format_exc())
            return
        # add a replica
        self.fileadded = self.dirac.addFile(self.lfn5, self.fname,
                                            self.diracSE)
        self.assertTrue(self.fileadded['OK'])
Example no. 16
  def __init__(self, *args, **kwargs):
    """ c'tor
    """
    AgentModule.__init__(self, *args, **kwargs)

    self.consistencyInspector = ConsistencyInspector()
    self.integrityClient = DataIntegrityClient()
    self.fc = FileCatalog()
    self.transClient = TransformationClient()
    self.fileCatalogClient = FileCatalogClient()

    agentTSTypes = self.am_getOption('TransformationTypes', [])
    if agentTSTypes:
      self.transformationTypes = agentTSTypes
    else:
      self.transformationTypes = Operations().getValue('Transformations/DataProcessing', ['MCSimulation', 'Merge'])

    self.directoryLocations = sorted(self.am_getOption('DirectoryLocations', ['TransformationDB',
                                                                              'MetadataCatalog']))
    self.transfidmeta = self.am_getOption('TransfIDMeta', "TransformationID")
    self.enableFlag = True
Example no. 17
def syncDestinations(upload, source_dir, dest_dir, storage, delete, nthreads):
  """
  Top level wrapper to execute functions
  """

  fc = FileCatalog()
  dm = DataManager()

  result = getContentToSync(upload,fc,source_dir,dest_dir)
  if not result['OK']:
    return S_ERROR(result['Message'])

  if upload:
    res = doUpload(fc, dm, result, source_dir, dest_dir, storage, delete, nthreads)
    if not res['OK']:
      return S_ERROR('Upload failed: ' + res['Message'])
  else:
    res = doDownload(dm, result, source_dir, dest_dir, delete, nthreads)
    if not res['OK']:
      return S_ERROR('Download failed: ' + res['Message'])

  return S_OK('Mirroring successfully finished')
Example no. 18
    def __init__(self, fromDict=None):
        """c'tor

    :param self: self reference
    :param dict fromDict: data dict
    """
        self.__data__ = dict.fromkeys(self.tableDesc()["Fields"].keys(), None)

        now = datetime.datetime.utcnow().replace(microsecond=0)
        self.__data__["CreationTime"] = now
        self.__data__["SubmitTime"] = now
        self.__data__["LastUpdate"] = now
        self.__data__["Status"] = "Submitted"
        self.__data__["Completeness"] = 0
        self.__data__["FTSJobID"] = 0
        self._regTime = 0.
        self._regSuccess = 0
        self._regTotal = 0
        self.__files__ = TypedList(allowedTypes=FTSFile)

        self._fc = FileCatalog()
        self._fts3context = None

        self._states = tuple(
            set(self.INITSTATES + self.TRANSSTATES + self.FAILEDSTATES +
                self.FINALSTATES))

        fromDict = fromDict if fromDict else {}
        for ftsFileDict in fromDict.get("FTSFiles", []):
            self += FTSFile(ftsFileDict)
        if "FTSFiles" in fromDict:
            del fromDict["FTSFiles"]
        for key, value in fromDict.items():
            if key not in self.__data__:
                raise AttributeError("Unknown FTSJob attribute '%s'" % key)
            if value:
                setattr(self, key, value)
        self._log = gLogger.getSubLogger(
            "req_%s/FTSJob-%s" % (self.RequestID, self.FTSGUID), True)
Example no. 19
    def __init__(self, plugin, transClient=None, dataManager=None, fc=None):
        """Constructor of the TransformationPlugin.

        Instantiate clients, if not given, and set up the PluginUtilities.
        """
        super(TransformationPlugin, self).__init__(plugin)

        self.data = {}
        self.files = False
        self.startTime = time.time()
        self.valid = False

        if transClient is None:
            transClient = TransformationClient()

        if dataManager is None:
            dataManager = DataManager()

        if fc is None:
            fc = FileCatalog()

        self.util = PluginUtilities(plugin, transClient=transClient, dataManager=dataManager, fc=fc)
Example no. 20
    def __init__(self, *args, **kwargs):
        ''' c'tor
    '''
        AgentModule.__init__(self, *args, **kwargs)

        self.__baseDir = '/lhcb'
        self.__baseDirLabel = "_".join(List.fromChar(self.__baseDir, "/"))
        self.__ignoreDirsList = []
        self.__keepDirLevels = 4

        self.__startExecutionTime = int(time.time())
        self.__dirExplorer = DirectoryExplorer(reverse=True)
        self.__processedDirs = 0
        self.__directoryOwners = {}
        self.catalog = FileCatalog()
        self.__maxToPublish = self.am_getOption('MaxDirectories', 5000)
        if self.am_getOption('DirectDB', False):
            self.storageUsage = StorageUsageDB()
        else:
            # Set a timeout of 0.1 seconds per directory (factor 5 margin)
            self.storageUsage = RPCClient('DataManagement/StorageUsage',
                                          timeout=self.am_getOption(
                                              'Timeout',
                                              int(self.__maxToPublish * 0.1)))
        self.activePeriod = self.am_getOption('ActivePeriod',
                                              self.activePeriod)
        self.dataLock = threading.Lock()
        self.replicaListLock = threading.Lock()
        self.proxyCache = DictCache(removeProxy)
        self.__noProxy = set()
        self.__catalogType = None
        self.__recalculateUsage = Operations().getValue(
            'DataManagement/RecalculateDirSize', False)
        self.enableStartupSleep = self.am_getOption('EnableStartupSleep',
                                                    self.enableStartupSleep)
        self.__publishDirQueue = {}
        self.__dirsToPublish = {}
        self.__replicaFilesUsed = set()
        self.__replicaListFilesDir = ""
Example no. 21
  def test_02_condParser( self, mk_getSelectedCatalogs, mk_getEligibleCatalogs ):
    """Test behavior of write methode when using FCConditionParser"""

    fc = FileCatalog( catalogs = ['c1_True_True_True_2_0_2_0', 'c2_False_True_True_3_0_1_0', 'c3_False_True_True_3_0_1_0'] )

    # No condition for c3, so it should always pass
    fcConditions = { 'c1' : "Filename=find('c1_pass')",
                     'c2' : "Filename=find('c2_pass')"}


    # Everything pass everywhere
    lfn1 = '/lhcb/c1_pass/c2_pass/lfn1'
    lfn2 = '/lhcb/c1_pass/c2_pass/lfn2'
    res = fc.write1( [lfn1, lfn2],
                     fcConditions = fcConditions )
    self.assert_( res['OK'] )
    self.assertEqual( sorted( res['Value']['Successful'] ), sorted( [lfn1, lfn2] ) )
    self.assertEqual( sorted( res['Value']['Successful'][lfn1] ), sorted( ['c1', 'c2', 'c3'] ) )
    self.assertEqual( sorted( res['Value']['Successful'][lfn2] ), sorted( ['c1', 'c2', 'c3'] ) )
    self.assert_( not res['Value']['Failed'] )

    # Everything pass for the master, only lfn2 for c2
    lfn1 = '/lhcb/c1_pass/lfn1'
    lfn2 = '/lhcb/c1_pass/c2_pass/lfn2'
    res = fc.write1( [lfn1, lfn2],
                     fcConditions = fcConditions )
    self.assert_( res['OK'] )
    self.assertEqual( sorted( res['Value']['Successful'] ), sorted( [lfn1, lfn2] ) )
    self.assertEqual( sorted( res['Value']['Successful'][lfn1] ) , ['c1', 'c3'] )
    self.assertEqual( sorted( res['Value']['Successful'][lfn2] ), sorted( ['c1', 'c2','c3'] ) )
    self.assert_( not res['Value']['Failed'] )


    # One is not valid for the master, so we do nothing
    lfn1 = '/lhcb/c2_pass/lfn1'
    lfn2 = '/lhcb/c1_pass/c2_pass/lfn2'
    res = fc.write1( [lfn1, lfn2],
                     fcConditions = fcConditions )
    self.assert_( not res['OK'] )
Example no. 22
    def __init__(self, plugin, transClient=None, dataManager=None, fc=None):
        """ plugin name has to be passed in: it will then be executed as one of the functions below, e.g.
        plugin = 'BySize' will execute TransformationPlugin('BySize')._BySize()
    """
        super(TransformationPlugin, self).__init__(plugin)

        self.data = {}
        self.files = False
        self.startTime = time.time()

        if transClient is None:
            transClient = TransformationClient()

        if dataManager is None:
            dataManager = DataManager()

        if fc is None:
            fc = FileCatalog()

        self.util = PluginUtilities(plugin,
                                    transClient=transClient,
                                    dataManager=dataManager,
                                    fc=fc)
Example no. 23
    def resolveBKReplicaYes(self, problematicDict):
        """ This takes the problematic dictionary returned by the integrity DB and resolved the BKReplicaYes prognosis
    """
        lfn = problematicDict['LFN']
        fileID = problematicDict['FileID']

        res = returnSingleResult(self.fc.exists(lfn))
        if not res['OK']:
            return self.__returnProblematicError(fileID, res)
        removeBKFile = False
        # If the file does not exist in the catalog
        if not res['Value']:
            gLogger.info(
                "BKReplicaYes file (%d) does not exist in the catalog. Removing..."
                % fileID)
            removeBKFile = True
        else:
            gLogger.info(
                "BKReplicaYes file (%d) found to exist in the catalog" %
                fileID)
            # If the file has no replicas in the catalog
            res = returnSingleResult(self.fc.getReplicas(lfn))
            if (not res['OK']) and (res['Message']
                                    == 'File has zero replicas'):
                gLogger.info(
                    "BKReplicaYes file (%d) found to exist without replicas. Removing..."
                    % fileID)
                removeBKFile = True
        if removeBKFile:
            # Remove the file from the BK because it does not exist
            res = returnSingleResult(
                FileCatalog(catalogs=['BookkeepingDB']).removeFile(lfn))
            if not res['OK']:
                return self.__returnProblematicError(fileID, res)
            gLogger.info("BKReplicaYes file (%d) removed from bookkeeping" %
                         fileID)
        return self.__updateCompletedFiles('BKReplicaYes', fileID)
Example no. 24
def syncDestinations(upload, source_dir, dest_dir, storage, delete, nthreads):
    """
    Top level wrapper to execute functions
    """
    from DIRAC.Resources.Catalog.FileCatalog import FileCatalog
    from DIRAC.DataManagementSystem.Client.DataManager import DataManager

    fc = FileCatalog()
    dm = DataManager()

    result = getContentToSync(upload, fc, source_dir, dest_dir)
    if not result["OK"]:
        return S_ERROR(result["Message"])

    if upload:
        res = doUpload(fc, dm, result, source_dir, dest_dir, storage, delete, nthreads)
        if not res["OK"]:
            return S_ERROR("Upload failed: " + res["Message"])
    else:
        res = doDownload(dm, result, source_dir, dest_dir, delete, nthreads)
        if not res["OK"]:
            return S_ERROR("Download failed: " + res["Message"])

    return S_OK("Mirroring successfully finished")
Example no. 25
def main():
    # Registering arguments will automatically add their description to the help menu
    Script.registerArgument("GUIDs: GUIDs separated by a comma")
    Script.parseCommandLine()

    import DIRAC
    from DIRAC import gLogger

    # parseCommandLine shows the help message when mandatory arguments are missing or invalid
    args = Script.getPositionalArgs()
    guids = args[0]

    try:
        guids = guids.split(",")
    except Exception:
        pass

    from DIRAC.Resources.Catalog.FileCatalog import FileCatalog

    fc = FileCatalog()
    res = fc.getLFNForGUID(guids)
    if not res["OK"]:
        gLogger.error("Failed to get the LFNs", res["Message"])
        DIRAC.exit(-2)

    errorGuid = {}
    for guid, reason in res["Value"]["Failed"].items():
        errorGuid.setdefault(reason, []).append(guid)

    for error, guidList in errorGuid.items():
        gLogger.notice("Error '%s' for guids %s" % (error, guidList))

    for guid, lfn in res["Value"]["Successful"].items():
        gLogger.notice("%s -> %s" % (guid, lfn))

    DIRAC.exit(0)
Example no. 26
def main():
    args = arguments()  # script-specific argument parsing (helper defined elsewhere); must run before sys.argv is stripped below

    # Strip arguments so command below doesn't throw error
    # DIRAC does not work otherwise
    sys.argv = [sys.argv[0]]
    from DIRAC.Core.Base import Script
    Script.parseCommandLine(ignoreErrors=True)

    if not args.dest.endswith("/") or not args.lfnpath.endswith("/"):
        raise Exception("Destination and/or LFN Path must be a directory " +
                        "ending with a '/'")

    gfal = gfal2.creat_context()
    # # dirac = Dirac()
    fcat = FileCatalog()

    if is_dir(gfal, args.source):
        files = list_files(gfal, args.source)
        print(files)
    else:
        files = [args.source]

    if args.transfer and not args.register:  # Transfer only
        gfal.mkdir_rec(args.dest, 0o755)
        transfer(gfal, files, args.dest)

    elif args.register and not args.transfer:  # Register only
        register(fcat, gfal, files, args.se, args.lfnpath)

    else:  # Transfer AND Register
        gfal.mkdir_rec(args.dest, 0o755)

        regfiles = transfer(gfal, files, args.dest)

        register(fcat, gfal, regfiles, args.se, args.lfnpath)
Example no. 27
  def setUp(self):
    self.fullMetadata = ['Status', 'ChecksumType', 'OwnerRole', 'CreationDate', 'Checksum', 'ModificationDate', 'OwnerDN', 'Mode', 'GUID', 'Size']
    self.dirMetadata = self.fullMetadata + ['NumberOfSubPaths']
    self.fileMetadata = self.fullMetadata + ['NumberOfLinks']

    self.catalog = FileCatalog(catalogs=[catalogClientToTest])
    valid = self.catalog.isOK()
    self.assert_(valid)
    self.destDir = '/lhcb/test/unit-test/TestCatalogPlugin'
    self.link = "%s/link" % self.destDir

    # Clean the existing directory
    self.cleanDirectory()
    res = self.catalog.createDirectory(self.destDir)
    returnValue = self.parseResult(res,self.destDir)

    # Register some files to work with
    self.numberOfFiles = 2
    self.files = []
    for i in range(self.numberOfFiles):
      lfn = "%s/testFile_%d" % (self.destDir,i)
      res = self.registerFile(lfn)
      self.assert_(res)
      self.files.append(lfn)
Example no. 28
 def __transferIfNotRegistered(self, file, transferDict):
     result = self.isRegisteredInOutputCatalog(file, transferDict)
     if not result['OK']:
         self.log.error(result['Message'])
         return result
     #Already registered. Need to delete
     if result['Value']:
         self.log.info(
             "Transfer file %s is already registered in the output catalog"
             % file)
         #Delete
         filePath = os.path.join(transferDict['InputPath'], file)
         if transferDict['InputFC'] == 'LocalDisk':
             os.unlink(filePath)
         else:
             inputFC = FileCatalog([transferDict['InputFC']])
             replicaDict = inputFC.getReplicas(filePath)
             if not replicaDict['OK']:
                 self.log.error("Error deleting file",
                                replicaDict['Message'])
             elif filePath not in replicaDict['Value']['Successful']:
                 self.log.error("Error deleting file",
                                replicaDict['Value']['Failed'][filePath])
             else:
                 seList = replicaDict['Value']['Successful'][filePath].keys()
                 for se in seList:
                     se = StorageElement(se)
                     self.log.info('Removing from %s:' % se.name, filePath)
                     se.removeFile(filePath)
                 inputFC.removeFile(filePath)
         self.log.info("File %s deleted from %s" %
                       (file, transferDict['InputFC']))
         self.__processingFiles.discard(file)
         return S_OK(file)
     #Do the transfer
     return self.__retrieveAndUploadFile(file, transferDict)
Example no. 29
def filterReplicas( opFile, logger = None, dataManager = None ):
  """ filter out banned/invalid source SEs """

  if logger is None:
    logger = gLogger
  if dataManager is None:
    dataManager = DataManager()

  log = logger.getSubLogger( "filterReplicas" )
  ret = { "Valid" : [], "NoMetadata" : [], "Bad" : [], 'NoReplicas':[], 'NoPFN':[] }

  replicas = dataManager.getActiveReplicas( opFile.LFN )
  if not replicas["OK"]:
    log.error( 'Failed to get active replicas', replicas["Message"] )
    return replicas
  reNotExists = re.compile( r".*such file.*" )
  replicas = replicas["Value"]
  failed = replicas["Failed"].get( opFile.LFN , "" )
  if reNotExists.match( failed.lower() ):
    opFile.Status = "Failed"
    opFile.Error = failed
    return S_ERROR( failed )

  replicas = replicas["Successful"].get( opFile.LFN, {} )
  noReplicas = False
  if not replicas:
    allReplicas = dataManager.getReplicas( opFile.LFN )
    if allReplicas['OK']:
      allReplicas = allReplicas['Value']['Successful'].get( opFile.LFN, {} )
      if not allReplicas:
        ret['NoReplicas'].append( None )
        noReplicas = True
      else:
        # We try inactive replicas to see if maybe the file doesn't exist at all
        replicas = allReplicas
      log.warn( "File has no%s replica in File Catalog" % ( '' if noReplicas else ' active' ), opFile.LFN )
    else:
      return allReplicas

  if not opFile.Checksum:
    # Set Checksum to FC checksum if not set in the request
    fcMetadata = FileCatalog().getFileMetadata( opFile.LFN )
    fcChecksum = fcMetadata.get( 'Value', {} ).get( 'Successful', {} ).get( opFile.LFN, {} ).get( 'Checksum' )
    # Replace opFile.Checksum if it doesn't match a valid FC checksum
    if fcChecksum:
      opFile.Checksum = fcChecksum
      opFile.ChecksumType = fcMetadata['Value']['Successful'][opFile.LFN].get( 'ChecksumType', 'Adler32' )

  for repSEName in replicas:
    repSEMetadata = StorageElement( repSEName ).getFileMetadata( opFile.LFN )
    error = repSEMetadata.get( 'Message', repSEMetadata.get( 'Value', {} ).get( 'Failed', {} ).get( opFile.LFN ) )
    if error:
      log.warn( 'unable to get metadata at %s for %s' % ( repSEName, opFile.LFN ), error.replace( '\n', '' ) )
      if 'File does not exist' in error:
        ret['NoReplicas'].append( repSEName )
      else:
        ret["NoMetadata"].append( repSEName )
    elif not noReplicas:
      repSEMetadata = repSEMetadata['Value']['Successful'][opFile.LFN]

      seChecksum = repSEMetadata.get( "Checksum" )
      if not seChecksum and opFile.Checksum:
        opFile.Checksum = None
        opFile.ChecksumType = None
      elif seChecksum and not opFile.Checksum:
        opFile.Checksum = seChecksum
      if not opFile.Checksum or not seChecksum or compareAdler( seChecksum, opFile.Checksum ):
        # # All checksums are OK
        ret["Valid"].append( repSEName )
      else:
        log.warn( " %s checksum mismatch, FC: '%s' @%s: '%s'" % ( opFile.LFN,
                                                              opFile.Checksum,
                                                              repSEName,
                                                              seChecksum ) )
        ret["Bad"].append( repSEName )
    else:
      # If a replica was found somewhere, don't set the file as no replicas
      ret['NoReplicas'] = []

  return S_OK( ret )
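
A short usage sketch for filterReplicas, relying only on the result structure built above; opFile stands for a request Operation file object with an LFN attribute (assumed, not shown in the excerpt):

res = filterReplicas( opFile )
if res['OK']:
  seBuckets = res['Value']
  if seBuckets['Valid']:
    targetSE = seBuckets['Valid'][0]  # any SE whose checksum matched the request
  elif seBuckets['NoReplicas']:
    gLogger.warn( 'No replica found in the catalog for %s' % opFile.LFN )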
Example no. 30
        years = int(switch[1])
    if switch[0].lower() == "w" or switch[0].lower() == "wildcard":
        wildcard = switch[1]
    if switch[0].lower() == "b" or switch[0].lower() == "basedir":
        baseDir = switch[1]
    if switch[0].lower() == "e" or switch[0].lower() == "emptydirs":
        emptyDirsFlag = True

import DIRAC
from DIRAC import gLogger
from DIRAC.ConfigurationSystem.Client.Helpers.Registry import getVOForGroup
from DIRAC.Core.Security.ProxyInfo import getProxyInfo
from DIRAC.Resources.Catalog.FileCatalog import FileCatalog
from datetime import datetime, timedelta
import fnmatch
fc = FileCatalog()


def isOlderThan(cTimeStruct, days):
    timeDelta = timedelta(days=days)
    maxCTime = datetime.utcnow() - timeDelta
    if cTimeStruct < maxCTime:
        return True
    return False


verbose = False
if days or months or years:
    verbose = True
totalDays = 0
if years: