# Imports needed to run this helper standalone (standard DIRAC client API).
import random
import time

from DIRAC import S_OK, S_ERROR
from DIRAC.DataManagementSystem.Client.DataManager import DataManager
from DIRAC.Resources.Storage.StorageElement import StorageElement


def getFile(lfn, se=""):
    dm = DataManager()

    download_ok = 0
    get_active_replicas_ok = False
    lfn_on_se = False
    error_msg = ""
    if se:
        for i in range(0, 5):
            result = dm.getActiveReplicas(lfn)
            if result["OK"] and result["Value"]["Successful"]:
                get_active_replicas_ok = True
                lfnReplicas = result["Value"]["Successful"]
                if se in lfnReplicas[lfn]:
                    lfn_on_se = True
                    break
            time.sleep(3)
            print "- Get replicas for %s failed, try again" % lfn

        if not get_active_replicas_ok:
            return S_ERROR("Get replicas error: %s" % lfn)

    if lfn_on_se:
        se = StorageElement(se)
        # try 5 times
        for j in range(0, 5):
            result = se.getFile(lfn)
            if result["OK"] and result["Value"]["Successful"] and result["Value"]["Successful"].has_key(lfn):
                break
            time.sleep(random.randint(180, 600))
            print "- %s getStorageFile(%s) failed, try again" % (lfn, se)
        if result["OK"]:
            if result["Value"]["Successful"] and result["Value"]["Successful"].has_key(lfn):
                download_ok = 1
            else:
                error_msg = "Downloading %s from SE %s error!" % (lfn, se)
        else:
            error_msg = result["Message"]
    else:
        if se:
            print('File %s not found on SE "%s" after %s tries, trying other SE' % (lfn, se, i + 1))
        # try 5 times
        for j in range(0, 5):
            result = dm.getFile(lfn)
            if result["OK"] and result["Value"]["Successful"] and result["Value"]["Successful"].has_key(lfn):
                break
            time.sleep(random.randint(180, 600))
            print "- getFile(%s) failed, try again" % lfn
        if result["OK"]:
            if result["Value"]["Successful"] and result["Value"]["Successful"].has_key(lfn):
                download_ok = 2
            else:
                error_msg = "Downloading %s from random SE error!" % lfn
        else:
            error_msg = result["Message"]

    if download_ok:
        return S_OK({lfn: {"DownloadOK": download_ok, "Retry": j + 1}})

    return S_ERROR(error_msg)
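# Hedged usage sketch (not part of the original snippet): one way the helper above
# could be called from a job script; the LFN and SE name below are hypothetical.
if __name__ == "__main__":
    res = getFile("/some.vo/user/d/data/events.root", se="CERN-USER")
    if res["OK"]:
        print("Download summary: %s" % res["Value"])
    else:
        print("Download failed: %s" % res["Message"])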
 def __init__(self):
     super(WhizardAnalysis, self).__init__()
     self.enable = True
     self.STEP_NUMBER = ''
     self.debug = True
     self.log = gLogger.getSubLogger("WhizardAnalysis")
     self.SteeringFile = ''
     self.OutputFile = ''
     self.NumberOfEvents = 1
     self.Lumi = 0
     self.applicationName = 'whizard'
     self.evttype = ""
     self.RandomSeed = 0
     self.getProcessInFile = False
     self.datMan = DataManager()
     self.processlist = None
     self.parameters = {}
     self.susymodel = 0
     self.Model = ''
     self.genmodel = GeneratorModels()
     self.eventstring = [
         '! ', 'Fatal error:', 'PYSTOP', 'No matrix element available',
         'Floating point exception', 'Event generation finished.',
         " n_events", "luminosity", "  sum            "
     ]
     self.excludeAllButEventString = False
     self.steeringparameters = ''
     self.options = None
     self.optionsdict = {}
     self.OptionsDictStr = ''
     self.GenLevelCutDictStr = ''
     self.genlevelcuts = {}
     self.willCut = False
     self.useGridFiles = False
 def __init__(self, soft_category):
     """ Constructor
     """
     self.CVMFS_DIR = '/cvmfs/sw.cta-observatory.org/software'
     self.LFN_ROOT = '/vo.cta.in2p3.fr/software'
     self.SOFT_CATEGORY_DICT = soft_category
     self.dm = DataManager()
    def __init__(self, **kwargs):
        GridBackend.__init__(self, catalogue_prefix='', **kwargs)

        from DIRAC.Core.Base import Script
        Script.initialize()
        from DIRAC.FrameworkSystem.Client.ProxyManagerClient import ProxyManagerClient
        self.pm = ProxyManagerClient()

        proxy = self.pm.getUserProxiesInfo()
        if not proxy['OK']:
            raise BackendException("Proxy error.")

        from DIRAC.Interfaces.API.Dirac import Dirac
        self.dirac = Dirac()

        from DIRAC.Resources.Catalog.FileCatalog import FileCatalog
        self.fc = FileCatalog()
        from DIRAC.DataManagementSystem.Client.DataManager import DataManager
        self.dm = DataManager()

        self._xattr_cmd = sh.Command('gfal-xattr').bake(_tty_out=False)
        self._replica_checksum_cmd = sh.Command('gfal-sum').bake(_tty_out=False)
        self._bringonline_cmd = sh.Command('gfal-legacy-bringonline').bake(_tty_out=False)
        self._cp_cmd = sh.Command('gfal-copy').bake(_tty_out=False)
        self._ls_se_cmd = sh.Command('gfal-ls').bake(color='never', _tty_out=False)
        self._move_cmd = sh.Command('gfal-rename').bake(_tty_out=False)
        self._mkdir_cmd = sh.Command('gfal-mkdir').bake(_tty_out=False)

        self._replicate_cmd = sh.Command('dirac-dms-replicate-lfn').bake(_tty_out=False)
        self._add_cmd = sh.Command('dirac-dms-add-file').bake(_tty_out=False)
 def __copyToExternalSE(self, localFilePath, sbPath):
     """
 Copy uploaded file to external SE
 """
     try:
         dm = DataManager()
         result = dm.put(sbPath, localFilePath, self.__externalSEName)
         if not result['OK']:
             return result
         if 'Successful' not in result['Value']:
             gLogger.verbose("Oops, no successful transfers there",
                             str(result))
             return S_ERROR(
                 "RM returned OK to the action but no successful transfers were there"
             )
         okTrans = result['Value']['Successful']
         if sbPath not in okTrans:
             gLogger.verbose(
                 "Ooops, SB transfer wasn't in the successful ones",
                 str(result))
             return S_ERROR(
                 "RM returned OK to the action but SB transfer wasn't in the successful ones"
             )
         return S_OK((self.__externalSEName, okTrans[sbPath]))
     except Exception as e:
         gLogger.error("Error while moving sandbox to SE",
                       "%s" % repr(e).replace(',)', ')'))
         return S_ERROR("Error while moving sandbox to SE")
 def __init__(self):
   super(OverlayInput, self).__init__()
   self.enable = True
   self.STEP_NUMBER = ''
   self.log = gLogger.getSubLogger( "OverlayInput" )
   self.applicationName = 'OverlayInput'
   self.curdir = os.getcwd()
   self.applicationLog = ''
   self.printoutflag = ''
   self.prodid = 0
   self.detector = '' ##needed for backward compatibility
   self.detectormodel = ""
   self.energytouse = ''
   self.energy = 0
   self.nbofeventsperfile = 100
   self.lfns = []
   self.nbfilestoget = 0
   self.BkgEvtType = 'gghad'
   self.metaEventType = self.BkgEvtType
   self.BXOverlay = 0
   self.ggtohadint = 3.2
   self.nbsigeventsperfile = 0
   self.nbinputsigfile = 1
   self.NbSigEvtsPerJob = 0
   self.datMan = DataManager()
   self.fcc = FileCatalogClient()
   self.site = DIRAC.siteName()
   self.useEnergyForFileLookup = True
   self.machine = 'clic_cdr'
   self.pathToOverlayFiles = ''
   self.processorName = ''
def main():
  Script.parseCommandLine()

  args = Script.getPositionalArgs()
  if len(args) != 1:
    Script.showHelp(exitCode=1)

  inputFileName = args[0]

  if os.path.exists(inputFileName):
    lfns = [lfn.strip().split()[0] for lfn in sorted(open(inputFileName, 'r').read().splitlines())]
  else:
    lfns = [inputFileName]

  from DIRAC.DataManagementSystem.Client.DataManager import DataManager
  dm = DataManager()
  retVal = 0
  for lfn in [lfn for lfn in lfns if lfn]:
    gLogger.notice("Cleaning directory %r ... " % lfn)
    result = dm.cleanLogicalDirectory(lfn)
    if not result['OK']:
      gLogger.error('Failed to clean directory', result['Message'])
      retVal = -1
    else:
      if not result['Value']['Failed']:
        gLogger.notice('OK')
      else:
        for folder, message in result['Value']['Failed'].items():
          gLogger.error('Failed to clean folder', "%r: %s" % (folder, message))
          retVal = -1

  DIRACExit(retVal)
  def uploadProcessListToFileCatalog(self, path_to_process_list, appVersion):
    """Upload the new processList to the FileCatalog
    """
    from ILCDIRAC.Core.Utilities.FileUtils                       import upload
    from DIRAC.DataManagementSystem.Client.DataManager           import DataManager
    from DIRAC import gConfig, exit as dexit

    datMan = DataManager()
    LOG.notice("Removing process list from file catalog" + path_to_process_list)
    res = datMan.removeFile(path_to_process_list)
    if not res['OK']:
      LOG.error("Could not remove process list from file catalog, do it by hand")
      dexit(2)
    LOG.notice("Done removing process list from file catalog")

    res = upload(os.path.dirname(path_to_process_list) + "/", self.location )
    if not res['OK']:
      LOG.error("something went wrong in the copy")
      dexit(2)

    LOG.notice("Putting process list to local processlist directory")
    localprocesslistpath = gConfig.getOption("/LocalSite/ProcessListPath", "")
    if localprocesslistpath['Value']:

      try:
        localSvnRepo = "/afs/cern.ch/eng/clic/software/whizard/whizard_195/"
        shutil.copy(self.location, localSvnRepo) ## because it does not make a difference if we hardcode it here or in ${DIRAC}/etc/dirac.cfg, yours truly APS, JFS
      except OSError as err:
        LOG.error("Copy of process list to %s failed with error %s!" % (localSvnRepo, str(err)))

      try:
        subprocess.call( ["svn","ci", os.path.join( localSvnRepo, os.path.basename(localprocesslistpath['Value'] )), "-m'Process list for whizard version %s'" % appVersion ], shell=False )
      except OSError as err:
        LOG.error("Commit failed! Error: %s" % str(err))
  def uploadProcessListToFileCatalog(self, path_to_process_list, appVersion):
    """Upload the new processList to the FileCatalog
    """
    from ILCDIRAC.Core.Utilities.FileUtils                       import upload
    from DIRAC.DataManagementSystem.Client.DataManager           import DataManager
    from DIRAC import gConfig, exit as dexit

    datMan = DataManager()
    gLogger.notice("Removing process list from file catalog" + path_to_process_list)
    res = datMan.removeFile(path_to_process_list)
    if not res['OK']:
      gLogger.error("Could not remove process list from file catalog, do it by hand")
      dexit(2)
    gLogger.notice("Done removing process list from file catalog")

    res = upload(os.path.dirname(path_to_process_list) + "/", self.location )
    if not res['OK']:
      gLogger.error("something went wrong in the copy")
      dexit(2)

    gLogger.notice("Putting process list to local processlist directory")
    localprocesslistpath = gConfig.getOption("/LocalSite/ProcessListPath", "")
    if localprocesslistpath['Value']:

      try:
        localSvnRepo = "/afs/cern.ch/eng/clic/software/whizard/whizard_195/"
        shutil.copy(self.location, localSvnRepo) ## because it does not make a difference if we hardcode it here or in ${DIRAC}/etc/dirac.cfg, yours truly APS, JFS
      except OSError as err:
        gLogger.error("Copy of process list to %s failed with error %s!" % (localSvnRepo, str(err)))

      try:
        subprocess.call( ["svn","ci", os.path.join( localSvnRepo, os.path.basename(localprocesslistpath['Value'] )), "-m'Process list for whizard version %s'" % appVersion ], shell=False )
      except OSError as err:
        gLogger.error("Commit failed! Error: %s" % str(err))
def getFilesToStage( lfnList ):
  """ Utility that returns out of a list of LFNs those files that are offline,
      and those for which at least one copy is online
  """
  onlineLFNs = set()
  offlineLFNsDict = {}

  if not lfnList:
    return S_OK( {'onlineLFNs':list( onlineLFNs ), 'offlineLFNs': offlineLFNsDict} )

  dm = DataManager()

  lfnListReplicas = dm.getActiveReplicas( lfnList, getUrl = False )
  if not lfnListReplicas['OK']:
    return lfnListReplicas

  seToLFNs = dict()

  if lfnListReplicas['Value']['Failed']:
    return S_ERROR( "Failures in getting replicas" )
  for lfn, ld in lfnListReplicas['Value']['Successful'].iteritems():
    for se in ld:
      seToLFNs.setdefault( se, list() ).append( lfn )

  failed = {}
  for se, lfnsInSEList in seToLFNs.iteritems():
    fileMetadata = StorageElement( se ).getFileMetadata( lfnsInSEList )
    if not fileMetadata['OK']:
      failed[se] = dict.fromkeys( lfnsInSEList, fileMetadata['Message'] )
    else:
      failed[se] = fileMetadata['Value']['Failed']
      # is there at least one online?
      for lfn, mDict in fileMetadata['Value']['Successful'].iteritems():
        if mDict['Cached']:
          onlineLFNs.add( lfn )

  # If the file was found staged, ignore possible errors, but print out errors
  if failed:
    for se, seFailed in failed.items():
      gLogger.error( "Errors when getting files metadata", 'at %s' % se )
      for lfn, reason in seFailed.items():
        gLogger.info( '%s: %s' % ( lfn, reason ) )
        if lfn in onlineLFNs:
          failed[se].pop( lfn )
      if not failed[se]:
        failed.pop( se )
    if failed:
      return S_ERROR( 'Could not get metadata for %d files' % \
                      len( set( [lfn for lfnList in failed.values() for lfn in lfnList] ) ) )
  offlineLFNs = set( lfnList ) - onlineLFNs


  for offlineLFN in offlineLFNs:
    ses = lfnListReplicas['Value']['Successful'][offlineLFN].keys()
    random.shuffle( ses )
    se = ses[0]
    offlineLFNsDict.setdefault( se, list() ).append( offlineLFN )

  return S_OK( {'onlineLFNs':list( onlineLFNs ), 'offlineLFNs': offlineLFNsDict} )
def getFilesToStage( lfnList ):
  """ Utility that returns out of a list of LFNs those files that are offline,
      and those for which at least one copy is online
  """
  onlineLFNs = set()
  offlineLFNsDict = {}

  if not lfnList:
    return S_OK( {'onlineLFNs':list( onlineLFNs ), 'offlineLFNs': offlineLFNsDict} )

  dm = DataManager()

  lfnListReplicas = dm.getActiveReplicas( lfnList, getUrl = False )
  if not lfnListReplicas['OK']:
    return lfnListReplicas

  seToLFNs = dict()

  if lfnListReplicas['Value']['Failed']:
    return S_ERROR( "Failures in getting replicas" )
  for lfn, ld in lfnListReplicas['Value']['Successful'].iteritems():
    for se in ld:
      seToLFNs.setdefault( se, list() ).append( lfn )

  failed = {}
  for se, lfnsInSEList in seToLFNs.iteritems():
    fileMetadata = StorageElement( se ).getFileMetadata( lfnsInSEList )
    if not fileMetadata['OK']:
      failed[se] = dict.fromkeys( lfnsInSEList, fileMetadata['Message'] )
    else:
      if fileMetadata['Value']['Failed']:
        failed[se] = fileMetadata['Value']['Failed']
      # is there at least one online?
      for lfn, mDict in fileMetadata['Value']['Successful'].iteritems():
        if 'Cached' not in mDict:
          failed.setdefault( se, {} )[lfn] = 'No Cached item returned as metadata'
        elif mDict['Cached']:
          onlineLFNs.add( lfn )

  # If the file was found staged, ignore possible errors, but print out errors
  for se, seFailed in failed.items():
    gLogger.error( "Errors when getting files metadata", 'at %s' % se )
    for lfn, reason in seFailed.items():
      gLogger.info( '%s: %s' % ( lfn, reason ) )
      if lfn in onlineLFNs:
        failed[se].pop( lfn )
    if not failed[se]:
      failed.pop( se )
  if failed:
    gLogger.error( "Could not get metadata", "for %d files" % len( set( [lfn for lfnList in failed.values() for lfn in lfnList] ) ) )
    return S_ERROR( "Could not get metadata for files" )
  offlineLFNs = set( lfnList ) - onlineLFNs

  for offlineLFN in offlineLFNs:
    ses = lfnListReplicas['Value']['Successful'][offlineLFN].keys()
    if ses:
      offlineLFNsDict.setdefault( random.choice( ses ), list() ).append( offlineLFN )

  return S_OK( {'onlineLFNs':list( onlineLFNs ), 'offlineLFNs': offlineLFNsDict} )
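# Hedged usage sketch (not in the original): splitting a list of LFNs into the
# online set and the per-SE staging groups returned by the helper above; the LFNs
# are hypothetical.
lfns = ["/some.vo/data/run1/file1.dst", "/some.vo/data/run1/file2.dst"]
res = getFilesToStage(lfns)
if res["OK"]:
    print("Online copies exist for: %s" % res["Value"]["onlineLFNs"])
    print("To be staged, grouped by SE: %s" % res["Value"]["offlineLFNs"])
else:
    print("Replica/metadata lookup failed: %s" % res["Message"])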
def doesFileExist(lfn):
    from DIRAC.DataManagementSystem.Client.DataManager import DataManager
    dm = DataManager()
    result = dm.getActiveReplicas(lfn)
    if not result["OK"]:
        return False
    if result["Value"]["Successful"]:
        return True
    else:
        return False
 def __init__(self, soft_category):
     """ Constructor
 """
     self.SW_SHARED_DIR = 'VO_VO_CTA_IN2P3_FR_SW_DIR'
     self.CVMFS_DIR = '/cvmfs/cta.in2p3.fr/software'
     self.LFN_ROOT = '/vo.cta.in2p3.fr/software'
     self.SOFT_CATEGORY_DICT = soft_category
     self.dm = DataManager()
 def _replicate(self, lfn, destinationSE, sourceSE="", localCache=""):
     dm = DataManager()
     result = dm.replicateAndRegister(lfn, destinationSE, sourceSE, '', localCache)
     if not result['OK']:
         print('ERROR %s' % (result['Message']))
         return result
     else:
         return S_OK(result['Value']['Successful'][lfn])
    def __call__(self):
        """ call me maybe """
        # # counter for failed files
        failedFiles = 0
        # # catalog to use
        catalog = self.operation.Catalog
        dm = DataManager(catalogs=catalog)
        # # get waiting files
        waitingFiles = self.getWaitingFilesList()
        # # loop over files
        for opFile in waitingFiles:

            gMonitor.addMark("RegisterAtt", 1)

            # # get LFN
            lfn = opFile.LFN
            # # and others
            fileTuple = (lfn, opFile.PFN, opFile.Size,
                         self.operation.targetSEList[0], opFile.GUID,
                         opFile.Checksum)
            # # call DataManager
            registerFile = dm.registerFile(fileTuple)
            # # check results
            if not registerFile["OK"] or lfn in registerFile["Value"]["Failed"]:

                gMonitor.addMark("RegisterFail", 1)
                self.dataLoggingClient().addFileRecord(lfn, "RegisterFail",
                                                       catalog, "",
                                                       "RegisterFile")

                reason = registerFile.get(
                    "Message",
                    registerFile.get("Value", {}).get("Failed",
                                                      {}).get(lfn, 'Unknown'))
                errorStr = "failed to register LFN %s: %s" % (lfn, reason)
                opFile.Error = errorStr
                self.log.warn(errorStr)
                failedFiles += 1

            else:

                gMonitor.addMark("RegisterOK", 1)
                self.dataLoggingClient().addFileRecord(lfn, "Register",
                                                       catalog, "",
                                                       "RegisterFile")

                self.log.info("file %s has been registered at %s" %
                              (lfn, catalog))
                opFile.Status = "Done"

        # # final check
        if failedFiles:
            self.log.info("all files processed, %s files failed to register" %
                          failedFiles)
            self.operation.Error = "some files failed to register"
            return S_ERROR(self.operation.Error)

        return S_OK()
def main():
    global overwrite
    Script.registerSwitch("f", "force", "Force overwrite of existing file",
                          setOverwrite)
    Script.parseCommandLine(ignoreErrors=True)
    args = Script.getPositionalArgs()
    if len(args) < 1 or len(args) > 4:
        Script.showHelp(exitCode=1)

    from DIRAC.DataManagementSystem.Client.DataManager import DataManager
    from DIRAC import gLogger
    import DIRAC

    exitCode = 0

    lfns = []
    if len(args) == 1:
        inputFileName = args[0]
        if os.path.exists(inputFileName):
            inputFile = open(inputFileName, "r")
            for line in inputFile:
                line = line.rstrip()
                items = line.split()
                items[0] = items[0].replace("LFN:", "").replace("lfn:", "")
                lfns.append(getDict(items))
            inputFile.close()
        else:
            gLogger.error("Error: LFN list '%s' missing." % inputFileName)
            exitCode = 4
    else:
        lfns.append(getDict(args))

    dm = DataManager()
    for lfn in lfns:
        if not os.path.exists(lfn["localfile"]):
            gLogger.error("File %s must exist locally" % lfn["localfile"])
            exitCode = 1
            continue
        if not os.path.isfile(lfn["localfile"]):
            gLogger.error("%s is not a file" % lfn["localfile"])
            exitCode = 2
            continue

        gLogger.notice("\nUploading %s" % lfn["lfn"])
        res = dm.putAndRegister(lfn["lfn"],
                                lfn["localfile"],
                                lfn["SE"],
                                lfn["guid"],
                                overwrite=overwrite)
        if not res["OK"]:
            exitCode = 3
            gLogger.error("Error: failed to upload %s to %s: %s" %
                          (lfn["lfn"], lfn["SE"], res))
            continue
        else:
            gLogger.notice("Successfully uploaded file to %s" % lfn["SE"])

    DIRAC.exit(exitCode)
    def web_getSelectedFiles(self):
        self.set_header('Content-type', 'text/plain')
        arguments = self.request.arguments
        gLogger.always("submit: incoming arguments %s to getSelectedFiles" %
                       arguments)
        tmpdir = '/tmp/eiscat/' + str(time.time()) + str(random.random())
        dataMgr = DataManager(vo=self.vo)
        lfnStr = str(arguments['path'][0])
        if not os.path.isdir(tmpdir): os.makedirs(tmpdir)
        os.chdir(tmpdir)
        for lfn in lfnStr.split(','):
            gLogger.always("Data manager get file %s" % lfn)
            last_slash = lfn.rfind("/")
            pos_relative = lfn.find("/")
            pos_relative = lfn.find("/", pos_relative + 1)
            pos_relative = lfn.find("/", pos_relative + 1)
            pos_relative = pos_relative
            pathInZip = lfn[pos_relative:last_slash]
            tmpPathInZip = tmpdir + pathInZip
            gLogger.always("path in zip %s" % tmpPathInZip)
            if not os.path.isdir(tmpPathInZip): os.makedirs(tmpPathInZip)
            result = dataMgr.getFile(str(lfn),
                                     destinationDir=str(tmpPathInZip))
            if not result["OK"]:
                gLogger.error("getSelectedFiles: %s" % result["Message"])

        #make zip file
        zipname = tmpdir.split('/')[-1] + '.zip'
        zf = zipfile.ZipFile('/tmp/eiscat/' + zipname, "w")
        gLogger.always("zip file /tmp/eiscat/%s" % zipname)
        gLogger.always("start walk in tmpdir %s" % tmpdir)
        for absolutePath, dirs, files in os.walk(tmpdir):
            gLogger.always("absolute path %s" % absolutePath)
            gLogger.always("files %s" % files)
            for filename in files:
                # relative path form tmpdir current chdir
                pos_relative = absolutePath.find("/")
                pos_relative = absolutePath.find("/", pos_relative + 1)
                pos_relative = absolutePath.find("/", pos_relative + 1)
                pos_relative = absolutePath.find("/", pos_relative + 1)
                pos_relative = pos_relative + 1
                relativePath = absolutePath[pos_relative:]
                gLogger.always("relativePath %s, file %s" %
                               (relativePath, filename))
                zf.write(os.path.join(relativePath, filename))
        zf.close()
        #read zip file
        f = open('/tmp/eiscat/' + zipname, "rb")
        obj = f.read()
        f.close()
        #cleanup
        shutil.rmtree(tmpdir)
        os.remove('/tmp/eiscat/' + zipname)
        self.set_header('Content-Disposition',
                        'attachment; filename="' + zipname)

        self.write(obj)
    def _get_file(self, lfn):
        dm = DataManager()
        result = dm.getFile(lfn, "")
        if not result['OK']:
            return S_ERROR(result['Message'])

        if result['Value']['Failed']:
            return S_ERROR(result['Value'])
        return result
 def _remove_file(self, lfn):
     dm = DataManager()
     res = dm.removeFile([lfn])
     if not res['OK']:
         gLogger.error("Failed to remove data", res['Message'])
         return res
     if lfn in res['Value']['Successful']:
         return S_OK(res['Value']['Successful'])
     return S_ERROR(res['Value']['Failed'])
 def __init__(self, soft_category={"corsika_simhessarray": "simulations"}):
     """ Constructor
 """
     self.SW_SHARED_DIR = 'VO_VO_CTA_IN2P3_FR_SW_DIR'
     self.CVMFS_DIR = '/cvmfs/cta.in2p3.fr/software'
     self.LFN_ROOT = '/vo.cta.in2p3.fr/software'
     # self.SOFT_CATEGORY_DICT = {"corsika_simhessarray":"simulations"}
     self.SOFT_CATEGORY_DICT = soft_category
     self.dm = DataManager()
  def __call__( self ):
    """ call me maybe """
    # # counter for failed files
    failedFiles = 0
    # # catalog(s) to use
    catalogs = self.operation.Catalog
    if catalogs:
      catalogs = [ cat.strip() for cat in catalogs.split( ',' ) ]
    dm = DataManager( catalogs = catalogs )
    # # get waiting files
    waitingFiles = self.getWaitingFilesList()
    # # loop over files
    for opFile in waitingFiles:

      gMonitor.addMark( "RegisterAtt", 1 )

      # # get LFN
      lfn = opFile.LFN
      # # and others
      fileTuple = ( lfn , opFile.PFN, opFile.Size, self.operation.targetSEList[0], opFile.GUID, opFile.Checksum )
      # # call DataManager
      registerFile = dm.registerFile( fileTuple )
      # # check results
      if not registerFile["OK"] or lfn in registerFile["Value"]["Failed"]:

        gMonitor.addMark( "RegisterFail", 1 )
#        self.dataLoggingClient().addFileRecord( lfn, "RegisterFail", ','.join( catalogs ) if catalogs else "all catalogs", "", "RegisterFile" )

        reason = str( registerFile.get( "Message", registerFile.get( "Value", {} ).get( "Failed", {} ).get( lfn, 'Unknown' ) ) )
        errorStr = "failed to register LFN"
        opFile.Error = "%s: %s" % ( errorStr, reason )
        if 'GUID already registered' in reason:
          opFile.Status = 'Failed'
          self.log.error( errorStr, "%s: %s" % ( lfn, reason ) )
        elif 'File already registered with no replicas' in reason:
          self.log.warn( errorStr, "%s: %s, will remove it and retry" % ( lfn, reason ) )
          dm.removeFile( lfn )
        else:
          self.log.warn( errorStr, "%s: %s" % ( lfn, reason ) )
        failedFiles += 1

      else:

        gMonitor.addMark( "RegisterOK", 1 )
#        self.dataLoggingClient().addFileRecord( lfn, "Register", ','.join( catalogs ) if catalogs else "all catalogs", "", "RegisterFile" )

        self.log.verbose( "file %s has been registered at %s" % ( lfn, ','.join( catalogs ) if catalogs else "all catalogs" ) )
        opFile.Status = "Done"

    # # final check
    if failedFiles:
      self.log.warn( "all files processed, %s files failed to register" % failedFiles )
      self.operation.Error = "some files failed to register"
      return S_ERROR( self.operation.Error )

    return S_OK()
  def setUp( self ):
    super( FailingUserJobTestCase, self ).setUp()

    dm = DataManager()
    res = dm.removeFile( ['/lhcb/testCfg/testVer/LOG/00012345/0006/00012345_00067890.tar',
                          '/lhcb/testCfg/testVer/SIM/00012345/0006/00012345_00067890_1.sim'],
                        force = True )
    if not res['OK']:
      print("Could not remove files", res['Message'])
      exit( 1 )
def doesFileExist(lfn):
    from DIRAC.DataManagementSystem.Client.DataManager import DataManager

    dm = DataManager()
    result = dm.getActiveReplicas(lfn)
    if not result["OK"]:
        return False
    if result["Value"]["Successful"]:
        return True
    #        print 'File exists.'
    else:
        return False
  def setUp( self ):
    super( IntegrationTest, self ).setUp()

    dm = DataManager()
    res = dm.removeFile( ['/lhcb/testCfg/testVer/LOG/00012345/0006/00012345_00067890.tar',
                          '/lhcb/testCfg/testVer/SIM/00012345/0006/00012345_00067890_1.sim'],
                        force = True )
    if not res['OK']:
      print "Could not remove files", res['Message']
      exit( 1 )
def doesFileExist(lfn):
    from DIRAC.DataManagementSystem.Client.DataManager import DataManager
    dm = DataManager()
    result = dm.getActiveReplicas(lfn)
    if not result['OK']:
        print "ERROR",result['Message']
        return False
    if lfn in result['Value']['Successful']:
        return True
    else:
        return False
    def initialize(self):
        self.fileCatalog = FileCatalog()
        self.dm = DataManager()
        self.stagerClient = StorageManagerClient()
        self.dataIntegrityClient = DataIntegrityClient()
        # This sets the Default Proxy to used as that defined under
        # /Operations/Shifter/DataManager
        # the shifterProxy option in the Configuration can be used to change this default.
        self.am_setOption("shifterProxy", "DataManager")

        return S_OK()
    def __init__(self, trans, transID, lfns, asIfProd):
        self.trans = trans
        self.transID = transID
        from DIRAC.TransformationSystem.Client.TransformationClient import TransformationClient
        self.transClient = TransformationClient()
        from LHCbDIRAC.BookkeepingSystem.Client.BookkeepingClient import BookkeepingClient
        self.bk = BookkeepingClient()
        from DIRAC.DataManagementSystem.Client.DataManager import DataManager
        self.dm = DataManager()
        self.asIfProd = asIfProd

        (self.transFiles, self.transReplicas) = self.prepareForPlugin(lfns)
def getFilesToStage(lfnList):
    """ Utility that returns out of a list of LFNs those files that are offline,
      and those for which at least one copy is online
  """
    onlineLFNs = set()
    offlineLFNsDict = {}

    if not lfnList:
        return S_OK({
            'onlineLFNs': list(onlineLFNs),
            'offlineLFNs': offlineLFNsDict
        })

    dm = DataManager()

    lfnListReplicas = dm.getActiveReplicas(lfnList)
    if not lfnListReplicas['OK']:
        return lfnListReplicas

    seObjectsDict = dict()
    seToLFNs = dict()

    if lfnListReplicas['Value']['Failed']:
        return S_ERROR("Failures in getting replicas")
    for lfn, ld in lfnListReplicas['Value']['Successful'].iteritems():
        for se, _ in ld.iteritems():
            seObjectsDict.setdefault(se, StorageElement(se))
            seToLFNs.setdefault(se, list()).append(lfn)

    for se, lfnsInSEList in seToLFNs.iteritems():
        fileMetadata = seObjectsDict[se].getFileMetadata(lfnsInSEList)
        if not fileMetadata['OK']:
            return fileMetadata

        if fileMetadata['Value']['Failed']:
            return S_ERROR("Failures in getting file metadata")
        # is there at least one online?
        for lfn, mDict in fileMetadata['Value']['Successful'].iteritems():
            if mDict['Cached']:
                onlineLFNs.add(lfn)

    offlineLFNs = set(lfnList).difference(onlineLFNs)

    for offlineLFN in offlineLFNs:
        ses = lfnListReplicas['Value']['Successful'][offlineLFN].keys()
        random.shuffle(ses)
        se = ses[0]
        offlineLFNsDict.setdefault(se, list()).append(offlineLFN)

    return S_OK({
        'onlineLFNs': list(onlineLFNs),
        'offlineLFNs': offlineLFNsDict
    })
 def __init__(self):
     '''Constructor'''
     super(MoveInFC, self).__init__()
     self.enable = False
     self.STEP_NUMBER = ''
     self.log = gLogger.getSubLogger("MoveInFC")
     self.applicationName = 'MoveInFC'
     self.repMan = DataManager()
     self.listoutput = {}
     self.outputpath = ''
    def setUp(self):
        super(IntegrationTest, self).setUp()

        dm = DataManager()
        res = dm.removeFile([
            '/lhcb/testCfg/testVer/LOG/00012345/0006/00012345_00067890.tar',
            '/lhcb/testCfg/testVer/SIM/00012345/0006/00012345_00067890_1.sim'
        ],
                            force=True)
        if not res['OK']:
            print "Could not remove files", res['Message']
            exit(1)
    def _add_file(self, lfn, localfile, SE, guid=None):
        dm = DataManager()
        self._create_test_file()
        if not os.path.exists(self.options['test_file']):
            gLogger.error("File %s must exist locally" % localfile)
        if not os.path.isfile(self.options['test_file']):
            gLogger.error("%s is not a file" % localfile)

        res = dm.putAndRegister(lfn, localfile, SE, guid)
        if not res['OK']:
            gLogger.error('Error: failed to upload %s to %s' % (lfn, SE))
            return S_ERROR(res['Message'])
        return S_OK(res['Value']['Successful'][lfn])
def downloadFile(lfn):
    """ Download a file using DMS
    Keyword arguments:
    lfn -- a logical file name
    """
    DIRAC.gLogger.info('Downloading ',lfn)
    dm = DataManager()
    res=dm.getFile(lfn)
    if not res['OK']:
        DIRAC.gLogger.error ( res['Message'] )
        DIRAC.gLogger.error ( 'Could not download %s'%lfn )
        DIRAC.exit( -1 )
    return DIRAC.S_OK()
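# Hedged usage sketch (not in the original): fetching a single LFN into the current
# working directory with the helper above; the LFN is hypothetical.
if __name__ == "__main__":
    downloadFile('/vo.example.org/user/d/someuser/events.root')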
def removeOutputData(baseDir, logLevel="INFO"):
    gLogger.setLevel(logLevel)
    res = getProxyInfo(False, False)
    if not res["OK"]:
        gLogger.error("Failed to get client proxy information.", res["Message"])
        return {"OK": False, "Message": "Failed to get client proxy information: %s" % str(res["Message"]), "RC": 71}

    # ######################################################################################################## #
    rm = DataManager()
    try:
        result = rm.cleanLogicalDirectory(baseDir)
    except KeyError as ke:
        return {"OK": False, "Message": "Caught key error, full stacktrace below\n%s" % str(ke), "RC": 137}
def main():
    # Registering arguments will automatically add their description to the help menu
    Script.registerArgument(("LocalFile: Path to local file containing LFNs",
                             "LFN:       Logical File Names"))
    Script.registerArgument(" SE:        Storage element")
    Script.parseCommandLine()

    from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations

    allowUsers = Operations().getValue(
        "DataManagement/AllowUserReplicaManagement", False)

    from DIRAC.Core.Security.ProxyInfo import getProxyInfo

    res = getProxyInfo()
    if not res["OK"]:
        gLogger.fatal("Can't get proxy info", res["Message"])
        dexit(1)
    properties = res["Value"].get("groupProperties", [])

    if not allowUsers:
        if "FileCatalogManagement" not in properties:
            gLogger.error(
                "You need to use a proxy from a group with FileCatalogManagement"
            )
            dexit(5)

    from DIRAC.DataManagementSystem.Client.DataManager import DataManager

    dm = DataManager()
    # parseCommandLine show help when mandatory arguments are not specified or incorrect argument
    inputFileName, storageElementName = Script.getPositionalArgs(group=True)

    if os.path.exists(inputFileName):
        inputFile = open(inputFileName, "r")
        string = inputFile.read()
        lfns = [lfn.strip() for lfn in string.splitlines()]
        inputFile.close()
    else:
        lfns = [inputFileName]

    res = dm.removeReplicaFromCatalog(storageElementName, lfns)
    if not res["OK"]:
        print(res["Message"])
        dexit(0)
    for lfn in sorted(res["Value"]["Failed"]):
        message = res["Value"]["Failed"][lfn]
        print("Failed to remove %s replica of %s: %s" %
              (storageElementName, lfn, message))
    print("Successfully remove %d catalog replicas at %s" %
          (len(res["Value"]["Successful"]), storageElementName))
    def setUp(self):
        super(FailingUserJobTestCase, self).setUp()

        dm = DataManager()
        res = dm.removeFile(
            [
                "/lhcb/testCfg/testVer/LOG/00012345/0006/00012345_00067890.tar",
                "/lhcb/testCfg/testVer/SIM/00012345/0006/00012345_00067890_1.sim",
            ],
            force=True,
        )
        if not res["OK"]:
            print("Could not remove files", res["Message"])
            exit(1)
    def __init__(self, url=None, useCertificates=False, **kwargs):
        """c'tor

    :param self: self reference
    :param bool useCertificates: flag to enable/disable certificates
    """
        super(FTSClient, self).__init__(**kwargs)
        self.log = gLogger.getSubLogger("DataManagement/FTSClient")
        self.serverURL = 'DataManagement/FTSManager' if not url else url

        # getting other clients
        self.ftsValidator = FTSValidator()
        self.dataManager = DataManager()
        self.storageFactory = StorageFactory()
    def setUp(self):
        super(FailingUserJobTestCase, self).setUp()

        dm = DataManager()
        res = dm.removeFile(
            [
                "/lhcb/testCfg/testVer/LOG/00012345/0006/00012345_00067890.tar",
                "/lhcb/testCfg/testVer/SIM/00012345/0006/00012345_00067890_1.sim",
            ],
            force=True,
        )
        if not res["OK"]:
            print "Could not remove files", res["Message"]
            exit(1)
def main():
    dm = DataManager()

    fileTupleBuffer = []

    counter = 0
    for f in files:
        counter += 1

        if not f.startswith('/cefs'):
            gLogger.error('File must be under "/cefs"')
            return 1

        lfn = '/cepc/lustre-ro' + f

        result = fcc.isFile(lfn)
        if result['OK'] and lfn in result['Value']['Successful'] and result[
                'Value']['Successful'][lfn]:
            continue

        size = os.path.getsize(f)
        adler32 = fileAdler(f)
        guid = makeGuid()
        fileTuple = (lfn, f, size, _se, guid, adler32)
        fileTupleBuffer.append(fileTuple)
        gLogger.debug('Register to lfn: %s' % lfn)
        gLogger.debug('fileTuple: %s' % (fileTuple, ))

        if len(fileTupleBuffer) >= _bufferSize:
            result = dm.registerFile(fileTupleBuffer)
            print('register result', result)

            if not result['OK']:
                gLogger.error('Register file failed')
                return 1
            del fileTupleBuffer[:]
            gLogger.debug('%s files registered' % counter)

    if fileTupleBuffer:
        result = dm.registerFile(fileTupleBuffer)
        print('register result', result)

        if not result['OK']:
            gLogger.error('Register file failed')
            return 1
        del fileTupleBuffer[:]

    gLogger.info('In total %s files registered' % counter)
    return 0
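# Hedged sketch (not in the original): registering a single existing replica with
# DataManager.registerFile(), following the (lfn, pfn, size, se, guid, checksum)
# tuple layout used above. The fileAdler/makeGuid import paths are assumptions.
import os
from DIRAC.Core.Utilities.Adler import fileAdler  # assumed import path
from DIRAC.Core.Utilities.File import makeGuid    # assumed import path
from DIRAC.DataManagementSystem.Client.DataManager import DataManager


def registerOneFile(localPath, lfn, seName):
    # Build the file tuple expected by registerFile() and register it in the catalog.
    fileTuple = (lfn, localPath, os.path.getsize(localPath), seName, makeGuid(), fileAdler(localPath))
    return DataManager().registerFile(fileTuple)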
def getFilesToStage( lfnList ):
  """ Utility that returns out of a list of LFNs those files that are offline,
      and those for which at least one copy is online
  """
  onlineLFNs = set()
  offlineLFNsDict = {}

  if not lfnList:
    return S_OK( {'onlineLFNs':list( onlineLFNs ), 'offlineLFNs': offlineLFNsDict} )
  
  dm = DataManager()

  lfnListReplicas = dm.getActiveReplicas( lfnList )
  if not lfnListReplicas['OK']:
    return lfnListReplicas

  seObjectsDict = dict()
  seToLFNs = dict()
  
  if lfnListReplicas['Value']['Failed']:
    return S_ERROR( "Failures in getting replicas" )
  for lfn, ld in lfnListReplicas['Value']['Successful'].iteritems():
    for se, _ in ld.iteritems():
      seObjectsDict.setdefault( se, StorageElement( se ) )
      seToLFNs.setdefault( se, list() ).append( lfn )

  for se, lfnsInSEList in seToLFNs.iteritems():
    fileMetadata = seObjectsDict[se].getFileMetadata( lfnsInSEList )
    if not fileMetadata['OK']:
      return fileMetadata

    if fileMetadata['Value']['Failed']:
      return S_ERROR( "Failures in getting file metadata" )
    # is there at least one online?
    for lfn, mDict in fileMetadata['Value']['Successful'].iteritems():
      if mDict['Cached']:
        onlineLFNs.add( lfn )

  offlineLFNs = set( lfnList ).difference( onlineLFNs )
  

  for offlineLFN in offlineLFNs:
    ses = lfnListReplicas['Value']['Successful'][offlineLFN].keys()
    random.shuffle( ses )
    se = ses[0]
    offlineLFNsDict.setdefault( se, list() ).append( offlineLFN )
  
  return S_OK( {'onlineLFNs':list( onlineLFNs ), 'offlineLFNs': offlineLFNsDict} )
def main():
    Script.parseCommandLine()

    from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
    allowUsers = Operations().getValue(
        "DataManagement/AllowUserReplicaManagement", False)

    from DIRAC.Core.Security.ProxyInfo import getProxyInfo
    res = getProxyInfo()
    if not res['OK']:
        gLogger.fatal("Can't get proxy info", res['Message'])
        dexit(1)
    properties = res['Value'].get('groupProperties', [])

    if not allowUsers:
        if 'FileCatalogManagement' not in properties:
            gLogger.error(
                "You need to use a proxy from a group with FileCatalogManagement"
            )
            dexit(5)

    from DIRAC.DataManagementSystem.Client.DataManager import DataManager
    dm = DataManager()
    args = Script.getPositionalArgs()
    if len(args) < 2:
        Script.showHelp(exitCode=1)
    else:
        inputFileName = args[0]
        storageElementName = args[1]

    if os.path.exists(inputFileName):
        inputFile = open(inputFileName, 'r')
        string = inputFile.read()
        lfns = [lfn.strip() for lfn in string.splitlines()]
        inputFile.close()
    else:
        lfns = [inputFileName]

    res = dm.removeReplicaFromCatalog(storageElementName, lfns)
    if not res['OK']:
        print(res['Message'])
        dexit(0)
    for lfn in sorted(res['Value']['Failed']):
        message = res['Value']['Failed'][lfn]
        print('Failed to remove %s replica of %s: %s' %
              (storageElementName, lfn, message))
    print('Successfully removed %d catalog replicas at %s' %
          (len(res['Value']['Successful']), storageElementName))
def addDataFiles(args):

    fcc = FileCatalogClient()
    dm = DataManager(['FileCatalog', 'TSCatalog'])

    outputPath = args[0]
    outputPattern = args[1]
    outputSE = args[2]
    metadata = args[3]
    metadata = json.loads(metadata)

    ## Create path
    res = fcc.createDirectory(outputPath)
    if not res['OK']:
        return res

    ##  Set metadata
    for key, value in metadata.items():
        res = fcc.setMetadata(outputPath, {key: value})
        # terrible hack
        # if not res['OK']:
        #  return res

    ## Upload data files
    all_files = glob.glob(outputPattern)

    ## Check that data files exist
    if len(all_files) == 0:
        return DIRAC.S_ERROR('No data files found')

    for one_file in all_files:
        lfn = os.path.join(outputPath, one_file)
        msg = 'Try to upload local file: %s \nwith LFN: %s \nto %s' % (
            one_file, lfn, outputSE)
        DIRAC.gLogger.notice(msg)
        res = dm.putAndRegister(lfn, one_file, outputSE)
        ##  Check if failed
        if not res['OK']:
            DIRAC.gLogger.error(
                'Failed to putAndRegister %s \nto %s \nwith message: %s' %
                (lfn, outputSE, res['Message']))
            return res
        elif lfn in res['Value']['Failed']:
            DIRAC.gLogger.error('Failed to putAndRegister %s to %s' %
                                (lfn, outputSE))
            return res

    return DIRAC.S_OK()
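# Hedged usage sketch (not in the original): invoking the helper above with a
# hypothetical output path, glob pattern, SE name and JSON metadata string.
res = addDataFiles(['/vo.example.org/prod/output', '*.simtel.gz', 'CC-IN2P3-USER', '{"site": "paranal"}'])
if not res['OK']:
    DIRAC.gLogger.error(res['Message'])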
 def __init__(self):
   super(WhizardAnalysis, self).__init__()
   self.enable = True
   self.STEP_NUMBER = ''
   self.debug = True
   self.SteeringFile = ''
   self.OutputFile = ''
   self.NumberOfEvents = 1
   self.Lumi = 0
   self.applicationName = 'whizard'
   self.evttype = ""
   self.RandomSeed = 0
   self.getProcessInFile = False
   self.datMan = DataManager()
   self.processlist = None
   self.parameters = {}
   self.susymodel = 0
   self.Model = ''
   self.genmodel = GeneratorModels()
   self.eventstring = ['! ', 'Fatal error:', 'PYSTOP', 'No matrix element available',
                       'Floating point exception', 'Event generation finished.', " n_events","luminosity", 
                       "  sum            "]
   self.excludeAllButEventString = False
   self.steeringparameters = ''
   self.options = None
   self.optionsdict = {}
   self.OptionsDictStr = ''
   self.GenLevelCutDictStr = ''
   self.genlevelcuts = {}
   self.willCut = False
   self.useGridFiles = False
def main():
    Script.parseCommandLine()

    from DIRAC import gLogger
    from DIRAC.DataManagementSystem.Client.DataManager import DataManager

    args = Script.getPositionalArgs()
    if not len(args) == 2:
        Script.showHelp(exitCode=1)
    else:
        inputFileName = args[0]
        storageElement = args[1]

    if os.path.exists(inputFileName):
        inputFile = open(inputFileName, 'r')
        string = inputFile.read()
        lfns = [lfn.strip() for lfn in string.splitlines()]
        inputFile.close()
    else:
        lfns = [inputFileName]

    res = DataManager().getReplicaMetadata(lfns, storageElement)
    if not res['OK']:
        print('Error:', res['Message'])
        DIRACExit(1)

    print('%s %s %s %s' % ('File'.ljust(100), 'Migrated'.ljust(8),
                           'Cached'.ljust(8), 'Size (bytes)'.ljust(10)))
    for lfn, metadata in res['Value']['Successful'].items():
        print('%s %s %s %s' %
              (lfn.ljust(100), str(metadata['Migrated']).ljust(8),
               str(metadata.get('Cached', metadata['Accessible'])).ljust(8),
               str(metadata['Size']).ljust(10)))
    for lfn, reason in res['Value']['Failed'].items():
        print('%s %s' % (lfn.ljust(100), reason.ljust(8)))
    def cleanMetadataCatalogFiles(self, transID):
        """ wipe out files from catalog """
        res = self.metadataClient.findFilesByMetadata(
            {self.transfidmeta: transID})
        if not res['OK']:
            return res
        fileToRemove = res['Value']
        if not fileToRemove:
            self.log.info('No files found for transID %s' % transID)
            return S_OK()

        # Executing with shifter proxy
        gConfigurationData.setOptionInCFG(
            '/DIRAC/Security/UseServerCertificate', 'false')
        res = DataManager().removeFile(fileToRemove, force=True)
        gConfigurationData.setOptionInCFG(
            '/DIRAC/Security/UseServerCertificate', 'true')

        if not res['OK']:
            return res
        for lfn, reason in res['Value']['Failed'].items():
            self.log.error("Failed to remove file found in metadata catalog",
                           "%s %s" % (lfn, reason))
        if res['Value']['Failed']:
            return S_ERROR(
                "Failed to remove all files found in the metadata catalog")
        self.log.info("Successfully removed all files found in the BK")
        return S_OK()
 def __init__(self):
   super(OverlayInput, self).__init__()
   self.enable = True
   self.STEP_NUMBER = ''
   self.applicationName = 'OverlayInput'
   self.curdir = os.getcwd()
   self.applicationLog = ''
   self.printoutflag = ''
   self.prodid = 0
   self.detector = '' ##needed for backward compatibility
   self.detectormodel = ""
   self.energytouse = ''
   self.energy = 0
   self.nbofeventsperfile = 100
   self.lfns = []
   self.nbfilestoget = 0
   self.BkgEvtType = 'gghad'
   self.metaEventType = self.BkgEvtType
   self.BXOverlay = 0
   self.ggtohadint = 3.2
   self.nbsigeventsperfile = 0
   self.nbinputsigfile = 1
   self.NbSigEvtsPerJob = 0
   self.datMan = DataManager()
   self.fcc = FileCatalogClient()
   self.site = DIRAC.siteName()
   self.useEnergyForFileLookup = True
   self.machine = 'clic_cdr'
   self.pathToOverlayFiles = ''
   self.processorName = ''
  def __call__( self ):
    """ call me maybe """
    # # counter for failed files
    failedFiles = 0
    # # catalog to use
    catalog = self.operation.Catalog
    dm = DataManager( catalogs = catalog )
    # # get waiting files
    waitingFiles = self.getWaitingFilesList()
    # # loop over files
    for opFile in waitingFiles:

      gMonitor.addMark( "RegisterAtt", 1 )

      # # get LFN
      lfn = opFile.LFN
      # # and others
      fileTuple = ( lfn , opFile.PFN, opFile.Size, self.operation.targetSEList[0], opFile.GUID, opFile.Checksum )
      # # call DataManager
      registerFile = dm.registerFile( fileTuple )
      # # check results
      if not registerFile["OK"] or lfn in registerFile["Value"]["Failed"]:

        gMonitor.addMark( "RegisterFail", 1 )
        self.dataLoggingClient().addFileRecord( lfn, "RegisterFail", catalog, "", "RegisterFile" )

        reason = registerFile.get( "Message", registerFile.get( "Value", {} ).get( "Failed", {} ).get( lfn, 'Unknown' ) )
        errorStr = "failed to register LFN %s: %s" % ( lfn, reason )
        opFile.Error = errorStr
        self.log.warn( errorStr )
        failedFiles += 1

      else:

        gMonitor.addMark( "RegisterOK", 1 )
        self.dataLoggingClient().addFileRecord( lfn, "Register", catalog, "", "RegisterFile" )

        self.log.info( "file %s has been registered at %s" % ( lfn, catalog ) )
        opFile.Status = "Done"

    # # final check
    if failedFiles:
      self.log.info( "all files processed, %s files failed to register" % failedFiles )
      self.operation.Error = "some files failed to register"
      return S_ERROR( self.operation.Error )

    return S_OK()
 def __prepareFileForHTTP( self, lfn, key ):
   """ proxied preapre file for HTTP """
   global HTTP_PATH
   
   res = self.__prepareSecurityDetails()
   if not res['OK']:
     return res
 
   # Clear the local cache
   getFileDir = "%s/%s" % ( HTTP_PATH, key )
   os.makedirs(getFileDir)
  
   # Get the file to the cache 
   from DIRAC.DataManagementSystem.Client.DataManager import DataManager
   dataMgr = DataManager()
   result = dataMgr.getFile( lfn, destinationDir = getFileDir )
   result['CachePath'] = getFileDir
   return result  
 def __copyToExternalSE( self, localFilePath, sbPath ):
   """
   Copy uploaded file to external SE
   """
   try:
     dm = DataManager()
     result = dm.put( sbPath, localFilePath, self.__externalSEName )
     if not result[ 'OK' ]:
       return result
     if 'Successful' not in result[ 'Value' ]:
       gLogger.verbose( "Oops, no successful transfers there", str( result ) )
       return S_ERROR( "RM returned OK to the action but no successful transfers were there" )
     okTrans = result[ 'Value' ][ 'Successful' ]
     if sbPath not in okTrans:
       gLogger.verbose( "Ooops, SB transfer wasn't in the successful ones", str( result ) )
       return S_ERROR( "RM returned OK to the action but SB transfer wasn't in the successful ones" )
     return S_OK( ( self.__externalSEName, okTrans[ sbPath ] ) )
   except Exception as e:
     return S_ERROR( "Error while moving sandbox to SE: %s" % str( e ) )
  def initialize( self ):
    self.fileCatalog = FileCatalog()
    self.dm = DataManager()
    self.stagerClient = StorageManagerClient()
    self.dataIntegrityClient = DataIntegrityClient()
    # This sets the Default Proxy to used as that defined under
    # /Operations/Shifter/DataManager
    # the shifterProxy option in the Configuration can be used to change this default.
    self.am_setOption( 'shifterProxy', 'DataManager' )

    return S_OK()
 def __init__( self, args = None, apis = None ):
   super( SETest, self ).__init__( args, apis )
   
   self.__lfnPath = '/bes/user/z/zhaoxh/'
   self.__testFile = 'test.dat'
   self.__localPath = '/tmp/'
   
   if 'DataManager' in self.apis:
     self.dm = self.apis[ 'DataManager' ]
   else:
     self.dm = DataManager()
 def __init__(self):
   '''
   Constructor
   '''
   super(MoveInFC, self).__init__()
   self.enable = False
   self.STEP_NUMBER = ''
   self.applicationName = 'MoveInFC'
   self.repMan = DataManager()
   self.listoutput = {}
   self.outputpath = ''
    def finish(self):
        """
        after having set all the files, this one does all the job
        @return:
        """
        rc = 0
        rm = DataManager()
        for item in self.listFileStaged:
            # print("SE '"+self.SE+"' == : '"+str(self.SE == "False")+"'")
            if not self.SE:
                self.log.info("No SE available for '" + item[0] + "'")
                rc += 1
                continue
            else:
                self.log.info("Trying to store '" + item[0] + "' in SE : '" + self.SE + "' ...")
                result = rm.putAndRegister(item[1], item[0], self.SE)
                if not result["OK"]:
                    self.log.info("ERROR %s" % (result["Message"]))

                    self.log.info("Wait 5sec before trying again...")
                    time.sleep(5)
                    result = rm.putAndRegister(item[1], item[0], self.SE)
                    if not result["OK"]:
                        self.log.info("ERROR %s" % (result["Message"]))
                        while not result["OK"]:
                            self.listSEs.remove(self.SE)  # make sure not to pick the same SE again.
                            self.__pickRandomSE()
                            if not self.SE:
                                rc += 1
                                break
                            self.log.info("Trying with another SE : '" + self.SE + "' . In 5sec...")
                            time.sleep(5)
                            result = rm.putAndRegister(item[1], item[0], self.SE)
                            if result["OK"]:
                                self.log.info("file stored : '" + item[1] + "' in '" + self.SE + "'")
                            else:
                                self.log.error("ERROR : failed to store the file '" + item[1] + "' ...")
                                rc += 1

        return rc
  def getProcessList(self): 
    """ Get the :mod:`ProcessList <ILCDIRAC.Core.Utilities.ProcessList.ProcessList>`
    needed by :mod:`Whizard <ILCDIRAC.Interfaces.API.NewInterface.Applications.Whizard>`.

    :return: process list object
    """
    processlistpath = gConfig.getValue("/LocalSite/ProcessListPath", "")
    if not processlistpath:
      gLogger.info('Will download the process list locally. To gain time, please put it somewhere and add to '
                   'your dirac.cfg the entry /LocalSite/ProcessListPath pointing to the file')
      pathtofile = self.ops.getValue("/ProcessList/Location", "")
      if not pathtofile:
        gLogger.error("Could not get path to process list")
        processlist = ""
      else:
        datMan = DataManager()
        datMan.getFile(pathtofile)
        processlist = os.path.basename(pathtofile)   
    else:
      processlist = processlistpath
    self.processList = ProcessList(processlist)
    return self.processList
 def __init__(self):
   super(Whizard2Analysis, self).__init__()
   self.enable = True
   self.STEP_NUMBER = ''
   self.result = S_ERROR()
   self.applicationName = 'whizard2'
   self.startFrom = 0
   self.randomSeed = -1
   self.whizard2SinFile = ''
   self.eventstring = ['+++ Generating event']
   self.decayProc = ['decay_proc']
   self.integratedProcess = ''
   self.datMan = DataManager()
  def initialize( self ):
    """ agent initialisation

    reading and setting config options

    :param self: self reference
    """
    # # shifter proxy
    self.am_setOption( 'shifterProxy', 'DataManager' )
    # # transformations types
    self.dataProcTTypes = Operations().getValue( 'Transformations/DataProcessing', ['MCSimulation', 'Merge'] )
    self.dataManipTTypes = Operations().getValue( 'Transformations/DataManipulation', ['Replication', 'Removal'] )
    agentTSTypes = self.am_getOption( 'TransformationTypes', [] )
    if agentTSTypes:
      self.transformationTypes = sorted( agentTSTypes )
    else:
      self.transformationTypes = sorted( self.dataProcTTypes + self.dataManipTTypes )
    self.log.info( "Will consider the following transformation types: %s" % str( self.transformationTypes ) )
    # # directory locations
    self.directoryLocations = sorted( self.am_getOption( 'DirectoryLocations', [ 'TransformationDB',
                                                                                   'MetadataCatalog' ] ) )
    self.log.info( "Will search for directories in the following locations: %s" % str( self.directoryLocations ) )
    # # transformation metadata
    self.transfidmeta = self.am_getOption( 'TransfIDMeta', "TransformationID" )
    self.log.info( "Will use %s as metadata tag name for TransformationID" % self.transfidmeta )
    # # archive period in days
    self.archiveAfter = self.am_getOption( 'ArchiveAfter', 7 )  # days
    self.log.info( "Will archive Completed transformations after %d days" % self.archiveAfter )
    # # active SEs
    self.activeStorages = sorted( self.am_getOption( 'ActiveSEs', [] ) )
    self.log.info( "Will check the following storage elements: %s" % str( self.activeStorages ) )
    # # transformation log SEs
    self.logSE = self.am_getOption( 'TransformationLogSE', 'LogSE' )
    self.log.info( "Will remove logs found on storage element: %s" % self.logSE )
    # # enable/disable execution, should be using CS option Status?? with default value as 'Active'??
    self.enableFlag = self.am_getOption( 'EnableFlag', 'True' )

    # # data manager
    self.dm = DataManager()
    # # transformation client
    self.transClient = TransformationClient()
    # # wms client
    self.wmsClient = WMSClient()
    # # request client
    self.reqClient = ReqClient()
    # # file catalog client
    self.metadataClient = FileCatalogClient()

    return S_OK()
def downloadFile(tarballURL, app_tar, folder_name):
  """ Get the file locally.
  """
  # need to make sure the URL ends with /, otherwise the concatenation below returns a bad URL
  if tarballURL[-1] != "/":
    tarballURL += "/"

  app_tar_base = os.path.basename(app_tar)
  if tarballURL.find("http://")>-1:
    try :
      gLogger.debug("Downloading software", '%s' % (folder_name))
      #Copy the file locally, don't try to read from remote, soooo slow
      #Use string conversion %s%s to set the address, makes the system more stable
      urllib.urlretrieve("%s%s" % (tarballURL, app_tar), app_tar_base)
    except IOError as err:
      gLogger.exception(str(err))
      return S_ERROR('Exception during url retrieve: %s' % str(err))
  else:
    datMan = DataManager()
    resget = datMan.getFile("%s%s" % (tarballURL, app_tar))
    if not resget['OK']:
      gLogger.error("File could not be downloaded from the grid")
      return resget
  return S_OK()
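# Hedged usage sketch (not in the original): fetching an application tarball either
# over HTTP or from the grid with the helper above; the URL and file names are
# hypothetical.
res = downloadFile("http://example.org/software/", "myapp_v1.tar.gz", "myapp_v1")
if not res['OK']:
  gLogger.error("Could not fetch application tarball", res['Message'])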
  def initializeOptimizer( self ):
    """Initialize specific parameters for JobSanityAgent.
    """
    self.failedMinorStatus = self.am_getOption( '/FailedJobStatus', 'Input Data Not Available' )
    #this will ignore failover SE files
    self.checkFileMetadata = self.am_getOption( 'CheckFileMetadata', True )

    self.dataManager = DataManager()
    self.resourceStatus = ResourceStatus()
    self.fc = FileCatalog()

    self.seToSiteMapping = {}
    self.lastCScheck = 0
    self.cacheLength = 600

    return S_OK()
  def __init__( self, useCertificates = False ):
    """c'tor

    :param self: self reference
    :param bool useCertificates: flag to enable/disable certificates
    """
    Client.__init__( self )
    self.log = gLogger.getSubLogger( "DataManagement/FTSClient" )
    self.setServer( "DataManagement/FTSManager" )

    # getting other clients
    self.ftsValidator = FTSValidator()
    self.dataManager = DataManager()
    self.storageFactory = StorageFactory()

    url = PathFinder.getServiceURL( "DataManagement/FTSManager" )
    if not url:
      raise RuntimeError( "CS option DataManagement/FTSManager URL is not set!" )
    self.ftsManager = RPCClient( url )