def main():
    """Inspect the request caches held by the configured request proxies.

    For every request proxy server registered in the ReqClient, list the
    cached request directory: with --Full, print the full list of cached
    requests, otherwise only print how many requests are cached.
    Exits with code 0.
    """
    Script.registerSwitch('', 'Full', ' Print full list of requests')
    from DIRAC.Core.Base.Script import parseCommandLine
    parseCommandLine()

    # BUG FIX: DIRAC was never imported in this function, yet DIRAC.gLogger
    # and DIRAC.exit are used below (the sibling main() for the DB summary
    # does the same local import).  Without it this raises NameError.
    import DIRAC
    from DIRAC.RequestManagementSystem.Client.ReqClient import ReqClient

    fullPrint = False
    for switch in Script.getUnprocessedSwitches():
        if switch[0] == 'Full':
            fullPrint = True

    reqClient = ReqClient()

    # requestProxies() maps server URL -> RPC client for that proxy.
    for server, rpcClient in reqClient.requestProxies().items():
        DIRAC.gLogger.always("Checking request cache at %s" % server)
        reqCache = rpcClient.listCacheDir()
        if not reqCache['OK']:
            # Best effort: report and move on to the next proxy.
            DIRAC.gLogger.error("Cannot list request cache", reqCache)
            continue
        reqCache = reqCache['Value']
        if fullPrint:
            DIRAC.gLogger.always("List of requests", reqCache)
        else:
            DIRAC.gLogger.always("Number of requests in the cache", len(reqCache))
    DIRAC.exit(0)
def setUp(self):
    """Test fixture: initialise DIRAC and instantiate the monitored clients."""
    from DIRAC.Core.Base.Script import parseCommandLine
    parseCommandLine()
    # Mock stand-in — presumably for the ResourceStatusSystem service
    # (judging by the name); TODO confirm against the tests using it.
    self.mockRSS = Mock()
    # Real client instances under test (all project-defined classes).
    self.GOCCli = GOCDBClient()
    self.SLSCli = SLSClient()
    self.SAMCli = SAMResultsClient()
    self.GGUSCli = GGUSTicketsClient()
def testLcgFileCatalogProxyClient():
  """ basic test of the module """
  import os
  import sys
  import pprint
  from DIRAC.Core.Base.Script import parseCommandLine
  from DIRAC import gLogger, S_OK
  parseCommandLine()
  gLogger.setLevel( 'VERBOSE' )

  # asserts below are stripped under PYTHONOPTIMIZE, so bail out early.
  if 'PYTHONOPTIMIZE' in os.environ and os.environ['PYTHONOPTIMIZE']:
    gLogger.info( 'Unset pyhthon optimization "PYTHONOPTIMIZE"' )
    sys.exit( 0 )

  gLogger.info( 'Testing LcgFileCatalogProxyClient class...' )
  try:
    # result starts as a successful S_OK so the except branch can tell an
    # assertion failure (result OK) from a failed server call (result not OK).
    result = S_OK()
    # LcgFileCatalogProxyClient is imported at module level — not visible here.
    lfcpc = LcgFileCatalogProxyClient()
    gLogger.info( 'LcgFileCatalogProxyClient instantiated' )
    server = lfcpc.getServer()
    assert server == 'DataManagement/LcgFileCatalogProxy'
    gLogger.info( ' Connecting to ', server )
    timeout = lfcpc.timeout
    assert timeout == 120
    result = lfcpc.listDirectory( '/' )
    assert result['OK']
    gLogger.info( pprint.pformat( result['Value']['Successful'] ) )
    gLogger.info( 'Server is alive' )
  except AssertionError, x:  # Python 2 except syntax
    if result['OK']:
      # A genuine assertion failure: the client misbehaved.
      gLogger.error( x )
      sys.exit( 1 )
    else:
      # Could not reach the server; treated as a soft pass.
      gLogger.info( 'Test OK, but could not connect to server' )
      gLogger.info( result['Message'] )
def testLcgFileCatalogProxyClient(): """ basic test of the module """ import os import sys import pprint from DIRAC.Core.Base.Script import parseCommandLine from DIRAC import gLogger, S_OK parseCommandLine() gLogger.setLevel('VERBOSE') if 'PYTHONOPTIMIZE' in os.environ and os.environ['PYTHONOPTIMIZE']: gLogger.info('Unset pyhthon optimization "PYTHONOPTIMIZE"') sys.exit(0) gLogger.info('Testing LcgFileCatalogProxyClient class...') try: result = S_OK() lfcpc = LcgFileCatalogProxyClient() gLogger.info('LcgFileCatalogProxyClient instantiated') server = lfcpc.getServer() assert server == 'DataManagement/LcgFileCatalogProxy' gLogger.info(' Connecting to ', server) timeout = lfcpc.timeout assert timeout == 120 result = lfcpc.listDirectory('/') assert result['OK'] gLogger.info(pprint.pformat(result['Value']['Successful'])) gLogger.info('Server is alive') except AssertionError, x: if result['OK']: gLogger.error(x) sys.exit(1) else: gLogger.info('Test OK, but could not connect to server') gLogger.info(result['Message'])
def main():
    """Print a per-state summary of the ReqDB content.

    Shows request counts per state, operation counts per type and state,
    and file counts per state.  Exits -1 on query failure, 0 otherwise.
    """
    from DIRAC.Core.Base.Script import parseCommandLine
    parseCommandLine()

    import DIRAC
    from DIRAC.RequestManagementSystem.Client.ReqClient import ReqClient

    summaryRes = ReqClient().getDBSummary()
    if not summaryRes["OK"]:
        DIRAC.gLogger.error(summaryRes["Message"])
        DIRAC.exit(-1)

    summary = summaryRes["Value"]
    if not summary:
        DIRAC.gLogger.info("ReqDB is empty!")
        DIRAC.exit(0)

    # Requests: one line per state.
    DIRAC.gLogger.always("Requests:")
    for state, count in sorted(summary.get("Request", {}).items()):
        DIRAC.gLogger.always("- '%s' %s" % (state, count))

    # Operations: grouped by operation type, then state.
    DIRAC.gLogger.always("Operations:")
    for opType, stateCounts in sorted(summary.get("Operation", {}).items()):
        DIRAC.gLogger.always("- '%s':" % opType)
        for state, count in sorted(stateCounts.items()):
            DIRAC.gLogger.always(" - '%s' %s" % (state, count))

    # Files: one line per state.
    DIRAC.gLogger.always("Files:")
    for state, count in sorted(summary.get("File", {}).items()):
        DIRAC.gLogger.always("- '%s' %s" % (state, count))

    DIRAC.exit(0)
""" Transformation Database Client Command Line Interface. """ #! /usr/bin/env python from DIRAC.Core.Base.Script import parseCommandLine parseCommandLine() import string, sys, cmd from DIRAC.Core.Base.API import API from DIRAC.Core.Utilities.List import sortList from DIRAC.Core.Utilities.Subprocess import shellCall from DIRAC.TransformationSystem.Client.Transformation import Transformation from DIRAC.TransformationSystem.Client.TransformationClient import TransformationClient def printDict( dictionary ): """ Dictionary pretty printing """ key_max = 0 value_max = 0 for key, value in dictionary.items(): if len( key ) > key_max: key_max = len( key ) if len( str( value ) ) > value_max: value_max = len( str( value ) ) for key, value in dictionary.items(): print key.rjust( key_max ), ' : ', str( value ).ljust( value_max ) class TransformationCLI( cmd.Cmd, API ):
#!/usr/bin/env python from DIRAC.Core.Base.Script import parseCommandLine parseCommandLine() import unittest, os, shutil from TestDIRAC.Utilities.utils import find_all from TestDIRAC.Utilities.IntegrationTest import IntegrationTest from DIRAC.Interfaces.API.Job import Job from DIRAC.Interfaces.API.Dirac import Dirac class RegressionTestCase( IntegrationTest ): """ Base class for the Regression test cases """ def setUp( self ): super( IntegrationTest, self ).setUp() self.dirac = Dirac() exeScriptLoc = find_all( 'exe-script.py', '.', 'Regression' )[0] helloWorldLoc = find_all( 'helloWorld.py', '.', 'Regression' )[0] shutil.copyfile( exeScriptLoc, './exe-script.py' ) shutil.copyfile( helloWorldLoc, './helloWorld.py' ) helloWorldXMLLocation = find_all( 'helloWorld.xml', '.', 'Regression' )[0] self.j_u_hello = Job( helloWorldXMLLocation ) helloWorldXMLFewMoreLocation = find_all( 'helloWorld.xml', '.', 'Regression' )[0]
def diracInit(self):
    """ Initialise DIRAC and the book keeping """
    # NOTE(review): the LHCbDIRAC import happens before parseCommandLine();
    # preserving that order deliberately in case the import has side effects.
    from DIRAC.Core.Base.Script import parseCommandLine
    from LHCbDIRAC.BookkeepingSystem.Client.BookkeepingClient import BookkeepingClient
    parseCommandLine()
    # Bookkeeping client kept on the instance for later queries.
    self.bkClient = BookkeepingClient()
######################################################################## __RCSID__ = "$Id$" import sys import DIRAC import sys import time import random import types from DIRAC import S_OK, S_ERROR from DIRAC.Core.Base.Script import parseCommandLine from DIRAC.Core.DISET.RPCClient import RPCClient parseCommandLine( initializeMonitor = False ) rpcClient = RPCClient( "WorkloadManagement/Matcher" ) print "Getting TQs.." result = rpcClient.getActiveTaskQueues() if not result[ 'OK' ]: print 'ERROR: %s' % result['Message'] sys.exit( 1 ) tqDict = result[ 'Value' ] for tqId in sorted( tqDict ): print "* TQ %s" % tqId tqData = tqDict[ tqId ] for key in sorted( tqData ): value = tqData[ key ] if type( value ) == types.ListType:
# Author : Ricardo Graciani ######################################################################## __RCSID__ = "$Id$" import sys import DIRAC import sys import time import random import types from DIRAC import S_OK, S_ERROR from DIRAC.Core.Base.Script import parseCommandLine from DIRAC.Core.DISET.RPCClient import RPCClient parseCommandLine(initializeMonitor=False) rpcClient = RPCClient("WorkloadManagement/Matcher") print "Getting TQs.." result = rpcClient.getActiveTaskQueues() if not result['OK']: print 'ERROR: %s' % result['Message'] sys.exit(1) tqDict = result['Value'] for tqId in sorted(tqDict): print "* TQ %s" % tqId tqData = tqDict[tqId] for key in sorted(tqData): value = tqData[key] if type(value) == types.ListType:
def main():
    """Inspect, reset, cancel or fix RMS requests.

    Requests are selected by positional request IDs/names, by --Job,
    by --Transformation/--Tasks, or by --Status with a time window.
    Depending on switches, each selected request is printed, reset
    (Failed files back to Waiting), cancelled, or its job finalized.

    NOTE(review): relies on module-level names not visible in this chunk:
    os, datetime, convertDate — verify they are imported at file scope.
    """
    from DIRAC.Core.Base import Script
    Script.registerSwitch('', 'Job=', ' JobID[,jobID2,...]')
    Script.registerSwitch('', 'Transformation=', ' transformation ID')
    Script.registerSwitch('', 'Tasks=', ' Associated to --Transformation, list of taskIDs')
    Script.registerSwitch('', 'Verbose', ' Print more information')
    Script.registerSwitch('', 'Terse', ' Only print request status')
    Script.registerSwitch('', 'Full', ' Print full request content')
    Script.registerSwitch('', 'Status=', ' Select all requests in a given status')
    Script.registerSwitch('', 'Since=', ' Associated to --Status, start date yyyy-mm-dd or nb of days (default= -one day')
    Script.registerSwitch('', 'Until=', ' Associated to --Status, end date (default= now')
    Script.registerSwitch('', 'Maximum=', ' Associated to --Status, max number of requests ')
    Script.registerSwitch('', 'Reset', ' Reset Failed files to Waiting if any')
    Script.registerSwitch('', 'Force', ' Force reset even if not Failed')
    Script.registerSwitch('', 'All', ' (if --Status Failed) all requests, otherwise exclude irrecoverable failures')
    Script.registerSwitch('', 'FixJob', ' Set job Done if the request is Done')
    Script.registerSwitch('', 'Cancel', ' Cancel the request')
    Script.registerSwitch('', 'ListJobs', ' List the corresponding jobs')
    Script.registerSwitch('', 'TargetSE=', ' Select request only if that SE is in the targetSEs')
    from DIRAC.Core.Base.Script import parseCommandLine
    parseCommandLine()
    import DIRAC
    from DIRAC import gLogger

    # Defaults for all selection/action flags.
    jobs = []
    requestID = 0
    transID = None
    taskIDs = None
    tasks = None
    requests = []
    full = False
    verbose = False
    status = None
    until = None
    since = None
    terse = False
    allR = False
    reset = False
    fixJob = False
    maxRequests = 999999999999
    cancel = False
    listJobs = False
    force = False
    targetSE = set()

    for switch in Script.getUnprocessedSwitches():
        if switch[0] == 'Job':
            jobs = []
            job = "Unknown"
            try:
                # Each argument may be a file of jobIDs or a jobID itself.
                for arg in switch[1].split(','):
                    if os.path.exists(arg):
                        with open(arg, 'r') as fp:
                            lines = fp.readlines()
                        for line in lines:
                            for job in line.split(','):
                                jobs += [int(job.strip())]
                        gLogger.notice("Found %d jobs in file %s" % (len(jobs), arg))
                    else:
                        jobs.append(int(arg))
            except TypeError:
                gLogger.fatal("Invalid jobID", job)
        elif switch[0] == 'Transformation':
            try:
                transID = int(switch[1])
            except Exception:
                gLogger.fatal('Invalid transID', switch[1])
        elif switch[0] == 'Tasks':
            try:
                taskIDs = [int(task) for task in switch[1].split(',')]
            except Exception:
                gLogger.fatal('Invalid tasks', switch[1])
        elif switch[0] == 'Full':
            full = True
        elif switch[0] == 'Verbose':
            verbose = True
        elif switch[0] == 'Terse':
            terse = True
        elif switch[0] == 'All':
            allR = True
        elif switch[0] == 'Reset':
            reset = True
        elif switch[0] == 'Force':
            force = True
        elif switch[0] == 'Status':
            status = switch[1].capitalize()
        elif switch[0] == 'Since':
            # convertDate is defined elsewhere in this file — TODO confirm.
            since = convertDate(switch[1])
        elif switch[0] == 'Until':
            until = convertDate(switch[1])
        elif switch[0] == 'FixJob':
            fixJob = True
        elif switch[0] == 'Cancel':
            cancel = True
        elif switch[0] == 'ListJobs':
            listJobs = True
        elif switch[0] == 'Maximum':
            try:
                maxRequests = int(switch[1])
            except Exception:
                pass
        elif switch[0] == 'TargetSE':
            targetSE = set(switch[1].split(','))

    # Switch interactions: Reset implies Failed selection (unless forced),
    # FixJob implies Done selection, Terse implies Verbose.
    if reset and not force:
        status = 'Failed'
    if fixJob:
        status = 'Done'
    if terse:
        verbose = True
    if status:
        # Default time window: the last 24 hours.
        if not until:
            until = datetime.datetime.utcnow()
        if not since:
            since = until - datetime.timedelta(hours=24)

    from DIRAC.RequestManagementSystem.Client.ReqClient import ReqClient
    from DIRAC.RequestManagementSystem.Client.ReqClient import printRequest, recoverableRequest
    reqClient = ReqClient()

    if transID:
        if not taskIDs:
            gLogger.fatal("If Transformation is set, a list of Tasks should also be set")
            Script.showHelp(exitCode=2)
        # In principle, the task name is unique, so the request name should be unique as well
        # If ever this would not work anymore, we would need to use the transformationClient
        # to fetch the ExternalID
        requests = ['%08d_%08d' % (transID, task) for task in taskIDs]
        allR = True
    elif not jobs:
        requests = []
        # Get full list of arguments, with and without comma
        for arg in [x.strip() for arg in Script.getPositionalArgs() for x in arg.split(',')]:
            if os.path.exists(arg):
                # Argument is a file containing request IDs.
                lines = open(arg, 'r').readlines()
                requests += [reqID.strip() for line in lines for reqID in line.split(',')]
                gLogger.notice("Found %d requests in file" % len(requests))
            else:
                requests.append(arg)
        allR = True
    else:
        # Map the given jobs to their requests.
        res = reqClient.getRequestIDsForJobs(jobs)
        if not res['OK']:
            gLogger.fatal("Error getting request for jobs", res['Message'])
            DIRAC.exit(2)
        if res['Value']['Failed']:
            gLogger.error("No request found for jobs %s" % ','.join(sorted(str(job) for job in res['Value']['Failed'])))
        requests = sorted(res['Value']['Successful'].values())
        if requests:
            allR = True
        else:
            DIRAC.exit(0)

    if status and not requests:
        # Select by status over the time window instead.
        allR = allR or status != 'Failed'
        res = reqClient.getRequestIDsList([status], limit=maxRequests, since=since, until=until)
        if not res['OK']:
            gLogger.error("Error getting requests:", res['Message'])
            DIRAC.exit(2)
        requests = [reqID for reqID, _st, updTime in res['Value'] if updTime > since and updTime <= until and reqID]
        gLogger.notice('Obtained %d requests %s between %s and %s' % (len(requests), status, since, until))
    if not requests:
        gLogger.notice('No request selected....')
        Script.showHelp(exitCode=2)

    okRequests = []
    warningPrinted = False  # NOTE(review): never used in this function
    jobIDList = []
    for reqID in requests:
        # We allow reqID to be the requestName if it is unique
        try:
            requestID = int(reqID)
        except ValueError:
            requestID = reqClient.getRequestIDForName(reqID)
            if not requestID['OK']:
                gLogger.notice(requestID['Message'])
                continue
            requestID = requestID['Value']

        request = reqClient.peekRequest(requestID)
        if not request["OK"]:
            gLogger.error(request["Message"])
            DIRAC.exit(-1)

        request = request["Value"]
        if not request:
            gLogger.error("no such request %s" % requestID)
            continue
        # If no operation as the targetSE, skip
        if targetSE:
            found = False
            for op in request:
                if op.TargetSE and targetSE.intersection(op.TargetSE.split(',')):
                    found = True
                    break
            if not found:
                continue
        # keep a list of jobIDs if requested
        if request.JobID and listJobs:
            jobIDList.append(request.JobID)

        if status and request.Status != status:
            gLogger.notice("Request %s is not in requested status %s%s" % (reqID, status, ' (cannot be reset)' if reset else ''))
            continue

        if fixJob and request.Status == 'Done' and request.JobID:
            # The request is for a job and is Done, verify that the job is in the proper status
            result = reqClient.finalizeRequest(request.RequestID, request.JobID, useCertificates=False)
            if not result['OK']:
                gLogger.error("Error finalizing job", result['Message'])
            else:
                gLogger.notice("Job %d updated to %s" % (request.JobID, result['Value']))
            continue

        if cancel:
            # Only requests that are not already terminal can be cancelled.
            if request.Status not in ('Done', 'Failed'):
                ret = reqClient.cancelRequest(requestID)
                if not ret['OK']:
                    gLogger.error("Error canceling request %s" % reqID, ret['Message'])
                else:
                    gLogger.notice("Request %s cancelled" % reqID)
            else:
                gLogger.notice("Request %s is in status %s, not cancelled" % (reqID, request.Status))

        elif allR or recoverableRequest(request):
            okRequests.append(str(requestID))
            if reset:
                gLogger.notice('============ Request %s =============' % requestID)
                ret = reqClient.resetFailedRequest(requestID, allR=allR)
                if not ret['OK']:
                    gLogger.error("Error resetting request %s" % requestID, ret['Message'])
            else:
                if len(requests) > 1:
                    gLogger.notice('\n===================================')
                dbStatus = reqClient.getRequestStatus(requestID).get('Value', 'Unknown')
                printRequest(request, status=dbStatus, full=full, verbose=verbose, terse=terse)

    if listJobs:
        gLogger.notice("List of %d jobs:\n" % len(jobIDList), ','.join(str(jobID) for jobID in jobIDList))

    if status and okRequests:
        from DIRAC.Core.Utilities.List import breakListIntoChunks
        gLogger.notice('\nList of %d selected requests:' % len(okRequests))
        for reqs in breakListIntoChunks(okRequests, 100):
            gLogger.notice(','.join(reqs))
def main():
    """Create and submit a 'PutAndRegister' request for a single local file.

    Expects exactly four positional args: requestName LFN PFN targetSE.
    The local file's size and ADLER32 checksum are attached to the request.

    NOTE(review): uses module-level names not visible in this chunk:
    os, Script — verify they are imported/defined at file scope.
    """
    from DIRAC.Core.Base.Script import parseCommandLine
    parseCommandLine()
    import DIRAC
    from DIRAC import gLogger
    args = Script.getPositionalArgs()

    requestName = None
    LFN = None
    PFN = None
    targetSE = None
    if len(args) != 4:
        Script.showHelp()
    else:
        requestName = args[0]
        LFN = args[1]
        PFN = args[2]
        targetSE = args[3]

    # LFNs must be absolute catalogue paths.
    if not os.path.isabs(LFN):
        gLogger.error("LFN should be absolute path!!!")
        DIRAC.exit(-1)

    gLogger.info("will create request '%s' with 'PutAndRegister' "
                 "operation using %s pfn and %s target SE" % (requestName, PFN, targetSE))

    from DIRAC.RequestManagementSystem.Client.Request import Request
    from DIRAC.RequestManagementSystem.Client.Operation import Operation
    from DIRAC.RequestManagementSystem.Client.File import File
    from DIRAC.RequestManagementSystem.Client.ReqClient import ReqClient
    from DIRAC.Core.Utilities.Adler import fileAdler

    # The PFN must point at an existing regular file on local disk.
    if not os.path.exists(PFN):
        gLogger.error("%s does not exist" % PFN)
        DIRAC.exit(-1)
    if not os.path.isfile(PFN):
        gLogger.error("%s is not a file" % PFN)
        DIRAC.exit(-1)

    PFN = os.path.abspath(PFN)
    size = os.path.getsize(PFN)
    adler32 = fileAdler(PFN)

    # Build the request: one PutAndRegister operation carrying one file.
    request = Request()
    request.RequestName = requestName

    putAndRegister = Operation()
    putAndRegister.Type = "PutAndRegister"
    putAndRegister.TargetSE = targetSE

    opFile = File()
    opFile.LFN = LFN
    opFile.PFN = PFN
    opFile.Size = size
    opFile.Checksum = adler32
    opFile.ChecksumType = "ADLER32"
    putAndRegister.addFile(opFile)
    request.addOperation(putAndRegister)

    reqClient = ReqClient()
    putRequest = reqClient.putRequest(request)
    if not putRequest["OK"]:
        gLogger.error("unable to put request '%s': %s" % (requestName, putRequest["Message"]))
        DIRAC.exit(-1)

    gLogger.always("Request '%s' has been put to ReqDB for execution." % requestName)
    gLogger.always("You can monitor its status using command: 'dirac-rms-request %s'" % requestName)
    DIRAC.exit(0)
def main():
    """Build a CSV matrix of transfer protocols between storage elements.

    For each (source, destination) SE pair, records the transfer URL
    protocols and the negotiated third-party protocols, and writes the
    matrix to a CSV file.

    NOTE(review): uses module-level names not visible in this chunk:
    csv, defaultdict — verify they are imported at file scope.
    """
    from DIRAC.Core.Base import Script
    Script.registerSwitch('', 'FromSE=', 'SE1[,SE2,...]')
    Script.registerSwitch('', 'TargetSE=', 'SE1[,SE2,...]')
    Script.registerSwitch('', 'OutputFile=', 'CSV output file (default /tmp/protocol-matrix.csv)')
    Script.registerSwitch('', 'Bidirection', 'If FromSE or TargetSE are specified, make a square matrix ')
    Script.registerSwitch('', 'FTSOnly', 'Only display the protocols sent to FTS')
    Script.registerSwitch('', 'ExcludeSE=', 'SEs to not take into account for the matrix')
    from DIRAC.Core.Base.Script import parseCommandLine
    parseCommandLine()
    from DIRAC import gConfig, gLogger
    from DIRAC.DataManagementSystem.Utilities.DMSHelpers import DMSHelpers
    from DIRAC.Resources.Storage.StorageElement import StorageElement
    from DIRAC.Core.Security.ProxyInfo import getVOfromProxyGroup

    fromSE = []
    targetSE = []
    excludeSE = []
    outputFile = '/tmp/protocol-matrix.csv'
    bidirection = False
    ftsOnly = False
    for switch in Script.getUnprocessedSwitches():
        if switch[0] == 'FromSE':
            fromSE = switch[1].split(',')
        elif switch[0] == 'TargetSE':
            targetSE = switch[1].split(',')
        elif switch[0] == 'ExcludeSE':
            excludeSE = switch[1].split(',')
        elif switch[0] == 'OutputFile':
            outputFile = switch[1]
        elif switch[0] == 'Bidirection':
            bidirection = True
        elif switch[0] == 'FTSOnly':
            ftsOnly = True

    thirdPartyProtocols = DMSHelpers().getThirdPartyProtocols()

    # List all the BaseSE
    seBases = gConfig.getSections('/Resources/StorageElementBases')['Value']  # NOTE(review): unused below
    # construct a dict { baseSE : <inherited storages>}
    seForSeBases = {}
    allSEs = gConfig.getSections('/Resources/StorageElements/')['Value']
    # Remove the SEs that we want to exclude
    allSEs = set(allSEs) - set(excludeSE)

    # We go through all the SEs and fill in the seForSEBases dict.
    # Basically, at the end of the loop, the dict will contain
    # for each baseSE an entry corresponding to one real storage (the first one)
    # and itself for each real non inherited SE
    for se in allSEs:
        baseSE = gConfig.getOption('/Resources/StorageElements/%s/BaseSE' % se).get('Value')
        if baseSE:
            if baseSE not in seForSeBases:
                seForSeBases[baseSE] = se
        else:
            # If no baseSE, we put self
            seForSeBases[se] = se

    # Now let's take into account what source and destination we want.
    # If the user did not specify source or dest, generate everything
    if not fromSE and not targetSE:
        fromSE = list(seForSeBases)
        targetSE = list(seForSeBases)
    else:
        # he specified at least source of dest
        # if bidirection, source and target should be the same
        if bidirection:
            if not fromSE and targetSE:
                # we gave target, but no source
                fromSE = targetSE
            elif fromSE and not targetSE:
                # we gave source but no target
                targetSE = fromSE
            elif fromSE and targetSE:
                # we gave both
                fromSE = targetSE = list(set(fromSE + targetSE))
        else:
            # no bidirection
            # he specified a targetSE
            if not fromSE:
                fromSE = list(seForSeBases)
            elif not targetSE:
                targetSE = list(seForSeBases)

    fromSE = sorted(fromSE)
    targetSE = sorted(targetSE)
    gLogger.notice("Using sources: %s" % ','.join(fromSE))
    gLogger.notice("Using target: %s" % ','.join(targetSE))

    # Now we construct the SE object for each SE that we want to appear
    ses = {}
    for se in set(fromSE + targetSE):
        ses[se] = StorageElement(seForSeBases.get(se, se))

    ret = getVOfromProxyGroup()
    if not ret['OK'] or not ret.get('Value', ''):
        gLogger.error('Aborting, Bad Proxy:', ret.get('Message', 'Proxy does not belong to a VO!'))
        exit(1)
    vo = ret['Value']
    gLogger.notice('Using the Virtual Organization:', vo)
    # dummy LFN, still has to follow lfn convention
    lfn = '/%s/toto.xml' % vo

    # Create a matrix of protocol src/dest
    tpMatrix = defaultdict(dict)

    # For each source and destination, generate the url pair, and the compatible third party protocols
    for src, dst in ((x, y) for x in fromSE for y in targetSE):
        res = ses[dst].generateTransferURLsBetweenSEs(lfn, ses[src], thirdPartyProtocols)
        if not res['OK']:
            surls = 'Error'
            gLogger.notice("Could not generate transfer URLS",
                           "src:%s, dst:%s, error:%s" % (src, dst, res['Message']))
        else:
            # We only keep the protocol part of the url
            surls = '/'.join(res['Value']['Protocols'])

        # Add also the third party protocols
        proto = ','.join(ses[dst].negociateProtocolWithOtherSE(ses[src], thirdPartyProtocols)['Value'])
        if ftsOnly:
            tpMatrix[src][dst] = '%s' % surls
        else:
            tpMatrix[src][dst] = '%s (%s)' % (surls, proto)
        gLogger.verbose("%s -> %s: %s" % (src, dst, surls))
        gLogger.verbose("%s -> %s: %s" % (src, dst, proto))

    # Write the matrix in the file
    with open(outputFile, 'wb') as csvfile:
        csvWriter = csv.writer(csvfile, delimiter=';', quoting=csv.QUOTE_MINIMAL)
        csvWriter.writerow(['src/dst'] + targetSE)
        for src in fromSE:
            srcRow = [src]
            for dst in targetSE:
                srcRow.append(tpMatrix[src].get(dst, 'NA'))
            csvWriter.writerow(srcRow)
    gLogger.notice('Wrote Matrix to', outputFile)
def main():
    """Create and submit 'MoveReplica' requests for a list of LFNs.

    Args (positional): sourceSE, LFN-list source, one or more target SEs
    (comma-separated allowed).  LFNs are processed in chunks of 100, one
    request per chunk.  Exits with -1 if any chunk failed, 0 otherwise.

    NOTE(review): uses module-level names not visible in this chunk:
    Script, getLFNList, md5, time — verify they are imported/defined
    at file scope.
    """
    from DIRAC.Core.Base.Script import parseCommandLine
    parseCommandLine()
    import DIRAC
    from DIRAC import gLogger
    args = Script.getPositionalArgs()
    if len(args) < 3:
        Script.showHelp()
    sourceSE = args[0]
    lfnList = getLFNList(args[1])
    # Flatten the remaining args, allowing comma-separated SE lists.
    targetSEs = list(set([se for targetSE in args[2:] for se in targetSE.split(',')]))

    gLogger.info("Will create request with 'MoveReplica' "
                 "operation using %s lfns and %s target SEs" % (len(lfnList), len(targetSEs)))

    from DIRAC.RequestManagementSystem.Client.ReqClient import ReqClient
    from DIRAC.RequestManagementSystem.Client.Request import Request
    from DIRAC.RequestManagementSystem.Client.Operation import Operation
    from DIRAC.RequestManagementSystem.Client.File import File
    from DIRAC.Resources.Catalog.FileCatalog import FileCatalog
    from DIRAC.Core.Utilities.List import breakListIntoChunks

    lfnChunks = breakListIntoChunks(lfnList, 100)
    multiRequests = len(lfnChunks) > 1

    error = 0
    count = 0
    reqClient = ReqClient()
    fc = FileCatalog()
    for lfnChunk in lfnChunks:
        metaDatas = fc.getFileMetadata(lfnChunk)
        if not metaDatas["OK"]:
            gLogger.error("unable to read metadata for lfns: %s" % metaDatas["Message"])
            error = -1
            continue
        metaDatas = metaDatas["Value"]
        for failedLFN, reason in metaDatas["Failed"].items():
            gLogger.error("skipping %s: %s" % (failedLFN, reason))
        # Keep only the LFNs whose metadata could be read.
        lfnChunk = set(metaDatas["Successful"])
        if not lfnChunk:
            gLogger.error("LFN list is empty!!!")
            error = -1
            continue
        if len(lfnChunk) > Operation.MAX_FILES:
            gLogger.error("too many LFNs, max number of files per operation is %s" % Operation.MAX_FILES)
            error = -1
            continue

        count += 1
        request = Request()
        # Random-ish unique name built from the current timestamp hash.
        request.RequestName = "%s_%s" % (
            md5(repr(time.time()).encode()).hexdigest()[:16],
            md5(repr(time.time()).encode()).hexdigest()[:16],
        )

        moveReplica = Operation()
        moveReplica.Type = 'MoveReplica'
        moveReplica.SourceSE = sourceSE
        moveReplica.TargetSE = ",".join(targetSEs)

        for lfn in lfnChunk:
            metaDict = metaDatas["Successful"][lfn]
            opFile = File()
            opFile.LFN = lfn
            opFile.Size = metaDict["Size"]
            if "Checksum" in metaDict:
                # should check checksum type, now assuming Adler32 (metaDict["ChecksumType"] == 'AD')
                opFile.Checksum = metaDict["Checksum"]
                opFile.ChecksumType = "ADLER32"
            moveReplica.addFile(opFile)

        request.addOperation(moveReplica)
        result = reqClient.putRequest(request)
        if not result["OK"]:
            gLogger.error("Failed to submit Request: %s" % (result["Message"]))
            error = -1
            continue
        if not multiRequests:
            gLogger.always("Request %d submitted successfully" % result['Value'])

    if multiRequests:
        gLogger.always("%d requests have been submitted" % (count))
    DIRAC.exit(error)