# Stage-out test driver setup: build a JobSpec carrying one gzipped output
# FileSpec plus an associated event-service output file filled with random
# alphanumeric content, so a stager plug-in can be exercised against it.
# NOTE(review): `exit_func` and `file_prefix` are assumed to be defined
# earlier in this file (not visible in this chunk) — confirm before running.

atexit.register(exit_func)

# Queue name comes from the command line; resolve its configuration.
queueName = sys.argv[1]
queueConfigMapper = QueueConfigMapper()
queueConfig = queueConfigMapper.get_queue(queueName)

# Primary output file spec: random LFN with a random GUID attribute.
fileSpec = FileSpec()
fileSpec.fileType = 'output'
fileSpec.lfn = file_prefix + uuid.uuid4().hex + '.gz'
fileSpec.fileAttributes = {'guid': str(uuid.uuid4())}
fileSpec.checksum = '0d439274'

# Associated ES output file with a random size between 10 and 100 bytes,
# placed in the current working directory.
assFileSpec = FileSpec()
assFileSpec.lfn = file_prefix + uuid.uuid4().hex
assFileSpec.fileType = 'es_output'
assFileSpec.fsize = random.randint(10, 100)
assFileSpec.path = os.getcwd() + '/' + assFileSpec.lfn

# Create the source file with random content.  Use a context manager so the
# handle is closed even if the write raises (original used open/close pairs).
with open(assFileSpec.lfn, 'w') as oFile:
    oFile.write(''.join(random.choice(string.ascii_uppercase + string.digits)
                        for _ in range(assFileSpec.fsize)))

fileSpec.add_associated_file(assFileSpec)

# Minimal job parameters required by the stager under test.
jobSpec = JobSpec()
jobSpec.jobParams = {'outFiles': fileSpec.lfn + ',log',
                     'scopeOut': 'panda',
                     'scopeLog': 'panda',
                     'logFile': 'log',
                     'realDatasets': 'panda.' + fileSpec.lfn,
                     'ddmEndPointOut': 'BNL-OSG2_DATADISK',
                     }
jobSpec.add_out_file(fileSpec)
# Globus stager test setup: point the queue configuration at the GlobusStager
# plug-in, then lay out a randomly-sized source file under the configured
# Globus source path using the Rucio-style deterministic directory layout
# <endpoint>/<scope>/<hash1>/<hash2>/<lfn>.
# NOTE(review): `queueConfig` is assumed to be initialised earlier in this
# file (not visible in this chunk) — confirm before running standalone.

# Python 3 fix: print is a function, not a statement.
print("Initial queueConfig.stager = ", queueConfig.stager)
queueConfig.stager['module'] = 'pandaharvester.harvesterstager.go_stager'
queueConfig.stager['name'] = 'GlobusStager'
print("Modified queueConfig.stager = ", queueConfig.stager)

scope = 'panda'

# Output file spec for the event-service output under test.
fileSpec = FileSpec()
fileSpec.fileType = 'es_output'
fileSpec.lfn = 'panda.sgotest.' + uuid.uuid4().hex + '.gz'
fileSpec.fileAttributes = {}

# Associated source file with a random size between 10 and 100 bytes.
assFileSpec = FileSpec()
assFileSpec.lfn = 'panda.sgotest.' + uuid.uuid4().hex
assFileSpec.fileType = 'es_output'
assFileSpec.fsize = random.randint(10, 100)

# create source file
# Deterministic path: md5("<scope>:<lfn>") picks the two hash directory
# levels.  Python 3 fix: hashlib.update() requires bytes, so encode the
# string first.  Renamed from `hash` to avoid shadowing the builtin.
hasher = hashlib.md5()
hasher.update(('%s:%s' % (scope, fileSpec.lfn)).encode('utf-8'))
hash_hex = hasher.hexdigest()
correctedscope = "/".join(scope.split('.'))
assFileSpec.path = "{endPoint}/{scope}/{hash1}/{hash2}/{lfn}".format(
    endPoint=queueConfig.stager['Globus_srcPath'],
    scope=correctedscope,
    hash1=hash_hex[0:2],
    hash2=hash_hex[2:4],
    lfn=assFileSpec.lfn)

# Make sure the two hash-level directories exist before opening the file.
if not os.path.exists(os.path.dirname(assFileSpec.path)):
    print("os.makedirs({})".format(os.path.dirname(assFileSpec.path)))
    os.makedirs(os.path.dirname(assFileSpec.path))

# NOTE(review): the file is written and closed further down in the file
# (outside this chunk), so the handle intentionally stays open here.
oFile = open(assFileSpec.path, 'w')
import sys
import uuid

from pandaharvester.harvestercore.job_spec import JobSpec
from pandaharvester.harvestercore.file_spec import FileSpec
from pandaharvester.harvestercore.event_spec import EventSpec
from pandaharvester.harvestercore.communicator_pool import CommunicatorPool

# Report a single finished event range, together with its zipped output file,
# back to PanDA via the communicator pool.
# Usage: <script> <taskID>-<PandaID>  (the full argument is also reused as
# the event range ID).
rID = sys.argv[1]
taskid = rID.split('-')[0]
# Python 3 fix: `long` no longer exists; int is unbounded in Python 3.
pandaid = int(rID.split('-')[1])

job = JobSpec()
job.PandaID = pandaid

event = EventSpec()
event.eventRangeID = rID
event.eventStatus = 'finished'

# Zip file holding the event output.  Renamed from `file` to avoid shadowing
# the builtin.  objstoreID/pathConvention/fsize/chksum are fixed test values.
zipFileSpec = FileSpec()
zipFileSpec.status = 'finished'
zipFileSpec.objstoreID = 9575
zipFileSpec.pathConvention = 1000
zipFileSpec.lfn = str(uuid.uuid4().hex) + '.zip'
zipFileSpec.fsize = 555
zipFileSpec.chksum = '0d2a9dc9'

job.zipEventMap = {1: {'events': [event], 'zip': zipFileSpec}}

a = CommunicatorPool()
a.update_jobs([job])