# Full-simulation driver: pull one task's input chunks and dry-run the
# brokerage / splitting / generation machinery against them.
# NOTE(review): `taskSpec`, `vo`, `cloudName`, `jediTaskID` and `datasetID`
# are expected to be defined earlier in this script — confirm against the
# surrounding code.
prodSourceLabel = taskSpec.prodSourceLabel
queueID = taskSpec.workQueue_ID
workQueue = tbIF.getWorkQueueMap().getQueueWithID(queueID)
brokerageLockIDs = ListWithLock([])
threadPool = ThreadPool()

# Typical number of input files per job type (cached for 600 s) — used by
# the task fetcher below to size the read-ahead.
typicalNumFilesMap = tbIF.getTypicalNumInput_JEDI(vo, prodSourceLabel, workQueue,
                                                  useResultCache=600)

# Fetch the task in full-simulation mode, restricted to this task/dataset.
tmpListList = tbIF.getTasksToBeProcessed_JEDI(None, vo, workQueue,
                                              prodSourceLabel, cloudName,
                                              nFiles=10,
                                              simTasks=[jediTaskID],
                                              fullSimulation=True,
                                              typicalNumFilesMap=typicalNumFilesMap,
                                              simDatasets=datasetID)

taskSetupper = TaskSetupper(vo, prodSourceLabel)
taskSetupper.initializeMods(tbIF, ddmIF)

# Walk every (task, cloud, chunk) triple and wire up the components a real
# job-generation cycle would use, with the broker forced into test mode.
for dummyID, tmpList in tmpListList:
    for taskSpec, cloudName, inputChunk in tmpList:
        jobBroker = JobBroker(taskSpec.vo, taskSpec.prodSourceLabel)
        tmpStat = jobBroker.initializeMods(ddmIF.getInterface(vo), tbIF)
        jobBroker.setTestMode(taskSpec.vo, taskSpec.prodSourceLabel)
        splitter = JobSplitter()
        gen = JobGeneratorThread(None, threadPool, tbIF, ddmIF, siteMapper,
                                 False, taskSetupper, None, None, None, None,
                                 brokerageLockIDs)
# Task-brokerage driver: resolve one task (ID from the command line) and run
# doBrokerage on it through the VO-specific TaskBroker implementation.
ddmIF.setupInterface()

jediTaskID = int(sys.argv[1])
s, taskSpec = tbIF.getTaskWithID_JEDI(jediTaskID, False)

body = TaskBroker(None, tbIF, ddmIF, taskSpec.vo, taskSpec.prodSourceLabel)
body.initializeMods(tbIF, ddmIF)

# Decode the stored task parameters to learn which implementation to use.
taskParam = tbIF.getTaskParamsWithID_JEDI(jediTaskID)
taskParamMap = RefinerUtils.decodeJSON(taskParam)
vo = taskParamMap['vo']
prodSourceLabel = taskParamMap['prodSourceLabel']
taskType = taskParamMap['taskType']

# Work queue is looked up by queue ID plus global share.
workQueueMapper = tbIF.getWorkQueueMap()
workQueue = workQueueMapper.getQueueWithIDGshare(taskSpec.workQueue_ID,
                                                 taskSpec.gshare)

impl = body.getImpl(vo, prodSourceLabel)

# Simulation fetch of just this task (minimal file read), then brokerage.
tmpListItem = tbIF.getTasksToBeProcessed_JEDI(None, None, None, None, None,
                                              simTasks=[jediTaskID],
                                              readMinFiles=True)
impl.doBrokerage(tmpListItem, taskSpec.vo, taskSpec.prodSourceLabel,
                 workQueue, taskSpec.resource_type)
# Full-simulation variant of the job-generation driver.
# NOTE(review): `queueID`, `gshare_name`, `vo`, `prodSourceLabel`,
# `cloudName`, `jediTaskID` and `datasetIDs` are presumably defined earlier
# in this script — confirm against the surrounding code.
workQueue = tbIF.getWorkQueueMap().getQueueWithID(queueID, gshare_name)
threadPool = ThreadPool()
# get typical number of files
#typicalNumFilesMap = tbIF.getTypicalNumInput_JEDI(vo,prodSourceLabel,workQueue,
#                                                  useResultCache=600)
# Empty map: the typical-file-count lookup above is disabled for this run.
typicalNumFilesMap = {}
tmpListList = tbIF.getTasksToBeProcessed_JEDI(
    None, vo, workQueue,
    prodSourceLabel,
    cloudName, nFiles=10, simTasks=[jediTaskID],
    fullSimulation=True,
    typicalNumFilesMap=typicalNumFilesMap,
    simDatasets=datasetIDs)
taskSetupper = TaskSetupper(vo, prodSourceLabel)
taskSetupper.initializeMods(tbIF, ddmIF)
# For each fetched (task, cloud, chunk) triple, set up broker and splitter.
for dummyID, tmpList in tmpListList:
    for taskSpec, cloudName, inputChunk in tmpList:
        jobBroker = JobBroker(taskSpec.vo, taskSpec.prodSourceLabel)
        tmpStat = jobBroker.initializeMods(ddmIF.getInterface(vo), tbIF)
        splitter = JobSplitter()
        # NOTE(review): this call is truncated in the visible source — the
        # remainder of the JobGeneratorThread argument list lies outside
        # this chunk.
        gen = JobGeneratorThread(None, threadPool, tbIF, ddmIF, siteMapper,
# Single-task job-generation driver ('sgen'): fetch one task's inputs and
# run a JobGeneratorThread over them synchronously.
jediTaskID = int(sys.argv[1])

# Task attributes from the DB (status flag is ignored here).
s, taskSpec = tbIF.getTaskWithID_JEDI(jediTaskID)

# Process identity string: <host>-<pid>_<pgrp>-sgen.
pid = '{0}-{1}_{2}-sgen'.format(socket.getfqdn().split('.')[0],
                                os.getpid(), os.getpgrp())

vo = taskSpec.vo
prodSourceLabel = taskSpec.prodSourceLabel
workQueue = tbIF.getWorkQueueMap().getQueueWithIDGshare(taskSpec.workQueue_ID,
                                                        taskSpec.gshare)

# Fetch the inputs for exactly this task and wrap them for thread-safe use.
tmpList = tbIF.getTasksToBeProcessed_JEDI(pid, None, workQueue, None, None,
                                          nFiles=1000,
                                          target_tasks=[jediTaskID])
inputList = ListWithLock(tmpList)

# Build the generator thread and run it to completion.
threadPool = ThreadPool()
taskSetupper = TaskSetupper(vo, prodSourceLabel)
taskSetupper.initializeMods(tbIF, ddmIF)
gen = JobGeneratorThread(inputList, threadPool, tbIF, ddmIF, siteMapper,
                         True, taskSetupper, pid, workQueue, 'sgen',
                         None, None, None, False)
gen.start()
gen.join()