def getSplitting(requestName):
    """
    Return the 'events_per_job' splitting parameter of the top level task
    of the given request, read from the request's pickled spec in CouchDB.

    NOTE(review): pickle.loads on data fetched from CouchDB — the couch
    instance must be trusted.
    """
    reqmgrUrl = 'https://cmsweb.cern.ch/reqmgr/reqMgr/'
    reqmgr = RequestManager(dict={'endpoint': reqmgrUrl})
    result = reqmgr.getRequest(requestName)
    workloadDB = Database(result['CouchWorkloadDBName'], result['CouchURL'])
    workloadPickle = workloadDB.getAttachment(requestName, 'spec')
    spec = pickle.loads(workloadPickle)
    workload = WMWorkloadHelper(spec)
    # Splitting parameters live on the first top level task of the workload.
    params = workload.getTopLevelTask()[0].jobSplittingParameters()
    return params['events_per_job']
def getSplitting(requestName):
    """
    Return the 'events_per_job' splitting parameter of the top level task
    of the given request, read from the request's pickled spec in CouchDB.

    NOTE(review): pickle.loads on data fetched from CouchDB — the couch
    instance must be trusted.
    """
    reqmgrUrl = 'https://cmsweb.cern.ch/reqmgr/reqMgr/'
    reqmgr = RequestManager(dict={'endpoint': reqmgrUrl})
    result = reqmgr.getRequest(requestName)
    workloadDB = Database(result['CouchWorkloadDBName'], result['CouchURL'])
    workloadPickle = workloadDB.getAttachment(requestName, 'spec')
    spec = pickle.loads(workloadPickle)
    workload = WMWorkloadHelper(spec)
    # Splitting parameters live on the first top level task of the workload.
    params = workload.getTopLevelTask()[0].jobSplittingParameters()
    return params['events_per_job']
def main():
    demPolicy = Block()
    reqmgr = RequestManager(dict = {'endpoint' : 'https://cmsweb.cern.ch/reqmgr/reqMgr'})
    result = reqmgr.getRequest('pdmvserv_HIG-Summer12DR53X-01392_T1_ES_PIC_MSS_1_v0__130724_063344_7207')
    workloadDB = Database(result['CouchWorkloadDBName'], result['CouchURL'])
    workloadPickle = workloadDB.getAttachment('pdmvserv_HIG-Summer12DR53X-01392_T1_ES_PIC_MSS_1_v0__130724_063344_7207', 'spec')
    spec = pickle.loads(workloadPickle)
    workload = WMWorkloadHelper(spec)
    x,y = demPolicy(wmspec = workload, task = workload.getTopLevelTask()[0])
    print x
    print y
# Beispiel #4 (paste artifact from the code-example source; kept as comment)
# 0
    def createResubmitSpec(self, serverUrl, couchDB):
        """
        _createResubmitSpec_

        Build a dummy workload with a single file-split Processing task,
        register a failed job for it in the data collection service, then
        truncate the workload into a Resubmit workflow and return it.
        """
        self.site = "cmssrm.fnal.gov"
        wload = WMWorkloadHelper(WMWorkload("TestWorkload"))
        recoTask = wload.newTask("reco")
        wload.setOwnerDetails(name="evansde77", group="DMWM")

        # The top level task reads the input dataset and splits by file.
        recoTask.addInputDataset(primary="PRIMARY", processed="processed-v1", tier="TIER1")
        recoTask.data.input.splitting.algorithm = "File"
        recoTask.setTaskType("Processing")
        recoStep = recoTask.makeStep("cmsRun1")
        recoStep.setStepType("CMSSW")
        recoTask.applyTemplates()
        stepHelper = recoStep.getTypeHelper()
        stepHelper.addOutputModule("outputRECO",
                                   primaryDataset="PRIMARY",
                                   processedDataset="processed-v2",
                                   dataTier="TIER2",
                                   lfnBase="/store/dunkindonuts",
                                   mergedLFNBase="/store/kfc")

        dataCollSvc = DataCollectionService(url=serverUrl, database=couchDB)

        def makeJob(wmWorkload):
            # Minimal job record pointing at the reco task of the workload.
            job = Job()
            job["task"] = wmWorkload.getTask("reco").getPathName()
            job["workflow"] = wmWorkload.name()
            job["location"] = self.site
            job["owner"] = "evansde77"
            job["group"] = "DMWM"
            return job

        fileA = WMFile(lfn=makeUUID(), size=1024, events=1024)
        fileA.setLocation([self.site])
        fileA.addRun(Run(1, 1, 2))
        fileB = WMFile(lfn=makeUUID(), size=1024, events=1024)
        fileB.setLocation([self.site])
        fileB.addRun(Run(1, 3, 4))
        failedJob = makeJob(wload)
        failedJob.addFile(fileA)
        failedJob.addFile(fileB)

        # Record the failure so the resubmit workflow has something to redo.
        dcsJobs = [failedJob]
        dataCollSvc.failedJobs(dcsJobs)
        topTask = wload.getTopLevelTask()[0]
        wload.truncate("Resubmit_TestWorkload", topTask.getPathName(),
                       serverUrl, couchDB)

        return wload
def retrieveSchema(workflowName):
    specURL = os.path.join(reqmgrCouchURL, workflowName, "spec")
    helper = WMWorkloadHelper()
    print "  retrieving original workflow...",
    helper.load(specURL)
    print "done."
    schema = {}
    for (key, value) in helper.data.request.schema.dictionary_().iteritems():
        if value != None:
            schema[key] = value
    schema["Requestor"] = "linacre"
    schema["Group"] = "DATAOPS"
    del schema["RequestName"]
    del schema["CouchDBName"]
    del schema["CouchURL"]

    assign = {}
    assign["unmergedLFNBase"] = helper.data.properties.unmergedLFNBase
    assign["mergedLFNBase"] = helper.data.properties.mergedLFNBase
    assign["processingVersion"] = helper.data.properties.processingVersion
    assign["dashboardActivity"] = helper.data.properties.dashboardActivity
    assign["acquisitionEra"] = helper.data.properties.acquisitionEra

    topLevelTask = helper.getTopLevelTask()[0]
    assign["SiteWhitelist"] = topLevelTask.siteWhitelist()

    mergeTask = None
    for mergeTask in topLevelTask.childTaskIterator():
        if mergeTask.taskType() == "Merge":
            if mergeTask.getPathName().find("DQM") == -1:
                break

    assign["MinMergeSize"] = mergeTask.jobSplittingParameters(
    )["min_merge_size"]
    assign["MaxMergeSize"] = mergeTask.jobSplittingParameters(
    )["max_merge_size"]
    assign["MaxMergeEvents"] = mergeTask.jobSplittingParameters().get(
        "max_merge_events", 50000)

    return (schema, assign)
def main():
    toCheckList = '/home/dballest/Dev-Workspace/dev-scripts/data/upgrade-vocms85-613.txt'
    handle = open(toCheckList, 'r')
    timePerJobFile = open('/home/dballest/Dev-Workspace/dev-scripts/data/upgrade-vocms85-tpj.data', 'w')
    eventsPerLumiFile = open('/home/dballest/Dev-Workspace/dev-scripts/data/upgrade-vocms85-epl.data', 'w')
    lumisPerMergeFile = open('/home/dballest/Dev-Workspace/dev-scripts/data/upgrade-vocms85-lpm.data', 'w')
    count = 0
    for request in handle:
        z = WMWorkloadHelper()
        z.load('https://cmsweb.cern.ch/couchdb/reqmgr_workload_cache/%s/spec' % request.strip())
        schema = z.data.request.schema
        requesType = schema.RequestType
        if requesType != 'MonteCarlo':
            continue
        timePerEvent = schema.TimePerEvent
        sizePerEvent = schema.SizePerEvent
        events = schema.RequestNumEvents
        eff = getattr(schema, "FilterEfficiency", 1.0)
        topTask = z.getTopLevelTask()[0]
        eventsPerJob = topTask.data.input.splitting.events_per_job
        if eff < 1.0:
            count += 1
            for childTask in topTask.childTaskIterator():
                if childTask.data.taskType == 'Merge':
                    mergeSizeLimit = childTask.data.input.splitting.max_merge_size
                    mergeEventLimit = childTask.data.input.splitting.max_merge_events
                    break
            sizeOfZeroEvent = 131091.0
            sizePerLumi = eventsPerJob*sizePerEvent*eff
            lumisPerMergedBySize = mergeSizeLimit/(sizePerLumi + sizeOfZeroEvent)
            lumisPerMergedByEvent = mergeEventLimit/(eventsPerJob*eff)
            timePerJobFile.write("%f\n" % (timePerEvent * eventsPerJob))
            eventsPerLumiFile.write("%f\n" % (eventsPerJob*eff))
            lumisPerMergeFile.write("%f\n" % min(lumisPerMergedBySize, lumisPerMergedByEvent))
    print count
    handle.close()
    timePerJobFile.close()
    eventsPerLumiFile.close()
    lumisPerMergeFile.close()
def retrieveSchema(workflowName):
    specURL = os.path.join(reqmgrCouchURL, workflowName, "spec")
    helper = WMWorkloadHelper()
    print "  retrieving original workflow...",
    helper.load(specURL)
    print "done."
    schema = {}
    for (key, value) in helper.data.request.schema.dictionary_().iteritems():
        if value != None:
            schema[key] = value
    schema["Requestor"] = "linacre"
    schema["Group"] = "DATAOPS"
    del schema["RequestName"]
    del schema["CouchDBName"]
    del schema["CouchURL"]

    assign = {}
    assign["unmergedLFNBase"] = helper.data.properties.unmergedLFNBase
    assign["mergedLFNBase"] = helper.data.properties.mergedLFNBase
    assign["processingVersion"] = helper.data.properties.processingVersion
    assign["dashboardActivity"] = helper.data.properties.dashboardActivity
    assign["acquisitionEra"] = helper.data.properties.acquisitionEra

    topLevelTask = helper.getTopLevelTask()[0]
    assign["SiteWhitelist"] = topLevelTask.siteWhitelist()

    mergeTask = None
    for mergeTask in topLevelTask.childTaskIterator():
        if mergeTask.taskType() == "Merge":
            if mergeTask.getPathName().find("DQM") == -1:
                break
        
    assign["MinMergeSize"] = mergeTask.jobSplittingParameters()["min_merge_size"]
    assign["MaxMergeSize"] = mergeTask.jobSplittingParameters()["max_merge_size"]
    assign["MaxMergeEvents"] = mergeTask.jobSplittingParameters().get("max_merge_events", 50000)
    
    return (schema, assign)