def closeOutRedigiWorkflows(url, workflows):
    """
    Closes out a list of redigi workflows
    """
    noSiteWorkflows = []
    for workflow in workflows:
        closeOutWorkflow = True
        inputDataset = reqMgrClient.getInputDataSet(url, workflow)
        datasets = reqMgrClient.outputdatasetsWorkflow(url, workflow)
        for dataset in datasets:
            closeOutDataset = False
            percentage = percentageCompletion(url, workflow, dataset)
            phedexSubscription = phedexClient.hasCustodialSubscription(dataset)
            duplicate = None
            # if dataset has subscription and more than 95% events we check
            # duplicates
            if phedexSubscription and percentage >= float(0.95):    
                duplicate = dbs3Client.duplicateRunLumi(dataset)
                #if not duplicate events, dataset is ready
                if not duplicate:
                    closeOutDataset = True
                else:
                    closeOutDataset = False
            #validate when percentage is ok but has not phedex subscription
            if percentage >= float(0.95) and not phedexSubscription:
                noSiteWorkflows.append(workflow)
            #if at least one dataset is not ready wf cannot be closed out
            closeOutWorkflow = closeOutWorkflow and closeOutDataset
            print '| %80s | %100s | %4s | %5s| %3s | %5s|%5s| ' % (workflow, dataset,str(int(percentage*100)),
                                                    str(phedexSubscription), 100, duplicate, closeOutDataset)
        #workflow can only be closed out if all datasets are ready
        if closeOutWorkflow:
            reqMgrClient.closeOutWorkflowCascade(url, workflow)
    print '-'*180
    return noSiteWorkflows
def closeOutReRecoWorkflows(url, workflows):
    """
    Closeout ReReco workflows
    """
    noSiteWorkflows = []
    for workflow in workflows:
        if 'RelVal' in workflow:
            continue
        if 'TEST' in workflow:
            continue        
        datasets = reqMgrClient.outputdatasetsWorkflow(url, workflow)
        inputDataset = reqMgrClient.getInputDataSet(url, workflow)
        closeOutWorkflow = True
        #check if dataset is ready
        for dataset in datasets:
            duplicate = False
            closeOutDataset = True
            percentage = percentageCompletion(url, workflow, dataset)
            phedexSubscription = phedexClient.hasCustodialSubscription(dataset)
            closeOutDataset = False
            #dataset can be closed out only with 100% of events
            if percentage == 1 and phedexSubscription and not duplicate:
                closeOutDataset = True
            else:
                closeOutDataset = False
            
            #validate when percentage is ok but has not phedex subscription
            if percentage == 1 and not phedexSubscription:
                noSiteWorkflows.append(workflow)

            #if at least one dataset is not ready wf cannot be closed out
            closeOutWorkflow = closeOutWorkflow and closeOutDataset
            print '| %80s | %100s | %4s | %5s| %3s | %5s|%5s| ' % (workflow, dataset,str(int(percentage*100)),
                                                    str(phedexSubscription), 100, duplicate, closeOutDataset)
        #workflow can only be closed out if all datasets are ready
        if closeOutWorkflow:
            reqMgrClient.closeOutWorkflowCascade(url, workflow)
    print '-'*180
    return noSiteWorkflows
# Beispiel #3 ("Example #3" -- stray marker from the source this file was
# collected from; kept as a comment so the module parses)
# 0
def main():
	url='cmsweb.cern.ch'	
	parser = optparse.OptionParser()
	parser.add_option('-f', '--filename', help='Filename',dest='filename')
	parser.add_option('-w', '--workflow', help='Workflow',dest='userWorkflow')
	parser.add_option('-t', '--team', help='Type of Requests',dest='team')
	parser.add_option('-s', '--site', help='Force workflow to run at this site. For HLT/AI just put use HLT.',dest='site')
	parser.add_option('-k', '--ignore-restrictions', help='Ignore site restrictions',action="store_true",dest='ignoresite')
        parser.add_option('-u', '--new-priority', help='Change workflow priority to #',dest='newpriority')
	parser.add_option('-c', '--custodial', help='Custodial site',dest='siteCust')
	parser.add_option('-p', '--procstring', help='Process String',dest='inprocstring')
	parser.add_option('-m', '--procversion', help='Process Version',dest='inprocversion')
	parser.add_option('-e', '--execute', help='Actually assign workflows',action="store_true",dest='execute')
	parser.add_option('-x', '--restrict', help='Only assign workflows for this site',dest='restrict')
	parser.add_option('-z', '--threshold', help='Threshold for completeness of input dataset at site',dest='threshold')
	parser.add_option('-r', '--rssmax', help='Max RSS',dest='maxRSS')
	parser.add_option('-v', '--vsizemax', help='Max VMem',dest='maxVSize')
        parser.add_option('-o', '--xrootd', help='Read input using xrootd',action="store_true",dest='xrootd')
        parser.add_option('-i', '--ignore', help='Ignore any errors',action="store_true",dest='ignore')
	(options,args) = parser.parse_args()

	activity='reprocessing'

        if not options.restrict:
                restrict='None'
        else:
                restrict=options.restrict
        maxRSS = 2800000
        if not options.maxRSS:
                maxRSS = 3000000
        else:
                maxRSS=options.maxRSS
	maxRSSdefault = maxRSS
        maxVSize = 4100000000
        if not options.maxVSize:
                maxVSize = 4100000000
        else:
                maxVSize=options.maxVSize
	filename=options.filename

        ignore = 0
        if options.ignore:
           ignore = 1

        ignoresiterestrictions = 0
        if options.ignoresite:
           ignoresiterestrictions = 1
           
        if not options.newpriority:
           newpriority=0
        else: 
           newpriority=options.newpriority

        # Valid Tier-1 sites
        sites = ['T1_DE_KIT', 'T1_FR_CCIN2P3', 'T1_IT_CNAF', 'T1_ES_PIC', 'T1_TW_ASGC', 'T1_UK_RAL', 'T1_US_FNAL', 'T1_RU_JINR', 'T2_CH_CERN', 'HLT']

        # only assign workflows from these campaigns
        valids = ['Fall11R1', 'Fall11R2', 'Fall11R4', 'Spring14dr', 'Fall13dr', 'Summer12DR53X', 'pAWinter13DR53X', 'Cosmic70DR', 'HiFall13DR53X', 'Phys14DR', 'Summer11LegDR','Fall14DR', 'Fall14DR73', 'TP2023SHCALDR', '2019GEMUpg14DR', 'HiWinter13DR53X', 'RunIWinter15DR', '2023SHCALUpg14DR']

        # Tier-1s with no tape left, so use CERN instead
        sitesNoTape = ['T1_RU_JINR']

        if options.filename:
           f = open(filename,'r')
        elif options.userWorkflow:
           f = [options.userWorkflow]
        else:
           f = getWorkflows(url)

        workflowsNotAssignedInput = []
        workflowsAssigned = {}

        for workflow in f:
           workflow = workflow.rstrip('\n')

           if not options.xrootd:
              useX = 0
           else:
              useX = 1

           # Double check that the workflow really is in assignment-approved
           currentStatus = getCurrentStatus(url, workflow)
           if currentStatus != 'assignment-approved':
              print 'NOTE: Due to workflow status (',currentStatus,') skipping',workflow
              continue

           # Only automatically assign workflows from specified campaigns
           campaign = getCampaign(url, workflow)
           if campaign not in valids and not options.userWorkflow and not options.filename:
              print 'NOTE: Due to campaign skipping',workflow
              continue

           siteUse=options.site
           if siteUse == 'T2_US':
              siteUse =  ['T2_US_Caltech', 'T2_US_Florida', 'T2_US_MIT', 'T2_US_Nebraska', 'T3_US_Omaha', 'T2_US_Purdue', 'T2_US_UCSD', 'T2_US_Vanderbilt', 'T2_US_Wisconsin']
              if not options.siteCust:
                 print 'ERROR: A custodial site must be specified'
                 continue
              siteCust = options.siteCust

           # Check status of input dataset
           inputDataset = reqMgrClient.getInputDataSet(url, workflow)
           inputDatasetStatus = getDatasetStatus(inputDataset)
           if inputDatasetStatus != 'VALID' and inputDatasetStatus != 'PRODUCTION':
              print 'ERROR: Unable to assign',workflow,' because input dataset is not PRODUCTION or VALID, value is',inputDatasetStatus
              continue

           if not siteUse or siteUse == 'None':
              # Find site to run workflow if no site specified
              threshold = 98.0
              if options.threshold:
                 threshold = options.threshold
              [siteUse,completeness] = getSiteWithMostInput(inputDataset, threshold)
              if siteUse == 'None' or ('T1_' not in siteUse and 'T2_CH_CERN' not in siteUse and campaign != 'Spring14miniaod'):
                 workflowsNotAssignedInput.append(workflow)
                 continue
              if completeness < 100.0:
                 print 'Input dataset is < 100% complete (',completeness,') so enabling xrootd'
                 useX = 1
     
           # Set the custodial location if necessary
           if not options.site or options.site != 'T2_US':
              if not options.siteCust:
                 siteCust = siteUse
              else:
                 siteCust = options.siteCust
           if options.site == 'HLT':
              siteUse = ['T2_CH_CERN_HLT', 'T2_CH_CERN']

           # Some sites have no free space on tape, so send the data to CERN
           if siteUse in sitesNoTape:
              siteCust = 'T0_CH_CERN'

           # Don't specify a custodial site for miniaod
           if campaign == 'Spring14miniaod':
              siteCust = 'None'

           # Check if input dataset subscribed to disk endpoint
           siteSE = siteUse
           if 'T1' in siteUse:
              siteSE = siteSE + '_Disk'
           [subscribedOurSite, subscribedOtherSite] = checkAcceptedSubscriptionRequest(url, inputDataset, siteSE)
           if not subscribedOurSite and not options.xrootd and not ignore:
              print 'ERROR: input dataset not subscribed/approved to required Disk endpoint and xrootd option not enabled (',subscribedOurSite,subscribedOtherSite,workflow,siteSE,')'
              workflowsNotAssignedInput.append(workflow)
              continue
           if options.xrootd and not subscribedOtherSite and not ignore:
              print 'ERROR: input dataset not subscribed/approved to any Disk endpoint (',subscribedOurSite,subscribedOtherSite,')'
              continue

           # Check if pileup dataset subscribed to disk endpoint
           pileupDataset = getPileupDataset(url, workflow)
           if pileupDataset != 'None':
              [subscribedOurSite, subscribedOtherSite] = checkAcceptedSubscriptionRequest(url, pileupDataset, siteSE)
              if not subscribedOurSite and not ignore:
                 print 'ERROR: pileup dataset (',pileupDataset,') not subscribed/approved to required Disk endpoint',siteSE,' for workflow',workflow
                 continue
         
           # Decide which team to use if not already defined
           # - currently we only use production for all workflows
           if options.team:
              team = options.team
           else:
              team = 'production'

           # Get LFN base from input dataset
           lfn = getLFNbase(url, inputDataset)

	   # Set maxRSS
	   maxRSS = maxRSSdefault
           if ('HiFall11' in workflow or 'HiFall13DR53X' in workflow) and 'IN2P3' in siteUse:
              maxRSS = 4000000

           # Set max number of merge events
           maxmergeevents = 50000
           if 'Fall11R1' in workflow:
              maxmergeevents = 6000
           if 'DR61SLHCx' in workflow:
              maxmergeevents = 5000

           # Acquisition era
           era = getCampaign(url, workflow)

           # Correct situations where campaign name cannot be used as acquisition era
           if era == '2019GEMUpg14DR':
              era = 'GEM2019Upg14DR'
           if era == '2023SHCALUpg14DR':
              era = 'SHCAL2023Upg14DR'

           if era == 'None':
              print 'ERROR: unable to get campaign for workflow', workflow
              continue

           # Processing string
           if options.inprocstring: 
              procstring = options.inprocstring
           else:
              procstring = getProcString(url, workflow)

           # ProcessingVersion
           if not options.inprocversion:
              procversion = getDatasetVersion(url, workflow, era, procstring)
           else:
              procversion = options.inprocversion
 
           # Handle run-dependent MC
           if 'PU_RD' in procstring:
              numEvents = getRequestNumEvents(url, workflow)
              reqJobs = 500
              if 'PU_RD2' in procstring:
                 reqJobs = 2000
              eventsPerJob = int(numEvents/(reqJobs*1.4))
              if eventsPerJob < 2000:
                 if options.execute:
                    print 'Changing splitting to',eventsPerJob,'events per job'
                    changeSplitting(url, workflow, eventsPerJob)
                 else:
                    print 'Would change splitting to',eventsPerJob,'events per job'

           # Site checking
           if siteUse not in sites and options.site != 'T2_US' and siteUse != ['T2_CH_CERN_T0', 'T2_CH_CERN_HLT', 'T2_CH_CERN'] and not ignoresiterestrictions and siteUse != ['T2_CH_CERN_HLT', 'T2_CH_CERN']:
              if 'AODSIM' not in inputDataset:
                 print 'ERROR: invalid site',siteUse
                 continue

           workflowsAssigned[workflow] = siteUse

           if options.execute:
              if restrict == 'None' or restrict == siteUse:
	          assignRequest(url, workflow, team, siteUse, era, procversion, procstring, activity, lfn, maxmergeevents, maxRSS, maxVSize, useX, siteCust)
                  if (newpriority !=0 ):
                     changePriorityWorkflow(url,workflow,newpriority)
                     print "Priority reset to %i" % newpriority
              else:
                     print 'Skipping workflow ',workflow
           else:
              if restrict == 'None' or restrict == siteUse:
                 print 'Would assign ',workflow,' with ','Acquisition Era:',era,'ProcessingString:',procstring,'ProcessingVersion:',procversion,'lfn:',lfn,'Site(s):',siteUse,'Custodial Site:',siteCust,'team:',team
                 if (newpriority !=0 ):
                    print "Would reset priority to %i" % newpriority
              else:
                 print 'Would skip workflow ',workflow

        print ''
        print 'SUMMARY'
        print ''

        # List assigned workflows
        if len(workflowsAssigned) > 0:
           if options.execute:
              print 'Workflows assigned:'
           else:
              print 'Workflows which can be assigned:'
           for workflow in workflowsAssigned:
              print ' ',workflow,workflowsAssigned[workflow]
     
        # List workflows not assigned because of input dataset
        if len(workflowsNotAssignedInput) > 0:
           print 'Workflows not assigned because input datasets are not complete on any site:'
           for workflow in workflowsNotAssignedInput:
              print ' ',workflow

	sys.exit(0)
def main():
	url='cmsweb.cern.ch'	
	parser = optparse.OptionParser()
	parser.add_option('-f', '--filename', help='Filename',dest='filename')
	parser.add_option('-w', '--workflow', help='Workflow',dest='userWorkflow')
	parser.add_option('-t', '--team', help='Type of Requests',dest='team')
	parser.add_option('-s', '--site', help='Force workflow to run at this site. For HLT/AI just put use HLT.',dest='site')
	parser.add_option('-c', '--custodial', help='Custodial site',dest='siteCust')
	parser.add_option('-p', '--procstring', help='Process String',dest='inprocstring')
	parser.add_option('-m', '--procversion', help='Process Version',dest='inprocversion')
	parser.add_option('-n', '--specialstring', help='Special Process String',dest='specialprocstring')
	parser.add_option('-e', '--execute', help='Actually assign workflows',action="store_true",dest='execute')
	parser.add_option('-x', '--restrict', help='Only assign workflows for this site',dest='restrict')
	parser.add_option('-r', '--rssmax', help='Max RSS',dest='maxRSS')
	parser.add_option('-v', '--vsizemax', help='Max VMem',dest='maxVSize')
	parser.add_option('-a', '--extension', help='Use _ext special name',dest='extension')
        parser.add_option('-o', '--xrootd', help='Read input using xrootd',action="store_true",dest='xrootd')
	(options,args) = parser.parse_args()
	if not options.filename and not options.userWorkflow:
		print "A filename or workflow is required"
		sys.exit(0)
	activity='reprocessing'
        if not options.restrict:
                restrict='None'
        else:
                restrict=options.restrict
        maxRSS = 2300000
        if not options.maxRSS:
                maxRSS = 2300000
        else:
                maxRSS=options.maxRSS
	maxRSSdefault = maxRSS
        maxVSize = 4100000000
        if not options.maxVSize:
                maxVSize = 4100000000
        else:
                maxVSize=options.maxVSize
	filename=options.filename

        if not options.xrootd:
           useX = 0
        else:
           useX = 1

        # Valid Tier-1 sites
        sites = ['T1_DE_KIT', 'T1_FR_CCIN2P3', 'T1_IT_CNAF', 'T1_ES_PIC', 'T1_TW_ASGC', 'T1_UK_RAL', 'T1_US_FNAL', 'T2_CH_CERN', 'HLT']

        if options.filename:
           f=open(filename,'r')
        else:
           f=[options.userWorkflow]

        for workflow in f:
           workflow = workflow.rstrip('\n')
           siteUse=options.site
           if siteUse == 'T2_US':
              siteUse =  ['T2_US_Caltech', 'T2_US_Florida', 'T2_US_MIT', 'T2_US_Nebraska', 'T3_US_Omaha', 'T2_US_Purdue', 'T2_US_UCSD', 'T2_US_Vanderbilt', 'T2_US_Wisconsin']
              if not options.siteCust:
                 print 'ERROR: A custodial site must be specified'
                 sys.exit(0)
              siteCust = options.siteCust

           team=options.team

           inputDataset = reqMgrClient.getInputDataSet(url, workflow)

           # Check status of input dataset
           inputDatasetStatus = getDatasetStatus(inputDataset)
           if inputDatasetStatus != 'VALID' and inputDatasetStatus != 'PRODUCTION':
              print 'ERROR: Input dataset is not PRODUCTION or VALID, value is',inputDatasetStatus
              sys.exit(0)

           if '-ext' in inputDataset and not options.extension:
              print 'WARNING: Input dataset is an extension and extension option is not specified'

           if not siteUse or siteUse == 'None':
              # Determine site where workflow should be run
              count=0
              for site in sites:
                 if site in workflow:
                    count=count+1
                    siteUse = site

              # Find custodial location of input dataset if workflow name contains no T1 site or multiple T1 sites
              if count==0 or count>1:
                 siteUse = findCustodialLocation(url, inputDataset)
                 if siteUse == 'None':
                    print 'ERROR: No custodial site found'
                    sys.exit(0)
                 siteUse = siteUse[:-4]
     
           # Set the custodial location if necessary
           if not options.site or options.site != 'T2_US':
              if not options.siteCust:
                 siteCust = siteUse
              else:
                 siteCust = options.siteCust
           if options.site == 'HLT':
              siteUse = ['T2_CH_CERN_AI', 'T2_CH_CERN_HLT', 'T2_CH_CERN']
              team = 'hlt'

           # Check if input dataset subscribed to disk endpoint
           if 'T2_CH_CERN' in siteUse:
              siteSE = 'T2_CH_CERN'
           else:
              siteSE = siteUse + '_Disk'
           [subscribedOurSite, subscribedOtherSite] = checkAcceptedSubscriptionRequest(url, inputDataset, siteSE)
           if not subscribedOurSite and not options.xrootd and 'Fall11R2' not in workflow:
              print 'ERROR: input dataset not subscribed/approved to required Disk endpoint'
              sys.exit(0)
           if options.xrootd and not subscribedOtherSite:
              print 'ERROR: input dataset not subscribed/approved to any Disk endpoint'
              sys.exit(0)

           # Extract required part of global tag
           gtRaw = getGlobalTag(url, workflow)
           gtBits = gtRaw.split('::')
           globalTag = gtBits[0]

           # Get campaign name
           campaign = getCampaign(url, workflow)

           pileupDataset = getPileupDataset(url, workflow)
           if pileupDataset != 'None':
              [subscribedOurSite, subscribedOtherSite] = checkAcceptedSubscriptionRequest(url, pileupDataset, siteSE)
              if not subscribedOurSite:
                 print 'ERROR: pileup dataset not subscribed/approved to required Disk endpoint'
                 sys.exit(0)            
         
           # Determine pileup scenario
           # - Fall11_R2 & Fall11_R4 don't add pileup so extract pileup scenario from input
           pileupScenario = ''
           if not options.inprocstring:
              pileupScenario = getPileupScenario(url, workflow)
              if campaign == 'Summer12_DR53X_RD':
                 pileupScenario = 'PU_RD1'
              if pileupScenario == 'Unknown' and 'MinBias' in pileupDataset and 'LowPU2010DR42' not in workflow:
                 print 'ERROR: unable to determine pileup scenario'
                 sys.exit(0)
              elif 'Fall11_R2' in workflow or 'Fall11_R4' in workflow or 'Fall11R2' in workflow or 'Fall11R4' in workflow:
                 matchObj = re.match(r".*Fall11-(.*)_START.*", inputDataset)
                 if matchObj:
                    pileupScenario = matchObj.group(1)
                 else:
                    pileupScenario == 'Unknown'
              elif pileupScenario == 'Unknown' and 'MinBias' not in pileupDataset:
                 pileupScenario = 'NoPileUp'

              if pileupScenario == 'Unknown':
                 pileupScenario = ''

           # Decide which team to use if not already defined
           if not team:
              priority = int(getPriority(url, workflow))
              if priority < 100000:
                 team = 'reproc_lowprio'
              else:
                 team = 'reproc_highprio'

           specialName = ''

           era = 'Summer12'
           lfn = '/store/mc'

           #delete era and lfn so it can't reuse the ones from the previous workflow
	   del era
	   del lfn

           # Set era, lfn and campaign-dependent part of name if necessary
           if 'Summer12_DR51X' in workflow:
              era = 'Summer12'
              lfn = '/store/mc'

           if 'Summer12_DR52X' in workflow:
              era = 'Summer12'
              lfn = '/store/mc'

           if 'Summer12_DR53X' in workflow or ('Summer12' in workflow and 'DR53X' in workflow):
              era = 'Summer12_DR53X'
              lfn = '/store/mc'

           #this is incorrect for HiFall11 workflows, but is changed further down
           if 'Fall11_R' in workflow or 'Fall11R' in workflow:
              era = 'Fall11'
              lfn = '/store/mc'

           if 'Summer13dr53X' in workflow:
              era = 'Summer13dr53X'
              lfn = '/store/mc'

           if 'Summer11dr53X' in workflow:
              era = 'Summer11dr53X'
              lfn = '/store/mc'

           if 'Fall11_HLTMuonia' in workflow:
              era = 'Fall11'
              lfn = '/store/mc'
              specialName = 'HLTMuonia_'

           if 'Summer11_R' in workflow:
              era = 'Summer11'
              lfn = '/store/mc'

           if 'LowPU2010_DR42' in workflow or 'LowPU2010DR42' in workflow:
              era = 'Summer12'
              lfn = '/store/mc'
              specialName = 'LowPU2010_DR42_'
              pileupScenario = 'PU_S0'

           if 'UpgradeL1TDR_DR6X' in workflow:
              era = 'Summer12'
              lfn = '/store/mc'

           if 'HiWinter13' in inputDataset:
              era = 'HiWinter13'
              lfn = '/store/himc'
     
           if 'Spring14dr' in workflow:
              era = 'Spring14dr'
              lfn = '/store/mc'
              if '_castor_' in workflow:
                 specialName = 'castor_'

           if 'Winter13' in workflow and 'DR53X' in workflow:
              era = 'HiWinter13'
              lfn = '/store/himc'
           if 'HiWinter13' in workflow and 'DR53X' in workflow:
              pileupScenario = ''  
           if 'pAWinter13' in workflow and 'DR53X' in workflow:
              pileupScenario = 'pa' # not actually the pileup scenario of course
           if 'ppWinter13' in workflow and 'DR53X' in workflow:
              pileupScenario = 'pp' # not actually the pileup scenario of course

           if 'Summer11LegDR' in campaign:
              era = 'Summer11LegDR'
              lfn = '/store/mc'

           if 'UpgradePhase1Age' in campaign:
              era = 'Summer13'
	      lfn = '/store/mc'
              specialName = campaign + '_'

           if campaign == 'UpgradePhase2LB4PS_2013_DR61SLHCx':
              era = 'Summer13'
              lfn = '/store/mc'
              specialName = campaign + '_'

           if campaign == 'UpgradePhase2BE_2013_DR61SLHCx':
              era = 'Summer13'
              lfn = '/store/mc'
              specialName = campaign + '_'

           if campaign == 'UpgradePhase2LB6PS_2013_DR61SLHCx':
              era = 'Summer13'
              lfn = '/store/mc'
              specialName = campaign + '_'
  
           if campaign == 'UpgradePhase1Age0DES_DR61SLHCx':
              era = 'Summer13'
              lfn = '/store/mc'
              specialName = campaign + '_'
           
           if campaign == 'UpgradePhase1Age0START_DR61SLHCx':
              era = 'Summer13'
              lfn = '/store/mc'
              specialName = campaign + '_'

           if campaign == 'UpgradePhase1Age3H_DR61SLHCx':
              era = 'Summer13'
              lfn = '/store/mc'
              specialName = campaign + '_'

           if campaign == 'UpgradePhase1Age5H_DR61SLHCx':
              era = 'Summer13'
              lfn = '/store/mc'
              specialName = campaign + '_'

           if campaign == 'UpgradePhase1Age1K_DR61SLHCx':
              era = 'Summer13'
              lfn = '/store/mc'
              specialName = campaign + '_'

           if campaign == 'UpgradePhase1Age3K_DR61SLHCx':
              era = 'Summer13'
              lfn = '/store/mc'
              specialName = campaign + '_'

           #change back to old campaign names for UpgradePhase1
           if 'UpgradePhase1Age' in campaign and 'dr61SLHCx' in specialName:
              specialName = specialName.replace("dr61SLHCx","_DR61SLHCx")
           if 'dr61SLHCx' in specialName:
              print 'WARNING: using new campaign name format'		   

           if campaign == 'HiFall11_DR44X' or campaign == 'HiFall11DR44':
              era = 'HiFall11'
              lfn = '/store/himc'
              specialName = 'HiFall11_DR44X' + '_'

           if campaign == 'HiFall13DR53X':
              era = 'HiFall13DR53X'
              lfn = '/store/himc'

           if campaign == 'UpgFall13d':
              era = campaign
              lfn = '/store/mc'

           if campaign == 'Fall13dr':
              era = campaign
              lfn = '/store/mc'
              if '_castor_tsg_' in workflow:
                 specialName = 'castor_tsg_'
              elif '_castor_' in workflow:
                 specialName = 'castor_'
              elif '_tsg_' in workflow:
                 specialName = 'tsg_'
              elif '__' in workflow:
                 specialName = ''
              else:
                 print 'ERROR: unexpected special name string in workflow name'
                 sys.exit(0)

           # Handle NewG4Phys
           if campaign == 'Summer12DR53X' and 'NewG4Phys' in workflow:
              specialName = 'NewG4Phys_'

           # Handle Ext30
           if campaign == 'Summer12DR53X' and 'Ext30' in workflow:
              specialName = 'Ext30_'

           # Handle BS2011
           if campaign == 'LowPU2010DR42' and 'BS2011' in workflow:
              specialName = 'LowPU2010_DR42_BS2011_'

           # Construct processed dataset version
           if pileupScenario != '':
              pileupScenario = pileupScenario+'_' 
           if options.specialprocstring:
              specialName = options.specialprocstring + '_'
           extTag = ''
           if options.extension:
              extTag = '_ext'+options.extension

           # ProcessingString
           if not options.inprocstring:
              procstring = specialName+pileupScenario+globalTag+extTag
           else:
              procstring = options.inprocstring

           # ProcessingVersion
           if not options.inprocversion:
              procversion = getDatasetVersion(url, workflow, era, procstring)
           else:
              procversion = options.inprocversion

	   #reset maxRSS to default, so it can't reuse the custom value from a previous workflow
	   maxRSS = maxRSSdefault
           if ('HiFall11' in workflow or 'HiFall13DR53X' in workflow) and 'IN2P3' in siteUse:
              maxRSS = 4000000

           # Set max number of merge events
           maxmergeevents = 50000
           #if 'Fall11_R1' in workflow:
           #   maxmergeevents = 6000
           if 'DR61SLHCx' in workflow:
              maxmergeevents = 5000

           # Checks
           if not era:
              print 'ERROR: era is not defined'
              sys.exit(0)

           if not lfn:
              print 'ERROR: lfn is not defined'
              sys.exit(0)

           if siteUse not in sites and options.site != 'T2_US' and siteUse != ['T2_CH_CERN_AI', 'T2_CH_CERN_HLT', 'T2_CH_CERN']:
              print 'ERROR: invalid site'
              sys.exit(0)

           if pileupScenario == 'Unknown':
              print 'ERROR: unable to determine pileup scenario'
              sys.exit(0)

           if options.execute:
              if restrict == 'None' or restrict == siteUse:
	         assignRequest(url, workflow, team, siteUse, era, procversion, procstring, activity, lfn, maxmergeevents, maxRSS, maxVSize, useX, siteCust)
              else:
                 print 'Skipping workflow ',workflow
           else:
              if restrict == 'None' or restrict == siteUse:
                 print 'Would assign ',workflow,' with ','Acquisition Era:',era,'ProcessingString:',procstring,'ProcessingVersion:',procversion,'lfn:',lfn,'Site(s):',siteUse,'Custodial Site:',siteCust,'team:',team,'maxmergeevents:',maxmergeevents,'maxRSS:',maxRSS
              else:
                 print 'Would skip workflow ',workflow

	sys.exit(0)