def main(): url = 'https://cmsweb.cern.ch' testbed_url = 'https://cmsweb-testbed.cern.ch' #url = 'https://alan-cloud1.cern.ch' #Create option parser usage = "usage: %prog [options] [WORKFLOW] TASK" parser = OptionParser(usage=usage) parser.add_option("-f","--file", dest="file", default=None, help="Text file of a list of workflows") parser.add_option("-m","--memory", dest="memory", default=None, help="Memory to override the original request memory") (options, args) = parser.parse_args() wfs = None if len(args) == 2: wfs = [args[0]] task = args[1] #list of files elif options.file and len(args) == 1: wfs = [l.strip() for l in open(options.file) if l.strip()] task = args[0] else: parser.error("Provide the Workflow Name and the Task Name") sys.exit(1) configJson = {"createRequest":{}} config = Config(configJson) reqMgrClient = ReqMgrClient(url, config) for wfname in wfs: #original wf info wf = rqMgr.Workflow(wfname) #set up acdc stuff if "ACDC" in wf.info["RequestString"]: config.requestArgs["createRequest"]["RequestString"] = wf.info["RequestString"] else: config.requestArgs["createRequest"]["RequestString"] = "ACDC_"+ wf.info["RequestString"] config.requestArgs["createRequest"]["PrepID"] = wf.info["PrepID"] config.requestArgs["createRequest"]["RequestPriority"] = wf.info["RequestPriority"] config.requestArgs["createRequest"]["OriginalRequestName"] = wf.name config.requestArgs["createRequest"]["InitialTaskPath"] = "/%s/%s"%(wf.name, task) config.requestArgs["createRequest"]["ACDCServer"] = "https://cmsweb.cern.ch/couchdb" config.requestArgs["createRequest"]["ACDCDatabase"] = "acdcserver" config.requestArgs["createRequest"]["TimePerEvent"] = wf.info["TimePerEvent"] if options.memory: config.requestArgs["createRequest"]["Memory"] = options.memory else: config.requestArgs["createRequest"]["Memory"] = wf.info["Memory"] config.requestArgs["createRequest"]["SizePerEvent"] = wf.info["SizePerEvent"] config.requestArgs["createRequest"]["RequestType"] = "Resubmission" 
config.requestArgs["createRequest"]["Group"] = wf.info["Group"] r = reqMgrClient.createRequest(config) print "Created:" print r
def main(): usage = "usage: %prog [options] workflow" parser = OptionParser(usage=usage) parser.add_option("-v","--verbose",action="store_true", dest="verbose", default=False, help="Show detailed info") parser.add_option("-l","--lumis",action="store_true", dest="checkLumis", default=False, help="Show lumis instead of events") parser.add_option("-f","--file", dest="fileName", default=None, help="Input file") (options, args) = parser.parse_args() if len(args) != 1 and options.fileName is None: parser.error("Provide the workflow name or a file") sys.exit(1) if options.fileName is None: workflows = [args[0]] else: workflows = [l.strip() for l in open(options.fileName) if l.strip()] for wf in workflows: print wf workflow = reqMgrClient.Workflow(wf, url) #by tyoe if workflow.type != 'TaskChain': #two step monte carlos (GEN and GEN-SIM) if workflow.type == 'MonteCarlo' and len(workflow.outputDatasets) == 2: percentageCompletion2StepMC(url, workflow, options.verbose, options.checkLumis) elif workflow.type == 'MonteCarloFromGEN': percentageCompletion(url, workflow, options.verbose, options.checkLumis, checkFilter=True) else: percentageCompletion(url, workflow, options.verbose, options.checkLumis, checkFilter=True) else: percentageCompletionTaskChain(url, workflow, options.verbose, options.checkLumis)
def filterOrphanAcdc(url, acdcs):
    """
    Given a list of ACDC workflow names, return the ones whose original
    (parent) workflow is not in 'completed' status.

    Returns a list of (parent_name, parent_status, acdc_name) tuples.
    ACDCs with no discoverable parent are skipped.
    """
    orphans = []
    for name in acdcs:
        acdc = reqMgrClient.Workflow(name)
        # The parent name may live in .info or, as a fallback, in .cache.
        if 'OriginalRequestName' in acdc.info:
            parent_name = acdc.info['OriginalRequestName']
        elif 'OriginalRequestName' in acdc.cache:
            parent_name = acdc.cache['OriginalRequestName']
        else:
            parent_name = None
        if not parent_name:
            continue
        parent = reqMgrClient.Workflow(parent_name)
        if parent.status != 'completed':
            orphans.append((parent.name, parent.status, acdc.name))
    return orphans
def main():
    """
    CLI entry point: clone workflows (optionally for backfill / testbed),
    or extend them with additional events.
    """
    usage = "\n python %prog [options] [WORKFLOW_NAME]"\
            "WORKFLOW_NAME: if the list file is provided this should be empty\n"
    parser = OptionParser(usage=usage)
    parser.add_option("-a", "--action", dest="action", default='clone',
                      help="There are two options clone (clone) or extend a worflow (extend) .")
    parser.add_option("-u", "--user", dest="user", default=None,
                      help="User we are going to use")
    parser.add_option("-g", "--group", dest="group", default='DATAOPS',
                      help="Group to send the workflows.")
    parser.add_option("-b", "--backfill", action="store_true", dest="backfill",
                      default=False, help="Creates a clone for backfill test purposes.")
    parser.add_option("-v", "--verbose", action="store_true", dest="verbose",
                      default=False, help="Prints all query information.")
    parser.add_option('-f', '--file', help='Text file with a list of workflows',
                      dest='file')
    parser.add_option('--bwl', help='The block white list to be used',
                      dest='bwl', default=None)
    # Options used only by the 'extend' action.
    parser.add_option('-e', '--events', help='# of events to add', dest='events')
    parser.add_option('-l', '--firstlumi', help='# of the first lumi', dest='firstlumi')
    parser.add_option("-m", "--memory", dest="memory",
                      help="Set max memory for the event. At assignment, this will be used to calculate maxRSS = memory*1024")
    parser.add_option("--testbed", action="store_true", dest="testbed", default=False,
                      help="Clone to testbed reqmgr insted of production")
    options, args = parser.parse_args()

    # Workflow list: from --file, otherwise from the positional argument.
    if options.file:
        wfs = [line.strip() for line in open(options.file) if line.strip()]
    elif len(args) > 0:
        wfs = [args[0]]
    else:
        parser.error("Provide the workflow of a file of workflows")
        sys.exit(1)

    # Default to the OS account name when --user is not given.
    if not options.user:
        user = pwd.getpwuid(os.getuid()).pw_name
    else:
        user = options.user

    if options.action == 'clone':
        for wf in wfs:
            workflow = reqMgrClient.Workflow(wf)
            # Memory override from the command line wins over the request's.
            memory = options.memory if options.memory else workflow.info["Memory"]
            cloneWorkflow(wf, user, options.group, options.verbose,
                          options.backfill, options.testbed, memory,
                          bwl=options.bwl)
    elif options.action == 'extend':
        for wf in wfs:
            extendWorkflow(wf, user, options.group, options.verbose,
                           options.events, options.firstlumi)
    sys.exit(0)
def main(): usage = 'python %prog [OPTIONS] [WORKFLOW]' parser = optparse.OptionParser(usage=usage) parser.add_option('-f', '--file', help='Text file with several workflows', dest='file') parser.add_option('-a', '--any', help='Any block replica', dest='anyb', action='store_true') parser.add_option('-d', '--dataset', help='A single dataset', dest='dataset', action='store_true') parser.add_option('-p', '--pileup', action="store_true", help='Look also for pileup location', dest='pileup') parser.add_option('-c', '--clean', help='Print ready to use site list', dest='clean', action="store_true", default=False) (options, args) = parser.parse_args() # if file if options.file: ls = [l.strip() for l in open(options.file) if l.strip()] elif len(args) == 1: ls = [args[0]] else: parser.error("Provide the workflow of a file of workflows") for x in ls: # if dataset given if options.dataset: printDsLocation(x, options.clean, options.anyb) else: print x workflow = reqMgrClient.Workflow(x) ds = getInputDataset(workflow) if not ds: print x, "Has no input dataset" continue printDsLocation(ds, options.clean, options.anyb) # pile ups if options.dataset and 'MCPileup' in workflow.info: pu = workflow.info['MCPileup'] print "Pile up:" printDsLocation(pu, options.clean, options.anyb)
def findIncludeParents(url, wfs): result = [] for r in wfs: try: wf = reqMgrClient.Workflow(r, url=url) if 'IncludeParents' in wf.info: #print wf.name, wf.info['IncludeParents'] if wf.info['IncludeParents'] == "True" or wf.info['IncludeParents'] is True: print wf.name, wf.status result.append(wf.name) else: #print "-",wf.name pass except AttributeError: #print "Error retrieving info for ", r pass return result
def makeACDC(url, workflow, task, memory=None):
    """
    Fill the module-level `config` with an ACDC (Resubmission) request for
    the given workflow/task and submit it through reqMgrClient.

    :param url: kept for interface compatibility (not used in the body)
    :param workflow: name of the original workflow
    :param task: task name used to build InitialTaskPath
    :param memory: optional memory override; defaults to the original request's
    :returns: the result of reqMgrClient.createRequest
    """
    # Original workflow info.
    wf = rqMgr.Workflow(workflow)
    info = wf.info
    create = config.requestArgs["createRequest"]
    # Reuse the request string when it is already an ACDC, else prefix it.
    if "ACDC" in info["RequestString"]:
        create["RequestString"] = info["RequestString"]
    else:
        create["RequestString"] = "ACDC_" + info["RequestString"]
    create["PrepID"] = info["PrepID"]
    create["RequestPriority"] = info["RequestPriority"]
    create["OriginalRequestName"] = wf.name
    create["InitialTaskPath"] = "/%s/%s" % (wf.name, task)
    create["ACDCServer"] = "https://cmsweb.cern.ch/couchdb"
    create["ACDCDatabase"] = "acdcserver"
    create["TimePerEvent"] = info["TimePerEvent"]
    # Explicit override wins over the original request's memory.
    create["Memory"] = memory if memory else info["Memory"]
    create["SizePerEvent"] = info["SizePerEvent"]
    create["RequestType"] = "Resubmission"
    create["Group"] = info["Group"]
    return reqMgrClient.createRequest(config)
def main():
    """
    CLI entry point: invalidate workflows (and optionally their output
    datasets in DBS), and optionally clone them afterwards.
    """
    usage = "\n python %prog [options] [WORKFLOW_NAME]\n" \
            "WORKFLOW_NAME: if the list file is provided this should be empty\n"
    parser = OptionParser(usage=usage)
    parser.add_option('-f', '--file',
                      help='Text file of workflows to Reject and Clone', dest='file')
    parser.add_option(
        '-c', '--clone',
        help='Are the workflows going to be cloned? The default value is False',
        action="store_true", dest='clone', default=False)
    parser.add_option('-i', '--invalidate',
                      help='Invalidate datasets? The default value is False',
                      action="store_true", dest='invalidate', default=False)
    parser.add_option(
        "-u", "--user", dest="user",
        help="The user for creating the clone, if empty it will use the OS user running the script")
    parser.add_option(
        "-g", "--group", dest="group", default='DATAOPS',
        help="The group for creating the clone, if empty it will, use 'DATAOPS' by default")
    parser.add_option(
        "-m", "--memory", dest="memory",
        help="Set max memory for the clone. At assignment, this will be used to calculate maxRSS = memory*1024")
    (options, args) = parser.parse_args()

    # Build the list of workflow names from --file or the positional argument.
    if options.file:
        try:
            workflows = [l.strip() for l in open(options.file) if l.strip()]
        # BUGFIX: was a bare `except:`, which also swallows SystemExit and
        # KeyboardInterrupt; only I/O problems are expected from open().
        except IOError:
            parser.error("Provide a valid file of workflows")
            sys.exit(1)
    elif len(args) > 0:
        workflows = [args[0]]
    else:
        parser.error("Provide the workflow of a file of workflows")
        sys.exit(1)

    # Default to the OS account when no user is given.
    if not options.user:
        uinfo = pwd.getpwuid(os.getuid())
        user = uinfo.pw_name
    else:
        user = options.user

    for workflow in workflows:
        try:
            workflowInfo = reqMgrClient.Workflow(workflow)
        # BUGFIX: was a bare `except:`; keep the skip-on-failure behavior
        # but never trap SystemExit/KeyboardInterrupt.
        except Exception:
            print("The workflow name: " + workflow + " is not valid.")
            continue
        # Invalidate the workflow itself.
        print("Invalidating the workflow: " + workflow)
        reqMgrClient.invalidateWorkflow(url, workflow, workflowInfo.status)
        # Optionally invalidate its output datasets in DBS.
        if options.invalidate:
            print("Invalidating datasets")
            datasets = reqMgrClient.outputdatasetsWorkflow(url, workflow)
            for dataset in datasets:
                print(dataset)
                dbs3.setDatasetStatus(dataset, 'INVALID', files=True)
        # Optionally clone the (now invalidated) workflow.
        if options.clone:
            print("Cloning workflow: " + workflow)
            if options.memory:
                mem = options.memory
            else:
                mem = workflowInfo.info["Memory"]
            cloned = resubmit.cloneWorkflow(workflow, user, options.group,
                                            memory=mem)
    sys.exit(0)
def testWorkflowObj(self):
    """A Workflow object must expose the request name it was built from."""
    workflow_obj = reqMgr.Workflow(testwf, url=url)
    self.assertEqual(testwf, workflow_obj.name)
def main(): url = 'cmsweb.cern.ch' # Example: python assignWorkflow.py -w amaltaro_RVZTT_120404_163607_6269 # -t testbed-relval -s T1_US_FNAL -e CMSSW_6_0_0_pre1_FS_TEST_WMA -p v1 -a # relval -l /store/backfill/1 parser = optparse.OptionParser() parser.add_option( '-w', '--workflow', help='Workflow Name', dest='workflow') parser.add_option('-t', '--team', help='Type of Requests', dest='team') parser.add_option('-s', '--site', help='Site', dest='site') parser.add_option('-p', '--procversion',help='Processing Version', dest='procversion') parser.add_option('-a', '--activity',help='Dashboard Activity', dest='activity') parser.add_option('-f', '--file',help='File with workflows', dest='file') parser.add_option('-l', '--lfn', help='Merged LFN base', dest='lfn') parser.add_option('--special', help='Use it for special workflows. You also have to change the code according to the type of WF', dest='special') parser.add_option('-r', '--replica', action='store_true', dest='replica', default=False, help='Adds a _Disk Non-Custodial Replica parameter') parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False, help="Prints all query information.") parser.add_option('-x', '--xrootd', help='Assign with trustSiteLocation=True (allows xrootd capabilities)', action='store_true', default=False, dest='xrootd') parser.add_option("--acqera", dest="acqera",help="Overrides Acquisition Era with a single string") parser.add_option("--procstr", dest="procstring",help="Overrides Processing String with a single string") parser.add_option('--test', action="store_true", help='Nothing is injected, only print infomation about workflow and AcqEra', dest='test') parser.add_option('--pu', action="store_true",help='Use it to inject PileUp workflows only', dest='pu') (options, args) = parser.parse_args() # Handling the workflow to assign if not options.workflow: if args: workflows = args elif options.file: workflows = [l.strip() for l in open(options.file) if l.strip()] 
else: parser.error("Input a workflow name or a file to read them") sys.exit(0) else: workflows = [options.workflow] # Handling the parameters given in the command line team = 'production' site = GOOD_SITES procversion = 1 activity = 'production' lfn = '/store/mc' acqera = {} procstring = {} specialStr = '' replica = False if options.team: team = options.team if options.site: site = options.site if site == "all": site = ALL_SITES elif site == "t1": site = T1S # parse sites separated by commas elif "," in site: site = site.split(",") if options.activity: activity = options.activity if options.lfn: lfn = options.lfn if options.replica: replica = True if options.acqera: acqera = options.acqera if options.procstring: procstring = options.procstring # Iterating over the set of Workflows for workflow in workflows: # Getting the original dictionary schema = getRequestDict(url, workflow) # Checking is the WF is in assignment-approved, it is mandatory to be assigned if (schema["RequestStatus"] != "assignment-approved"): print("The workflow '" + workflow + "' you are trying to assign is not in assignment-approved") sys.exit(1) # Dealing with the processing version wfInfo = reqMgr.Workflow(workflow, url=url) if options.procversion: procversion = int(options.procversion) else: procversion = wfInfo.info["ProcessingVersion"] # Setting the AcqEra and ProcStr values per Task for key, value in schema.items(): if type(value) is dict and key.startswith("Task"): try: procstring[value['TaskName']] = value[ 'ProcessingString'].replace("-", "_") acqera[value['TaskName']] = value['AcquisitionEra'] except KeyError: print("This request has no AcquisitionEra or ProcessingString defined into the Tasks, aborting...") sys.exit(1) # Adding the special string - in case it was provided in the command line if options.special: #specialStr = '_03Jan2013' specialStr = '_' + str(options.special) for key, value in procstring.items(): procstring[key] = value + specialStr # Check output dataset existence, 
and abort if they already do! datasets = schema["OutputDatasets"] i = 0 if schema["RequestType"] == "TaskChain": exist = False maxv = 1 for key, value in schema.items(): if type(value) is dict and key.startswith("Task"): dbsapi = DbsApi(url=dbs3_url) # list all datasets with same name but different version # numbers datasets = dbsapi.listDatasets(acquisition_era_name=value['AcquisitionEra'], primary_ds_name=value['PrimaryDataset'], detail=True, dataset_access_type='*') processedName = value['AcquisitionEra'] + \ '-' + value['ProcessingString'] + "-v\\d+" # see if any of the dataset names is a match for ds in datasets: if re.match(processedName, ds['processed_ds_name']): print "Existing dset:", ds['dataset'], "(%s)" % ds['dataset_access_type'] maxv = max(maxv, ds['processing_version']) exist = True else: pass i += 1 # suggest max version if exist and procversion <= maxv: print("Some output datasets exist, its advised to assign with v =="+ maxv + 1) sys.exit(0) #Checking if we are dealing with a TaskChain resubmission elif schema["RequestType"] == "Resubmission" and wfInfo.info["PrepID"].startswith("task"): procstring = wfInfo.info["ProcessingString"] acqera = wfInfo.info["AcquisitionEra"] else: print("The workflow '" + workflow + "' you are trying to assign is not a TaskChain, please use another resource.") sys.exit(1) # If the --test argument was provided, then just print the information # gathered so far and abort the assignment if options.test: print "%s \tAcqEra: %s \tProcStr: %s \tProcVer: %s" % (workflow, acqera, procstring, procversion) # print workflow, '\tAcqEra:', acqera, '\tProcStr:', procstring, # '\tProcVer:', procversion print "LFN: %s \tTeam: %s \tSite: %s" % (lfn, team, site) # print '\tTeam:',team, '\tSite:', site sys.exit(0) # Really assigning the workflow now print workflow, '\tAcqEra:', acqera, '\tProcStr:', procstring, '\tProcVer:', procversion, '\tTeam:', team, '\tSite:', site assignRequest(url, workflow, team, site, acqera, procstring, 
procversion, activity, lfn, replica, options.verbose, options.xrootd) sys.exit(0)
def main():
    """
    CLI entry point: assign workflows to a team and site list, taking era,
    LFN base and processing version from the command line or the request.
    """
    url = 'cmsweb.cern.ch'
    url_tb = 'cmsweb-testbed.cern.ch'
    parser = optparse.OptionParser(usage="usage: %prog [options] [WORKFLOW]")
    parser.add_option('-t', '--team', help='Type of Requests', dest='team')
    parser.add_option('-s', '--sites',
                      help='Site List, comma separated (no spaces), or "t1" for Tier-1\'s and "t2" for Tier-2\'s',
                      dest='sites')
    parser.add_option('-r', '--replica', action='store_true', dest='replica',
                      default=False,
                      help='Adds a _Disk Non-Custodial Replica parameter')
    parser.add_option('-e', '--era', help='Acquistion era', dest='era')
    parser.add_option('-p', '--procversion',
                      help='Processing Version, if empty it will leave the processing version that comes by defaul in the request',
                      dest='procversion')
    parser.add_option('-a', '--activity',
                      help='Dashboard Activity (reprocessing, production or test), if empty will set reprocessing as default',
                      dest='activity')
    parser.add_option('-x', '--xrootd',
                      help='Assign with trustSiteLocation=True (allows xrootd capabilities)',
                      action='store_true', default=False, dest='xrootd')
    parser.add_option('-l', '--lfn', help='Merged LFN base', dest='lfn')
    parser.add_option('-f', '--file',
                      help='Text file with a list of wokflows. If this option is used, the same settings will be applyed to all workflows',
                      dest='file')
    parser.add_option('-v', '--verbose', help='Verbose', action='store_true',
                      default=False, dest='verbose')
    parser.add_option('--testbed', help='Assign in testbed', action='store_true',
                      default=False, dest='testbed')
    options, args = parser.parse_args()

    if options.testbed:
        url = url_tb

    # Workflows from --file, else a single positional argument.
    if options.file:
        wf_names = [line.strip() for line in open(options.file) if line.strip()]
    elif len(args) == 1:
        wf_names = [args[0]]
    else:
        parser.error("Provide the workflow name or the file")
    if not options.team:
        parser.error("Provide the TEAM name")
        sys.exit(0)

    # Site list: shortcut keywords, an explicit comma-separated list, or all.
    if options.sites:
        if options.sites == "t1":
            sites = T1_SITES
        elif options.sites == "t2":
            sites = T2_SITES
        else:
            sites = [site for site in options.sites.split(',')]
    else:
        sites = ALL_SITES

    # Dashboard activity: 'reprocessing' unless overridden.
    activity = options.activity if options.activity else 'reprocessing'

    # trustSiteListAsLocation flag, driven by --xrootd.
    trust_site = True if options.xrootd else False
    team = options.team

    for name in wf_names:
        wf = rqMgr.Workflow(name, url=url)
        # Acquisition era: command line wins, else the request's own value.
        era = options.era if options.era else wf.info['AcquisitionEra']
        # Merged LFN base: command line, the request, or backfill by default.
        if options.lfn:
            lfn = options.lfn
        elif "MergedLFNBase" in wf.info:
            lfn = wf.info['MergedLFNBase']
        else:
            lfn = '/store/backfill/1'
        # Processing version: command line, else the request's own value.
        procversion = options.procversion if options.procversion else wf.info['ProcessingVersion']
        procstring = wf.info['ProcessingString']
        assignRequest(url, wf.name, team, sites, era, procversion, activity,
                      lfn, procstring, trust_site, options.replica,
                      options.verbose)
    sys.exit(0)
def main(): url = 'cmsweb.cern.ch' url_tb = 'cmsweb-testbed.cern.ch' # Example: python assign.py -w amaltaro_RVZTT_120404_163607_6269 # -t testbed-relval -s T1_US_FNAL -e CMSSW_6_0_0_pre1_FS_TEST_WMA -p v1 -a # relval -l /store/backfill/1 usage = "usage: %prog [options] [WORKFLOW]" parser = optparse.OptionParser(usage=usage) parser.add_option('-t', '--team', help='Type of Requests', dest='team') parser.add_option('-s', '--sites', help=' "t1" for Tier-1\'s and "t2" for Tier-2\'s', dest='sites') parser.add_option( '--special', help= 'Use it for special workflows. You also have to change the code according to the type of WF', dest='special') parser.add_option('-r', '--replica', action='store_true', dest='replica', default=False, help='Adds a _Disk Non-Custodial Replica parameter') parser.add_option( '-p', '--procversion', help= 'Processing Version, if empty it will leave the processing version that comes by default in the request', dest='procversion') parser.add_option( '-a', '--activity', help= 'Dashboard Activity (reprocessing, production or test), if empty will set reprocessing as default', dest='activity') parser.add_option( '-x', '--xrootd', help='Assign with trustSiteLocation=True (allows xrootd capabilities)', action='store_true', default=False, dest='xrootd') parser.add_option('-l', '--lfn', help='Merged LFN base', dest='lfn') parser.add_option('-v', '--verbose', help='Verbose', action='store_true', default=False, dest='verbose') parser.add_option('--testbed', help='Assign in testbed', action='store_true', default=False, dest='testbed') parser.add_option( '--test', action="store_true", help= 'Nothing is injected, only print infomation about workflow and Era', dest='test') parser.add_option( '-f', '--file', help= 'Text file with a list of wokflows. 
If this option is used, the same settings will be applied to all workflows', dest='file') parser.add_option('-w', '--workflow', help='Workflow Name', dest='workflow') parser.add_option('-e', '--era', help='Acquistion era', dest='era') parser.add_option("--procstr", dest="procstring", help="Overrides Processing String with a single string") (options, args) = parser.parse_args() if options.testbed: url = url_tb # parse input workflows and files. If both -w and -f options are used, then only the -w inputs are considered. if not options.workflow: if args: wfs = args elif options.file: wfs = [l.strip() for l in open(options.file) if l.strip()] else: parser.error("Input a workflow name or a file to read them") sys.exit(0) else: wfs = [options.workflow] #Default values era = {} procversion = 1 procstring = {} replica = False sites = ALL_SITES specialStr = '' taskchain = False team = 'production' trust_site = False SI = siteInfo() # Handling the parameters given in the command line # parse site list if options.sites: if options.sites == "t1": sites = SI.sites_T1s elif options.sites == "t2": sites = SI.sites_T2s else: sites = [site for site in options.sites.split(',')] else: sites = SI.sites_T1s + SI.sites_T2s if options.team: team = options.team if options.xrootd: trust_site = True if options.replica: replica = True for wf in wfs: # Getting the original dictionary schema = getRequestDict(url, wf) wf = reqMgr.Workflow(wf, url=url) # WF must be in assignment-approved in order to be assigned if (schema["RequestStatus"] != "assignment-approved"): print("The workflow '" + wf.name + "' you are trying to assign is not in assignment-approved") sys.exit(1) #Check to see if the workflow is a task chain or an ACDC of a taskchain taskchain = (schema["RequestType"] == "TaskChain") or ( (schema["RequestType"] == "Resubmission") and "task" in schema["InitialTaskPath"].split("/")[1]) #Dealing with era and proc string if taskchain: # Setting the Era and ProcStr values per Task for key, 
value in schema.items(): if type(value) is dict and key.startswith("Task"): try: if 'ProcessingString' in value: procstring[ value['TaskName']] = value['ProcessingString'] else: procstring[ value['TaskName']] = schema['ProcessingString'] if 'AcquisitionEra' in value: era[value['TaskName']] = value['AcquisitionEra'] else: procstring[ value['TaskName']] = schema['AcquisitionEra'] except KeyError: print( "This taskchain request has no AcquisitionEra or ProcessingString defined into the Tasks, aborting..." ) sys.exit(1) # Adding the special string - in case it was provided in the command line if options.special: specialStr = '_' + str(options.special) for key, value in procstring.items(): procstring[key] = value + specialStr # Override if a value is given using the procstring command if options.procstring: procstring = options.procstring elif not taskchain: procstring = wf.info['ProcessingString'] if options.era: era = options.era elif not taskchain: era = wf.info['AcquisitionEra'] #Set era and procstring to none for merge ACDCs inside a task chain if schema["RequestType"] == "Resubmission" and wf.info[ "PrepID"].startswith("task") and "Merge" in schema[ "InitialTaskPath"].split("/")[-1]: era = None procstring = None # Must use --lfn option, otherwise workflow won't be assigned if options.lfn: lfn = options.lfn elif "MergedLFNBase" in wf.info: lfn = wf.info['MergedLFNBase'] else: print "Can't assign the workflow! Please include workflow lfn using --lfn option." sys.exit(0) # activity production by default for taskchains, reprocessing for default by workflows if options.activity: activity = options.activity elif taskchain: activity = 'production' else: activity = 'reprocessing' # given or default processing version if options.procversion: procversion = int(options.procversion) else: procversion = wf.info["ProcessingVersion"] # Check for output dataset existence, and abort if output datasets already exist! 
# Don't perform this check for ACDC's datasets = schema["OutputDatasets"] i = 0 if not (schema["RequestType"] == "Resubmission"): exist = False maxv = 1 for key, value in schema.items(): if type(value) is dict and key.startswith("Task"): dbsapi = DbsApi(url=dbs3_url) # list all datasets with same name but different version # numbers datasets = dbsapi.listDatasets( acquisition_era_name=value['AcquisitionEra'], primary_ds_name=value['PrimaryDataset'], detail=True, dataset_access_type='*') processedName = value['AcquisitionEra'] + '-' + value[ 'ProcessingString'] + "-v\\d+" # see if any of the dataset names is a match for ds in datasets: if re.match(processedName, ds['processed_ds_name']): print "Existing dset:", ds[ 'dataset'], "(%s)" % ds['dataset_access_type'] maxv = max(maxv, ds['processing_version']) exist = True else: pass i += 1 # suggest max version if exist and procversion <= maxv: print "Some output datasets exist, its advised to assign with v ==", maxv + 1 sys.exit(0) # If the --test argument was provided, then just print the information # gathered so far and abort the assignment if options.test: print "%s \tEra: %s \tProcStr: %s \tProcVer: %s" % ( wf.name, era, procstring, procversion) print "LFN: %s \tTeam: %s \tSite: %s" % (lfn, team, sites) print "Taskchain? " + str(taskchain) print "Activity:" + activity sys.exit(0) # Really assigning the workflow now print wf.name, '\tEra:', era, '\tProcStr:', procstring, '\tProcVer:', procversion, '\tTeam:', team, '\tSite:', sites assignRequest(url, wf.name, team, sites, era, procversion, activity, lfn, procstring, trust_site, options.replica, options.verbose, taskchain) sys.exit(0)