def createWorkflow(self, runNumber, primaryDataset, processedDataset, dataTier):
    """
    _createWorkflow_

    Create a DQM harvesting workflow for the given run and dataset.

    If a workflow spec for this run/dataset pair was created previously it
    is loaded from the on-disk job cache and reused; otherwise a new one is
    generated from the T0AST reco configuration, saved and published.

    Returns a (workflowSpec, workflowSpecPath) tuple, or (None, None) when
    reconstruction is disabled for the dataset.
    """
    # Per-run cache directory under the component area; holds the saved
    # workflow spec XML so repeated calls for the same run reuse it.
    # (Was '"Run" + runNumber', which breaks for integer run numbers;
    # %s formatting accepts both and is identical for strings.)
    jobCache = os.path.join(self.args["ComponentDir"], "T0ASTPlugin",
                            "Run%s" % runNumber)
    if not os.path.exists(jobCache):
        os.makedirs(jobCache)

    workflowSpecFileName = "DQMHarvest-Run%s-%s-workflow.xml" % (runNumber,
                                                                 primaryDataset)
    workflowSpecPath = os.path.join(jobCache, workflowSpecFileName)

    # Fast path: a spec already exists on disk -- just load and return it.
    if os.path.exists(workflowSpecPath):
        msg = "Loading existing workflow for dataset: %s\n " % primaryDataset
        msg += " => %s\n" % workflowSpecPath
        logging.info(msg)

        workflowSpec = WorkflowSpec()
        workflowSpec.load(workflowSpecPath)
        return (workflowSpec, workflowSpecPath)

    msg = "No workflow found for dataset: %s\n " % primaryDataset
    msg += "Looking up software version and generating workflow..."
    # BUGFIX: this message was built but never emitted, leaving the
    # "generate new workflow" path invisible in the component log.
    logging.info(msg)

    recoConfig = self.t0astWrapper.listRecoConfig(runNumber, primaryDataset)
    if not recoConfig["DO_RECO"]:
        logging.info("RECO disabled for dataset %s" % primaryDataset)
        return (None, None)

    # Component configuration may override the values coming from the
    # T0AST reco configuration.
    globalTag = self.args.get("OverrideGlobalTag", None)
    if globalTag is None:
        globalTag = recoConfig["GLOBAL_TAG"]

    cmsswVersion = self.args.get("OverrideCMSSW", None)
    if cmsswVersion is None:
        cmsswVersion = recoConfig["CMSSW_VERSION"]

    datasetPath = "/%s/%s/%s" % (primaryDataset, processedDataset, dataTier)
    workflowSpec = createHarvestingWorkflow(datasetPath, self.site,
                                            self.args["CmsPath"],
                                            self.args["ScramArch"],
                                            cmsswVersion, globalTag,
                                            configFile=self.args["ConfigFile"],
                                            DQMServer=self.args['DQMServer'],
                                            proxyLocation=self.args['proxyLocation'],
                                            DQMCopyToCERN=self.args['DQMCopyToCERN'],
                                            doStageOut=self.args['DoStageOut'])

    workflowSpec.save(workflowSpecPath)
    msg = "Created Harvesting Workflow:\n %s" % workflowSpecPath
    logging.info(msg)
    self.publishWorkflow(workflowSpecPath, workflowSpec.workflowName())
    return (workflowSpec, workflowSpecPath)
# NOTE(review): this chunk is the tail of a CLI entry-point function
# (presumably setPrestage, given the __main__ guard below); the "def" line
# and the initialisation of "opts" (parsed command-line options) and
# "workflow" (presumably initialised to None) are outside this view --
# confirm against the full file.
for opt,arg in opts:
    # Only option recognised here: the path to the workflow spec file.
    if opt == '--workflow':
        workflow = arg

if workflow is None:
    # --workflow was not supplied; abort with a usage hint.
    msg = 'Please provide workflow file path parameter i.e --workflow=file_path'
    raise RuntimeError, msg
else:
    try:
        # Load the spec, flag it for prestaging, and write it back in place.
        spec = WorkflowSpec()
        spec.load(workflow)
        spec.parameters['PreStage'] = 'True'
        spec.save(workflow)
    except Exception, ex:
        # Re-raise any load/save failure with context attached.
        msg = 'Exception caught while enabling prestage\n'
        msg += str(ex)
        raise RuntimeError, msg
return

if __name__ == "__main__":
    setPrestage()
# NOTE(review): script fragment -- "loader", "cfgFile", "cmsRunNode",
# "workflow", "workflowName" and "lfnbase" are all defined earlier in the
# file, outside this view.
try:
    # Make the CMSSW software environment/libraries importable before the
    # user's config file is executed.
    loader.load()
except Exception, ex:
    msg = "Couldn't load CMSSW libraries: %s" % ex
    raise RuntimeError, msg

# Import the user's cfg file as a python module named after the file
# (".py" suffix stripped); it is expected to expose a "process" object.
loadedModule = imp.load_source(
    os.path.basename(cfgFile).replace(".py", ""), cfgFile)

# Translate the cmsRun process object into the workflow's config
# representation and sanity-check it for production use.
cmsRunNode.cfgInterface = CMSSWConfig()
loadedConfig = cmsRunNode.cfgInterface.loadConfiguration(loadedModule.process)
loadedConfig.validateForProduction()

loader.unload()

# generate Dataset information for workflow from cfgInterface
# Each output module gets an LFN base and a logical file name derived
# from the module's own name.
for moduleName,outMod in cmsRunNode.cfgInterface.outputModules.items():
    outMod["LFNBase"] = lfnbase
    outMod["logicalFileName"] = os.path.join(
        lfnbase, "%s.root" % moduleName)

# Append a stage-out step so produced files are copied to storage.
WorkflowTools.addStageOutNode(cmsRunNode, "stageOut1")

# Persist the assembled workflow spec next to the current directory and
# report what was written.
workflow.save("%s-Workflow.xml" % workflowName)
print "Created: %s-Workflow.xml" % workflowName
print "From: %s " % cfgFile
# NOTE(review): __call__ here is a method of a NodeFinder class whose
# header is outside this view; the code after it is script-level and
# relies on "stageOutNode", "spec", "override" and "specFile" defined
# earlier in the file -- confirm against the full file.
def __call__(self, node):
    # Visitor hook invoked by spec.payload.operate() for every node in the
    # payload tree: remember the node whose name matches the target name.
    if node.name == self.name:
        self.result = node
    return

# Walk the payload tree to locate the named stage-out node.
finder = NodeFinder(stageOutNode)
spec.payload.operate(finder)
node = finder.result

# Refuse to attach a stage-out override to a node of the wrong type.
if not node.type == "StageOut":
    msg = "Node %s is not a StageOut node\n" % stageOutNode
    msg += "It is a node of type: %s\n" % node.type
    raise RuntimeError, msg

#node.configuration = ""

# Apply the override settings (command, option, SE name, LFN prefix) to
# the located stage-out node, then write the modified spec back to disk.
addStageOutOverride(node, override['command'], override['option'],
                    override['se-name'], override['lfn-prefix'])

spec.save(specFile)
sys.exit(0)