def _applicationModule(self):
    """Transfer parameter names from the module Fcc to the module FccAnalysis.

    :return: the module definition that receives these parameters
    :rtype: moduleinstance
    """
    module = self._createModuleDefinition()
    for pname, pdefault, ptype, pdesc in (
            ("fccExecutable", "", "string", "The executable to run"),
            ("isGaudiOptionsFileNeeded", False, "bool", "Gaudi configuration file"),
            ("logLevel", "", "string", "Gaudi Log Level"),
            ("read", "", "string", "Application can read or generate events"),
            ("randomGenerator", {}, "dict", "Pythia card files")):
        module.addParameter(Parameter(pname, pdefault, ptype, "", "", False, False, pdesc))
    return module
def submitTS(job):
    """Create a transformation executing the job workflow."""
    # Temporary fix: make JOB_ID / PRODUCTION_ID resolvable inside the workflow
    for wf_name in ("JOB_ID", "PRODUCTION_ID"):
        job.workflow.addParameter(
            Parameter(wf_name, "000000", "string", "", "", True, False, "Temporary fix"))
    job.setType('MCSimulation')  # used for the JobType plugin

    trans = Transformation()
    # trans.setTransformationName("Prod3Exemple")  # must be unique; prompted for if unset
    trans.setType("MCSimulation")
    trans.setDescription("MC Prod3 BaseLine HE test")
    trans.setLongDescription("corsika-simtel production")  # mandatory
    trans.setBody(job.workflow.toXML())

    res = trans.addTransformation()  # transformation is created here
    if not res['OK']:
        print(res['Message'])
        DIRAC.exit(-1)
    trans.setStatus("Active")
    trans.setAgentType("Automatic")
    return res
def submit_trans(job, trans_name):
    """Create a transformation executing the job workflow."""
    DIRAC.gLogger.notice('submit_trans : %s' % trans_name)

    # Initialize JOB_ID / PRODUCTION_ID so they resolve inside the workflow
    for wf_name in ("JOB_ID", "PRODUCTION_ID"):
        job.workflow.addParameter(
            Parameter(wf_name, "000000", "string", "", "", True, False, "Temporary fix"))
    job.setType('MCSimulation')  # used for the JobType plugin

    trans = Transformation()
    trans.setTransformationName(trans_name)
    trans.setType("MCSimulation")
    trans.setDescription("MC Prod3 BaseLine Corsika7 test")
    trans.setLongDescription("corsika-simtel production")  # mandatory
    trans.setBody(job.workflow.toXML())

    result = trans.addTransformation()  # transformation is created here
    if not result['OK']:
        return result
    trans.setStatus("Active")
    trans.setAgentType("Automatic")
    return trans.getTransformationID()
def _addBaseParameters(self, stepdefinition):
    """Add the default parameters to the step.

    Parameters added: applicationName, applicationVersion, SteeringFile,
    applicationLog, ExtraCLIArguments, InputFile, OutputFile (only when an
    output file is set), OutputPath, OutputSE and listoutput; then delegates
    to :func:`_getSpecificAppParameters` for application-specific ones.

    :param stepdefinition: step definition object to populate
    :return: result of :func:`_getSpecificAppParameters`
    """
    stepdefinition.addParameter(Parameter("applicationName", "", "string", "", "", False, False,
                                          "Application Name"))
    stepdefinition.addParameter(Parameter("applicationVersion", "", "string", "", "", False, False,
                                          "Application Version"))
    stepdefinition.addParameter(Parameter("SteeringFile", "", "string", "", "", False, False,
                                          "Steering File"))
    stepdefinition.addParameter(Parameter("applicationLog", "", "string", "", "", False, False,
                                          "Log File"))
    stepdefinition.addParameter(Parameter("ExtraCLIArguments", "", "string", "", "", False, False,
                                          "Extra CLI arguments"))
    stepdefinition.addParameter(Parameter("InputFile", "", "string", "", "", True, False,
                                          "Input File"))
    # idiomatic truthiness check instead of len(...)
    if self.OutputFile:
        stepdefinition.addParameter(Parameter("OutputFile", "", "string", "", "", False, False,
                                              "Output File"))
    stepdefinition.addParameter(Parameter("OutputPath", "", "string", "", "", True, False,
                                          "Output File path on the grid"))
    stepdefinition.addParameter(Parameter("OutputSE", "", "string", "", "", True, False,
                                          "Output File storage element"))
    stepdefinition.addParameter(Parameter('listoutput', [], "list", "", "", False, False,
                                          "list of output file name"))
    # NbOfEvents and Energy are expected to be workflow-level parameters,
    # hence deliberately not added here.
    return self._getSpecificAppParameters(stepdefinition)
def _applicationModule(self):
    """Define the application-specific module parameters."""
    module = self._createModuleDefinition()
    module.addParameter(
        Parameter("debug", False, "bool", "", "", False, False, "debug mode"))
    module.addParameter(
        Parameter("nbEventsPerSlice", 0, "int", "", "", False, False,
                  "Number of events per output file"))
    return module
def _applicationModule(self):
    """Define the application-specific module parameters."""
    module = self._createModuleDefinition()
    module.addParameter(
        Parameter("collections", [], "list", "", "", False, False,
                  "Collections to check for"))
    module.addParameter(
        Parameter("debug", False, "bool", "", "", False, False, "debug mode"))
    return module
def getStepDefinition(stepName, modulesNameList=None, importLine="", parametersList=None):
    """Return a step definition built from module names and parameters.

    Remember that Step definition = Parameters + Module Instances.

    :param str stepName: name of the step definition
    :param list modulesNameList: names of the modules composing the step
    :param str importLine: base import path of the modules; when empty, a
        DIRAC extension matching the VO is looked for (supposed to be called
        <Ext>DIRAC), falling back to vanilla DIRAC
    :param list parametersList: (name, type, value, description) tuples
    :return: StepDefinition object
    """
    if modulesNameList is None:
        modulesNameList = []
    if parametersList is None:
        parametersList = []

    # In case the importLine is not set, look for a DIRAC extension, if any.
    if not importLine:
        importLine = "DIRAC.Workflow.Modules"
        for ext in getCSExtensions():
            if ext.lower() == getVO():
                importLine = ext + "DIRAC.Workflow.Modules"
                break

    stepDef = StepDefinition(stepName)
    for moduleName in modulesNameList:
        moduleDef = ModuleDefinition(moduleName)
        try:
            # Look in the importLine given first
            moduleDef.setDescription(getattr(__import__("%s.%s" % (importLine, moduleName),
                                                        globals(), locals(), ['__doc__']),
                                             "__doc__"))
            moduleDef.setBody("\nfrom %s.%s import %s\n" % (importLine, moduleName, moduleName))
        except ImportError:
            # Fall back to vanilla DIRAC when the given location can't be imported
            alternativeImportLine = "DIRAC.Workflow.Modules"
            moduleDef.setDescription(getattr(__import__("%s.%s" % (alternativeImportLine,
                                                                   moduleName),
                                                        globals(), locals(), ['__doc__']),
                                             "__doc__"))
            moduleDef.setBody("\nfrom %s.%s import %s\n" % (alternativeImportLine,
                                                            moduleName, moduleName))

        # add the module to the step, and instance it
        stepDef.addModule(moduleDef)
        stepDef.createModuleInstance(module_type=moduleName, name=moduleName)

    # add parameters to the step definition
    for pName, pType, pValue, pDesc in parametersList:
        p = Parameter(pName, pValue, pType, "", "", True, False, pDesc)
        stepDef.addParameter(Parameter(parameter=p))

    return stepDef
def _applicationModule(self):
    """Define the module parameters for script execution."""
    module = self._createModuleDefinition()
    for pname, pdefault, ptype, pdesc in (
            ("arguments", "", "string", "Arguments to pass to the script"),
            ("script", "", "string", "Script to execute"),
            ("debug", False, "bool", "debug mode")):
        module.addParameter(Parameter(pname, pdefault, ptype, "", "", False, False, pdesc))
    return module
def _applicationModule(self):
    """Define the module parameters: event limit, debug flag and inline cuts."""
    module = self._createModuleDefinition()
    for pname, pdefault, ptype, pdesc in (
            ("MaxNbEvts", 0, "int", "Number of events to read"),
            ("debug", False, "bool", "debug mode"),
            ("inlineCuts", "", "string", "Inline cuts")):
        module.addParameter(Parameter(pname, pdefault, ptype, "", "", False, False, pdesc))
    return module
def _applicationModule(self):
    """Define the module parameters for the java-based application."""
    module = self._createModuleDefinition()
    for pname, pdefault, ptype, pdesc in (
            ("extraparams", "", "string", "Command line parameters to pass to java"),
            ("aliasproperties", "", "string",
             "Path to the alias.properties file name that will be used"),
            ("debug", False, "bool", "debug mode"),
            ("detectorModel", "", "string", "detector model zip file"),
            ("trackingstrategy", "", "string", "trackingstrategy")):
        module.addParameter(Parameter(pname, pdefault, ptype, "", "", False, False, pdesc))
    return module
def _addParameter(wObject, name, ptype, value, description, io='input'):
    """Internal function: attach a parameter to a workflow object.

    :param wObject: workflow object receiving the parameter
    :param str name: parameter name
    :param str ptype: parameter type
    :param value: parameter value
    :param str description: parameter description
    :param str io: direction, either 'input' or 'output'
    :raises TypeError: if *io* is neither 'input' nor 'output'
    """
    if io == 'input':
        in_flag, out_flag = True, False
    elif io == 'output':
        in_flag, out_flag = False, True
    else:
        raise TypeError('I/O flag is either input or output')
    par = Parameter(name, value, ptype, "", "", in_flag, out_flag, description)
    wObject.addParameter(Parameter(parameter=par))
def submit_trans(job, infileList, trans_name, group_size):
    """Create a transformation executing the job workflow."""
    DIRAC.gLogger.notice('submit_trans : %s' % trans_name)

    # Initialize JOB_ID
    job.workflow.addParameter(
        Parameter("JOB_ID", "000000", "string", "", "", True, False, "Temporary fix"))

    trans = Transformation()
    trans.setTransformationName(trans_name)  # this must be unique
    trans.setType("DataReprocessing")
    trans.setDescription("Simtel TS example")
    trans.setLongDescription("Simtel tel_sim")  # mandatory
    trans.setBody(job.workflow.toXML())
    trans.setGroupSize(group_size)

    res = trans.addTransformation()  # transformation is created here
    if not res['OK']:
        return res
    trans.setStatus("Active")
    trans.setAgentType("Automatic")

    # add 10*group_size files to the transformation (to seed the first 10 jobs)
    trans_id = trans.getTransformationID()
    return TransformationClient().addFilesToTransformation(
        trans_id['Value'], infileList[:10 * group_size])
def createWorkflowBodyStep1():
    """Build the workflow XML for the raw mandelbrot production step."""
    job = Job()
    job.setName("mandelbrot raw")
    job.setOutputSandbox(["*log"])

    # Allow the JOB_ID within the transformation to be evaluated on the fly
    # in the job application (see the mandelbrot.py arguments below).
    job.workflow.addParameter(
        Parameter("JOB_ID", "000000", "string", "", "", True, False, "Initialize JOB_ID"))

    # step 1: setup software
    job.setExecutable("git clone https://github.com/bregeon/mandel4ts.git")

    # step 2: run the mandelbrot application; @{JOB_ID} is resolved at run time
    job.setExecutable("./mandel4ts/mandelbrot.py",
                      arguments="-P 0.0005 -M 1000 -L @{JOB_ID} -N 200")

    # step 3: upload data and set metadata
    out_path = os.path.join("/dirac/prodsys/mandelbrot/images/raw")
    out_pattern = "data_*txt"
    out_se = "RAL-SE"
    out_metadata = json.dumps({
        "application": "mandelbrot",
        "image_format": "ascii",
        "image_width": 7680,
        "image_height": 200
    })
    job.setExecutable(
        "./mandel4ts/dirac-add-files.py",
        arguments="%s '%s' %s '%s'" % (out_path, out_pattern, out_se, out_metadata),
    )

    return job.workflow.toXML()
def addWrapper(self, logFile=''):
    """Overload of DIRAC.Job.setExecutable: add the pipeline-wrapper step.

    :param str logFile: log file name; added to the output sandbox
    :return: S_OK()
    """
    logFile = str(logFile)
    stepDefn = 'WrapperStep'
    stepName = 'RunWrapperStep'
    moduleName = 'GlastWrapperCall'

    module = ModuleDefinition(moduleName)
    module.setDescription('The utility that calls the pipeline_wrapper.')
    module.setBody('from GlastDIRAC.PipelineSystem.Modules.GlastWrapperCall import GlastWrapperCall\n')

    # Create Step definition
    step = StepDefinition(stepDefn)
    step.addModule(module)
    # return value was previously bound to an unused local; dropped
    step.createModuleInstance('GlastWrapperCall', stepDefn)

    # Define step parameters
    step.addParameter(
        Parameter("logFile", "", "string", "", "", False, False, 'Log file name'))
    self.addToOutputSandbox.append(logFile)
    self.workflow.addStep(step)

    # Instantiate the step and set its variables
    stepInstance = self.workflow.createStepInstance(stepDefn, stepName)
    stepInstance.setValue("logFile", logFile)
    return S_OK()
def _applicationModule(self):
    """Define the module parameters: seed, detector model, start event, debug."""
    md1 = self._createModuleDefinition()
    md1.addParameter(
        Parameter("RandomSeed", 0, "int", "", "", False, False,
                  "Random seed for the generator"))
    md1.addParameter(
        Parameter("detectorModel", "", "string", "", "", False, False,
                  "Detector model for simulation"))  # typo fix: was "Detecor"
    md1.addParameter(
        Parameter("startFrom", 0, "int", "", "", False, False,
                  "From how Slic start to read the input file"))
    md1.addParameter(
        Parameter("debug", False, "bool", "", "", False, False, "debug mode"))
    return md1
def submitTSold(job, transName, mqJson):
    """Create a transformation executing the job workflow."""
    DIRAC.gLogger.notice('submitTS')

    # Initialize JOB_ID
    job.workflow.addParameter(
        Parameter("JOB_ID", "000000", "string", "", "", True, False, "Temporary fix"))

    res = TransformationClient().addTransformation(
        transName, 'EvnDisp3MSCW example', 'EvnDisplay stereo reconstruction',
        'DataReprocessing', 'Standard', 'Automatic', mqJson,
        groupSize=10, body=job.workflow.toXML())

    if not res['OK']:
        DIRAC.gLogger.error(res['Message'])
        DIRAC.exit(-1)
    else:
        print(res['Value'])
    return res
def submit_trans(job, trans_name, mqJson, group_size, with_file_mask=True):
    """Create a transformation executing the job workflow."""
    DIRAC.gLogger.notice('submit_trans : %s' % trans_name)

    # Initialize JOB_ID
    job.workflow.addParameter(
        Parameter("JOB_ID", "000000", "string", "", "", True, False, "Temporary fix"))

    trans = Transformation()
    trans.setTransformationName(trans_name)  # this must be unique
    trans.setType("DataReprocessing")
    trans.setDescription("Prod3 DL1 Data Handler TS")
    trans.setLongDescription("Prod3 DL1 Data Handler conversion")  # mandatory
    trans.setBody(job.workflow.toXML())
    trans.setGroupSize(group_size)
    if with_file_mask:
        # catalog query is defined here
        trans.setFileMask(mqJson)

    result = trans.addTransformation()  # transformation is created here
    if not result['OK']:
        return result
    trans.setStatus("Active")
    trans.setAgentType("Automatic")
    return trans.getTransformationID()
def _applicationModule(self):
    """Define the module parameters for SlicPandora."""
    module = self._createModuleDefinition()
    for pname, pdefault, ptype, pdesc in (
            ("pandorasettings", "", "string", "Pandora Settings"),
            ("detectorxml", "", "string", "Detector model for simulation"),
            ("startFrom", 0, "int", "From how SlicPandora start to read the input file"),
            ("debug", False, "bool", "debug mode")):
        module.addParameter(Parameter(pname, pdefault, ptype, "", "", False, False, pdesc))
    return module
def submit_trans(job, input_meta_query, group_size):
    """Create a transformation executing the job workflow."""
    # Initialize JOB_ID
    job.workflow.addParameter(
        Parameter("JOB_ID", "000000", "string", "", "", True, False, "Temporary fix"))

    trans = Transformation()
    trans.setType("DataReprocessing")
    trans.setDescription("EvnDisplay MQ example")
    trans.setLongDescription("EvnDisplay calib_imgreco")  # mandatory
    trans.setBody(job.workflow.toXML())
    trans.setGroupSize(group_size)
    trans.setInputMetaQuery(input_meta_query)

    res = trans.addTransformation()  # transformation is created here
    if not res['OK']:
        return res
    # NOTE: the transformation is deliberately not set to "Active" here
    trans.setAgentType("Automatic")
    return trans.getTransformationID()
def submit_trans(job, trans_name, input_meta_query, group_size):
    """Create a transformation executing the job workflow."""
    DIRAC.gLogger.notice('submit_trans : %s' % trans_name)

    # Initialize JOB_ID
    job.workflow.addParameter(
        Parameter("JOB_ID", "000000", "string", "", "", True, False, "Temporary fix"))

    trans = Transformation()
    trans.setTransformationName(trans_name)  # this must be unique
    trans.setType("DataReprocessing")
    trans.setDescription("Prod5 EventDisplay TS")
    trans.setLongDescription("Prod5 EventDisplay processing")  # mandatory
    trans.setBody(job.workflow.toXML())
    trans.setGroupSize(group_size)
    trans.setInputMetaQuery(input_meta_query)

    result = trans.addTransformation()  # transformation is created here
    if not result['OK']:
        return result
    trans.setStatus("Active")
    trans.setAgentType("Automatic")
    return trans.getTransformationID()
def _applicationModule(self):
    """Define the module parameters: application-specific things.

    Each parameter, for ex. 'script', becomes a member of the module.
    """
    module = self._createModuleDefinition()  # this call MUST be there
    # The parameters below are optional; the return statement is mandatory.
    for pname, pdefault, ptype, pdesc in (
            ("script", "", "string", "Script to execute"),
            ("arguments", "", "string", "Arguments to pass to the script"),
            ("debug", False, "bool", "debug mode")):
        module.addParameter(Parameter(pname, pdefault, ptype, "", "", False, False, pdesc))
    return module
def getStepDefinition(stepName, modulesNameList=None, importLine="", parametersList=None):
    """Build a step definition from module names and parameters.

    Remember that Step definition = Parameters + Module Instances.

    :param str stepName: name of the step definition
    :param list modulesNameList: names of the modules composing the step
    :param str importLine: base import path to try first for each module
    :param list parametersList: (name, type, value, description) tuples
    :return: StepDefinition object
    """
    if modulesNameList is None:
        modulesNameList = []
    if parametersList is None:
        parametersList = []

    stepDef = StepDefinition(stepName)

    for moduleName in modulesNameList:
        module = None
        if importLine:
            try:
                module = importlib.import_module("%s.%s" % (importLine, moduleName))
            except ImportError:
                pass
        if module is None:
            # importLine unset or import failed: look for a DIRAC extension, if any
            module = ObjectLoader().loadModule("Workflow.Modules." + moduleName)["Value"]

        # create the module definition
        moduleDef = ModuleDefinition(moduleName)
        moduleDef.setDescription(module.__doc__)
        moduleDef.setBody("\nfrom %s import %s\n" % (module.__name__, moduleName))

        # add the module to the step, and instance it
        stepDef.addModule(moduleDef)
        stepDef.createModuleInstance(module_type=moduleName, name=moduleName)

    # add parameters to the step definition
    for pName, pType, pValue, pDesc in parametersList:
        stepDef.addParameter(
            Parameter(parameter=Parameter(pName, pValue, pType, "", "", True, False, pDesc)))

    return stepDef
def _applicationModule(self):
    """Define the module parameters: GEAR file, geometry, processor lists, debug."""
    md1 = self._createModuleDefinition()
    md1.addParameter(
        Parameter("inputGEAR", '', "string", "", "", False, False, "Input GEAR file"))
    md1.addParameter(
        Parameter("detectorModel", '', "string", "", "", False, False,
                  "DD4hep Geometry File"))  # typo fix: was "Geomtry"
    md1.addParameter(
        Parameter("ProcessorListToUse", [], "list", "", "", False, False,
                  "List of processors to use"))
    md1.addParameter(
        Parameter("ProcessorListToExclude", [], "list", "", "", False, False,
                  "List of processors to exclude"))
    md1.addParameter(
        Parameter("debug", False, "bool", "", "", False, False, "debug mode"))
    return md1
def __getScriptStep(self, name='Script'):
    """Internal function: build the step definition for a script module.

    :param str name: name given to the step (and its module instance)
    :return: StepDefinition carrying name/executable/arguments/logFile parameters
    """
    # Create the script module first
    moduleName = 'Script'
    module = ModuleDefinition(moduleName)
    module.setDescription('A script module that can execute any provided script.')
    module.setBody('from DIRAC.Core.Workflow.Modules.Script import Script\n')

    # Create Step definition
    step = StepDefinition(name)
    step.addModule(module)
    # return value was previously bound to an unused local; dropped
    step.createModuleInstance('Script', name)

    # Define step parameters
    step.addParameter(Parameter("name", "", "string", "", "", False, False,
                                'Name of executable'))
    step.addParameter(Parameter("executable", "", "string", "", "", False, False,
                                'Executable Script'))
    step.addParameter(Parameter("arguments", "", "string", "", "", False, False,
                                'Arguments for executable Script'))
    step.addParameter(Parameter("logFile", "", "string", "", "", False, False,
                                'Log file name'))
    return step
def submitTS(transName, job, outputquery):
    """Create a transformation executing the job workflow."""
    # Temporary fix to initialize JOB_ID and PRODUCTION_ID
    for wf_name in ("JOB_ID", "PRODUCTION_ID"):
        job.workflow.addParameter(
            Parameter(wf_name, "000000", "string", "", "", True, False, "Temporary fix"))
    job.setType('MCSimulation')  # used for the JobType plugin

    res = TransformationClient().addTransformation(
        transName, 'MC Prod3 BaseLine test', 'corsika-simtel production',
        'MCSimulation', 'Standard', 'Manual', '',
        body=job.workflow.toXML(), outputMetaQuery=outputquery)
    if not res['OK']:
        print(res['Message'])
        DIRAC.exit(-1)
    # Status / agent type deliberately left unset (manual activation)
    return res
def build_simulation_step(DL0_data_set, name_tag=''):
    """Set up the Corsika + sim_telarray simulation step.

    Note that there is no InputQuery, since jobs created by this step
    don't require any InputData.

    :param str DL0_data_set: DL0 dataset name used to derive the output meta query
    :param str name_tag: optional suffix appended to the step name
    :return: ProductionStep object
    :raises ValueError: if the dataset phiP value is neither 0 nor 180
    """
    DIRAC.gLogger.notice('MC Production step')
    prod_step_1 = ProductionStep()
    prod_step_1.Name = 'Simulation_%s' % DL0_data_set.replace('AdvancedBaseline_NSB1x_', '')
    prod_step_1.Name += '%s' % name_tag
    prod_step_1.Type = 'MCSimulation'
    prod_step_1.Outputquery = get_dataset_MQ(DL0_data_set)
    prod_step_1.Outputquery['nsb'] = {'in': [1, 5]}

    # get meta data to be passed to the simulation job
    site = prod_step_1.Outputquery['site']
    particle = prod_step_1.Outputquery['particle']
    phi_p = prod_step_1.Outputquery['phiP']['=']
    if phi_p == 180:
        pointing_dir = 'North'
    elif phi_p == 0:
        pointing_dir = 'South'
    else:
        # previously left pointing_dir unbound, causing a NameError further down
        raise ValueError('Unsupported phiP value: %s' % phi_p)
    zenith_angle = prod_step_1.Outputquery['thetaP']['=']

    # Here define the job description (i.e. Name, Executable, etc.)
    # to be associated to the first ProductionStep, as done when using the TS
    job1 = Prod5bMCPipeNSBJob()
    job1.version = '2020-06-29b'
    job1.compiler = 'gcc83_matchcpu'
    # Initialize JOB_ID
    job1.workflow.addParameter(Parameter("JOB_ID", "000000", "string", "", "",
                                         True, False, "Temporary fix"))
    # configuration
    job1.setName('Prod5b_MC_Pipeline_NSB')
    job1.set_site(site)
    job1.set_particle(particle)
    job1.set_pointing_dir(pointing_dir)
    job1.zenith_angle = zenith_angle
    job1.n_shower = 50000
    if particle == 'gamma':
        job1.n_shower = 20000
    job1.setOutputSandbox(['*Log.txt'])
    job1.start_run_number = '0'
    job1.run_number = '@{JOB_ID}'  # dynamic, resolved at run time
    job1.setupWorkflow(debug=False)

    # Add the job description to the first ProductionStep
    prod_step_1.Body = job1.workflow.toXML()
    return prod_step_1
def defineProd3MCJob(version, layout, site, particle, pointing, zenith, nShower):
    """Simple wrapper to create a Prod3MCJob and set up its parameters."""
    job = Prod3MCPipeJob()

    # package and version
    job.setPackage('corsika_simhessarray')
    job.setVersion(version)  # final with fix for gamma-diffuse
    job.no_sct = True  # NO SCT for 40deg !
    job.setArrayLayout(layout)
    job.setSite(site)
    job.setParticle(particle)
    job.setPointingDir(pointing)
    job.setZenithAngle(zenith)
    job.setNShower(nShower)

    # start run number (it will be added to the Task_ID)
    job.setStartRunNumber('0')
    # run number for TS submission: JOB_ID is left for dynamic resolution
    # during the job; it corresponds to the Task_ID
    job.setRunNumber('@{JOB_ID}')

    # get dirac log files
    job.setOutputSandbox(['*Log.txt'])
    # add the sequence of executables
    job.setupWorkflow(debug=False)

    # Temporary fix to initialize JOB_ID and PRODUCTION_ID
    for wf_name in ("JOB_ID", "PRODUCTION_ID"):
        job.workflow.addParameter(
            Parameter(wf_name, "000000", "string", "", "", True, False, "Temporary fix"))
    job.setType('MCSimulation')  # used for the JobType plugin
    return job
def startElement(self, name, attrs):
    """SAX handler: push the workflow object matching the XML element onto the stack.

    :param str name: XML element name
    :param attrs: SAX attributes of the element
    """
    self.clearCharacters()  # clear to remove empty or nonprintable characters
    if name == "Workflow":
        if self.root is None:  # if root not defined by constructor
            self.root = Workflow()
        self.stack.append(self.root)
    elif name == "StepDefinition":
        obj = StepDefinition("TemporaryXMLObject_StepDefinition")
        if self.root is None:  # in case we are saving Step only
            self.root = obj
        self.stack.append(obj)
    elif name == "StepInstance":
        obj = StepInstance("TemporaryXMLObject_StepInstance")
        self.stack.append(obj)
    elif name == "ModuleDefinition":
        obj = ModuleDefinition("TemporaryXMLObject_ModuleDefinition")
        if self.root is None:  # in case we are saving Module only
            self.root = obj
        self.stack.append(obj)
    elif name == "ModuleInstance":
        obj = ModuleInstance("TemporaryXMLObject_ModuleInstance")
        self.stack.append(obj)
    elif name == "Parameter":
        obj = Parameter(str(attrs['name']), None, str(attrs['type']),
                        str(attrs['linked_module']), str(attrs['linked_parameter']),
                        str(attrs['in']), str(attrs['out']), str(attrs['description']))
        self.stack.append(obj)
    # TEMPORARY CODE (duplicate names were listed twice in the original chain)
    elif name in ("origin", "version", "name", "type", "value", "required",
                  "descr_short", "description", "body"):
        pass
    else:
        # was a Python 2 print statement (a syntax error under Python 3);
        # fixed to the print() function used elsewhere in this file
        print("UNTREATED! startElement name=", name, "attr=", attrs.getLength(),
              attrs.getNames())
def submitTS(job, transName, inputquery):
    """Create a transformation executing the job workflow."""
    DIRAC.gLogger.notice('submitTS')

    # Initialize JOB_ID
    job.workflow.addParameter(
        Parameter("JOB_ID", "000000", "string", "", "", True, False, "Temporary fix"))

    return TransformationClient().addTransformation(
        transName, 'EvnDisp3 example', 'EvnDisplay calib_imgreco',
        'DataReprocessing', 'Standard', 'Manual', '',
        inputMetaQuery=inputquery, groupSize=1, body=job.workflow.toXML())
def submit_trans(job, trans_name):
    """Create a transformation executing the job workflow."""
    DIRAC.gLogger.notice('submit_trans : %s' % trans_name)

    # Initialize JOB_ID
    job.workflow.addParameter(
        Parameter("JOB_ID", "000000", "string", "", "", True, False, "Temporary fix"))

    trans = Transformation()
    trans.setTransformationName(trans_name)  # this must be unique
    trans.setType("MCSimulation")
    trans.setDescription("Prod5 MC Pipe NSB TS")
    trans.setLongDescription("Prod5 simulation pipeline")  # mandatory
    trans.setBody(job.workflow.toXML())

    outcome = trans.addTransformation()  # transformation is created here
    if not outcome['OK']:
        return outcome
    trans.setStatus("Active")
    trans.setAgentType("Automatic")
    return trans.getTransformationID()