def build_evndisp_step(DL0_data_set, nsb=1):
    """Define a new EventDisplay analysis production step.

    Parameters
    ----------
    DL0_data_set : str
        Name of the DL0 input data set (the NSB1x variant; the NSB5x name
        is derived from it by string substitution).
    nsb : int, optional
        Night-sky-background multiplier. Only 1 and 5 are supported.

    Returns
    -------
    ProductionStep
        Fully configured analysis step (Body set to the job workflow XML).

    Raises
    ------
    ValueError
        If ``nsb`` is not 1 or 5.
    """
    if nsb == 1:
        DIRAC.gLogger.notice('NSB1x Analysis')
        DL0_data_set_NSB = DL0_data_set
    elif nsb == 5:
        DIRAC.gLogger.notice('NSB5x Analysis')
        DL0_data_set_NSB = DL0_data_set.replace('NSB1x', 'NSB5x')
    else:
        # Bug fix: previously any other value fell through and the function
        # crashed later with UnboundLocalError on DL0_data_set_NSB.
        raise ValueError('Unsupported nsb value: %r (expected 1 or 5)' % (nsb,))

    prod_step_2 = ProductionStep()
    prod_step_2.Name = 'Analysis_' + DL0_data_set_NSB.replace(
        'AdvancedBaseline_', '').replace('DL0', 'DL1')
    prod_step_2.Type = 'DataReprocessing'  # This corresponds to the Transformation Type
    prod_step_2.Inputquery = get_dataset_MQ(DL0_data_set_NSB)
    prod_step_2.Outputquery = get_dataset_MQ(DL0_data_set_NSB.replace('DL0', 'DL1'))

    # Here define the job description to be associated to the second ProductionStep
    job2 = EvnDispProd5Job()
    job2.setName('Prod5_EvnDisp')
    # output
    job2.setOutputSandbox(['*Log.txt'])

    # refine output meta data if needed
    output_meta_data = copy(prod_step_2.Outputquery)
    job2.set_meta_data(output_meta_data)
    job2.set_file_meta_data(nsb=output_meta_data['nsb']['='])

    # check if La Palma else use default that is Paranal
    if output_meta_data['site'] == 'LaPalma':
        job2.prefix = "CTA.prod5N"
        # Whitespace-separated list of array layouts; consumers use .split(),
        # so single-space separation is equivalent to the original continuation
        # string.
        job2.layout_list = (
            'BL-0LSTs05MSTs-MSTF BL-0LSTs05MSTs-MSTN '
            'BL-4LSTs00MSTs-MSTN BL-4LSTs05MSTs-MSTF '
            'BL-4LSTs05MSTs-MSTN BL-4LSTs09MSTs-MSTF '
            'BL-4LSTs09MSTs-MSTN BL-4LSTs15MSTs-MSTF '
            'BL-4LSTs15MSTs-MSTN'
        )
        DIRAC.gLogger.notice('LaPalma layouts:\n', job2.layout_list.split())
    elif output_meta_data['site'] == 'Paranal':
        # NOTE(review): this relies on EvnDispProd5Job providing a default
        # layout_list attribute for Paranal — confirm the class defines one,
        # otherwise this notice raises AttributeError.
        DIRAC.gLogger.notice('Paranal layouts:\n', job2.layout_list.split())

    job2.ts_task_id = '@{JOB_ID}'  # dynamic
    job2.setupWorkflow(debug=False)
    job2.setType('EvnDisp3')  # mandatory *here*
    prod_step_2.Body = job2.workflow.toXML()
    prod_step_2.GroupSize = 5
    return prod_step_2
def build_evndisp_step(DL0_data_set, nsb=1, name_tag=''):
    """Define a new EventDisplay analysis production step (Singularity job).

    NOTE(review): this redefines ``build_evndisp_step`` — at import time it
    shadows the earlier definition with the same name in this module; confirm
    that is intentional.

    Parameters
    ----------
    DL0_data_set : str
        Name of the DL0 input data set (the NSB1x variant; the NSB5x name
        is derived from it by string substitution).
    nsb : int, optional
        Night-sky-background multiplier. Only 1 and 5 are supported.
    name_tag : str, optional
        Suffix appended to the production step name.

    Returns
    -------
    ProductionStep
        Fully configured analysis step (Body set to the job workflow XML).

    Raises
    ------
    ValueError
        If ``nsb`` is not 1 or 5.
    """
    if nsb == 1:
        DIRAC.gLogger.notice('NSB1x Analysis')
        DL0_data_set_NSB = DL0_data_set
    elif nsb == 5:
        DIRAC.gLogger.notice('NSB5x Analysis')
        DL0_data_set_NSB = DL0_data_set.replace('NSB1x', 'NSB5x')
    else:
        # Bug fix: previously any other value fell through and the function
        # crashed later with UnboundLocalError on DL0_data_set_NSB.
        raise ValueError('Unsupported nsb value: %r (expected 1 or 5)' % (nsb,))

    prod_step_2 = ProductionStep()
    prod_step_2.Name = 'Analysis_' + DL0_data_set_NSB.replace(
        'AdvancedBaseline_', '').replace('DL0', 'DL1')
    prod_step_2.Name += '%s' % name_tag
    prod_step_2.Type = 'DataReprocessing'  # This corresponds to the Transformation Type
    prod_step_2.Inputquery = get_dataset_MQ(DL0_data_set_NSB)
    prod_step_2.Outputquery = get_dataset_MQ(DL0_data_set_NSB.replace('DL0', 'DL1'))

    # Here define the job description to be associated to the second ProductionStep
    job2 = EvnDispProd5SingJob(cpuTime=259200.)
    job2.version = 'eventdisplay-cta-dl1-prod5.v03'
    job2.setName('Prod5b_EvnDisp_Singularity')
    # output
    job2.setOutputSandbox(['*Log.txt'])

    # refine output meta data if needed
    output_meta_data = copy(prod_step_2.Outputquery)
    job2.set_meta_data(output_meta_data)
    job2.set_file_meta_data(nsb=output_meta_data['nsb']['='])

    job2.ts_task_id = '@{JOB_ID}'  # dynamic
    job2.group_size = 5  # for the input files verification
    job2.setupWorkflow(debug=False)
    job2.setType('EvnDisp3')  # mandatory *here*
    prod_step_2.Body = job2.workflow.toXML()
    # NOTE(review): kept equal to job2.group_size — presumably these two must
    # match; confirm before changing either.
    prod_step_2.GroupSize = 5
    # return ProductionStep object
    return prod_step_2