def run_simtel_sst(args):
    """ Simple wrapper to create a Prod4SimtelSSTJob and setup parameters
        from positional arguments given on the command line.

        Parameters:
        args -- mode (trans_name dataset_name group_size)
    """
    DIRAC.gLogger.notice('run_simtel_sst')
    # the first positional argument selects the submission mode
    mode = args[0]

    # TS mode carries three extra positional arguments
    if mode == 'TS':
        trans_name, dataset_name = args[1], args[2]
        group_size = int(args[3])

    # job setup - 72 hours of CPU time
    sim_job = Prod4SimtelSSTJob(cpuTime=259200.)
    # override for testing
    sim_job.setName('Prod4_Simtel')
    sim_job.version = '2018-11-07'

    # keep the log files in the output sandbox
    sim_job.setOutputSandbox(['*Log.txt'])

    if mode == 'WMS':
        # direct WMS submission, used for debugging
        sim_job.base_path = '/vo.cta.in2p3.fr/user/b/bregeon'
        sim_job.ts_task_id = '1'
        out_md = {
            'array_layout': 'Baseline-SST-only',
            'site': 'Paranal',
            'particle': 'proton',
            'phiP': 0.0,
            'thetaP': 20.0,
            sim_job.program_category + '_prog': 'simtel',
            sim_job.program_category + '_prog_version': sim_job.version,
            'data_level': 0,
            'configuration_id': 4,
        }
        sim_job.set_meta_data(out_md)
        sim_job.telconfig_list = 'sst-1m sst-gct sst-astri+chec-s'
        sim_job.N_output_files = len(sim_job.telconfig_list.split())
        sim_job.setupWorkflow(debug=True)
        # submit to the WMS for debug
        sim_job.setDestination('LCG.IN2P3-CC.fr')
        res = submit_wms(sim_job)
    elif mode == 'TS':
        # submission through the Transformation System
        in_mq = get_dataset_MQ(dataset_name)
        # output meta data starts as a copy of the input query
        out_md = copy(in_mq)
        sim_job.set_meta_data(out_md)
        # other known configs: sst-1m sst-astri sst-astri+chec-s sst-astri+chec-s-7mm sst-gct
        sim_job.telconfig_list = 'sst-1m sst-gct sst-astri+chec-s'
        sim_job.N_output_files = len(sim_job.telconfig_list.split())
        sim_job.ts_task_id = '@{JOB_ID}'  # dynamic
        sim_job.setupWorkflow(debug=False)
        sim_job.setType('EvnDisp3')  # mandatory *here*
        res = submit_trans(sim_job, trans_name, in_mq, group_size)
    else:
        DIRAC.gLogger.error('1st argument should be the job mode: WMS or TS,\n\
                             not %s' % mode)
        return None

    return res
# Example n. 2
def build_evndisp_step(DL0_data_set, nsb=1):
    ''' Define a new EventDisplay analysis production step

    Parameters:
    DL0_data_set -- DL0 dataset name (NSB1x naming convention)
    nsb -- night-sky-background scaling factor, 1 or 5

    @return ProductionStep object
    @raise ValueError for an unsupported nsb value
    '''
    if nsb == 1:
        DIRAC.gLogger.notice('NSB1x Analysis')
        DL0_data_set_NSB = DL0_data_set
    elif nsb == 5:
        DIRAC.gLogger.notice('NSB5x Analysis')
        DL0_data_set_NSB = DL0_data_set.replace('NSB1x', 'NSB5x')
    else:
        # fail fast: the original code fell through and crashed below
        # with a NameError on DL0_data_set_NSB
        raise ValueError('nsb must be 1 or 5, got %s' % nsb)

    prod_step_2 = ProductionStep()
    prod_step_2.Name = 'Analysis_' + DL0_data_set_NSB.replace(
        'AdvancedBaseline_', '').replace('DL0', 'DL1')
    prod_step_2.Type = 'DataReprocessing'  # This corresponds to the Transformation Type
    prod_step_2.Inputquery = get_dataset_MQ(DL0_data_set_NSB)
    prod_step_2.Outputquery = get_dataset_MQ(DL0_data_set_NSB.replace('DL0', 'DL1'))

    # Here define the job description to be associated to the second ProductionStep
    job2 = EvnDispProd5Job()
    job2.setName('Prod5_EvnDisp')
    # collect the log files in the output sandbox
    job2.setOutputSandbox(['*Log.txt'])
    # refine output meta data if needed
    output_meta_data = copy(prod_step_2.Outputquery)
    job2.set_meta_data(output_meta_data)
    job2.set_file_meta_data(nsb=output_meta_data['nsb']['='])

    # check if La Palma else use default that is Paranal
    if output_meta_data['site'] == 'LaPalma':
        job2.prefix = "CTA.prod5N"
        job2.layout_list = 'BL-0LSTs05MSTs-MSTF BL-0LSTs05MSTs-MSTN \
                            BL-4LSTs00MSTs-MSTN BL-4LSTs05MSTs-MSTF \
                            BL-4LSTs05MSTs-MSTN BL-4LSTs09MSTs-MSTF \
                            BL-4LSTs09MSTs-MSTN BL-4LSTs15MSTs-MSTF \
                            BL-4LSTs15MSTs-MSTN'
        DIRAC.gLogger.notice('LaPalma layouts:\n', job2.layout_list.split())
    elif output_meta_data['site'] == 'Paranal':
        # presumably relies on the EvnDispProd5Job default layout_list -- TODO confirm
        DIRAC.gLogger.notice('Paranal layouts:\n', job2.layout_list.split())

    job2.ts_task_id = '@{JOB_ID}'  # dynamic
    job2.setupWorkflow(debug=False)
    job2.setType('EvnDisp3')  # mandatory *here*
    prod_step_2.Body = job2.workflow.toXML()
    prod_step_2.GroupSize = 5
    # return ProductionStep object
    return prod_step_2
# Example n. 3
def launch_job(args):
    """ Simple launcher to instanciate a Job and setup parameters
        from positional arguments given on the command line.

        Parameters:
        args -- mode (trans_name dataset_name group_size)
    """
    DIRAC.gLogger.notice('Running EventDisplay jobs')
    # the first positional argument selects the submission mode
    mode = args[0]

    # TS mode carries three extra positional arguments
    if mode == 'TS':
        trans_name, dataset_name = args[1], args[2]
        group_size = int(args[3])

    # job setup - 72 hours of CPU time
    ed_job = EvnDispProd5SingJob(cpuTime=259200.)
    ed_job.version = 'eventdisplay-cta-dl1-prod5.v03'
    # override for testing
    ed_job.setName('Prod5b_EvnDisp')
    # keep the log files in the output sandbox
    ed_job.setOutputSandbox(['*Log.txt'])

    if mode == 'WMS':
        # direct WMS submission, used for debugging
        ed_job.base_path = '/vo.cta.in2p3.fr/user/b/bregeon'
        ed_job.ts_task_id = '62'
        md = {
            'array_layout': 'Advanced-Baseline',
            'site': 'LaPalma',
            'particle': 'proton',
            'phiP': 0.0,
            'thetaP': 20.0
        }
        ed_job.set_meta_data(md)
        ed_job.set_file_meta_data(nsb=1)
        ed_job.setupWorkflow(debug=True)
        # optional pinning for debug:
        # ed_job.setDestination('LCG.IN2P3-CC.fr')
        res = submit_wms(ed_job)
    elif mode == 'TS':
        # submission through the Transformation System
        in_mq = get_dataset_MQ(dataset_name)
        # output meta data starts as a copy of the input query
        out_md = copy(in_mq)
        ed_job.set_meta_data(out_md)
        ed_job.set_file_meta_data(nsb=out_md['nsb']['='])
        # adjust calibration file
        ed_job.ts_task_id = '@{JOB_ID}'  # dynamic
        ed_job.group_size = group_size  # for the input files verification
        ed_job.setupWorkflow(debug=False)
        ed_job.setType('EvnDisp3')  # mandatory *here*
        res = submit_trans(ed_job, trans_name, in_mq, group_size)
    else:
        DIRAC.gLogger.error('1st argument should be the job mode: WMS or TS,\n\
                             not %s' % mode)
        return None

    return res
# Example n. 4
def build_simulation_step(DL0_data_set, name_tag=''):
    ''' Setup Corsika + sim_telarray step

    Note that there is no InputQuery,
    since jobs created by this steps don't require any InputData

    Parameters:
    DL0_data_set -- name of the DL0 dataset whose meta query drives the simulation
    name_tag -- optional suffix appended to the step name

    @return ProductionStep object
    @raise ValueError if phiP in the dataset meta data is neither 180 nor 0
    '''
    DIRAC.gLogger.notice('MC Production step')
    prod_step_1 = ProductionStep()
    prod_step_1.Name = 'Simulation_%s' % DL0_data_set.replace('AdvancedBaseline_NSB1x_', '')
    prod_step_1.Name += '%s' % name_tag
    prod_step_1.Type = 'MCSimulation'
    prod_step_1.Outputquery = get_dataset_MQ(DL0_data_set)
    prod_step_1.Outputquery['nsb'] = {'in': [1, 5]}

    # get meta data to be passed to simulation job
    site = prod_step_1.Outputquery['site']
    particle = prod_step_1.Outputquery['particle']
    phiP = prod_step_1.Outputquery['phiP']['=']
    if phiP == 180:
        pointing_dir = 'North'
    elif phiP == 0:
        pointing_dir = 'South'
    else:
        # fail fast: the original code fell through and crashed later
        # with a NameError on pointing_dir
        raise ValueError('unsupported phiP value: %s' % phiP)
    zenith_angle = prod_step_1.Outputquery['thetaP']['=']

    # Here define the job description (i.e. Name, Executable, etc.)
    # to be associated to the first ProductionStep, as done when using the TS
    job1 = Prod5bMCPipeNSBJob()
    job1.version = '2020-06-29b'
    job1.compiler = 'gcc83_matchcpu'
    # Initialize JOB_ID
    job1.workflow.addParameter(Parameter("JOB_ID", "000000", "string", "", "",
                                         True, False, "Temporary fix"))
    # configuration
    job1.setName('Prod5b_MC_Pipeline_NSB')
    job1.set_site(site)
    job1.set_particle(particle)
    job1.set_pointing_dir(pointing_dir)
    job1.zenith_angle = zenith_angle
    # fewer showers for gamma than for the default (hadron) case
    job1.n_shower = 50000
    if particle == 'gamma':
        job1.n_shower = 20000

    job1.setOutputSandbox(['*Log.txt'])
    job1.start_run_number = '0'
    job1.run_number = '@{JOB_ID}'  # dynamic
    job1.setupWorkflow(debug=False)
    # Add the job description to the first ProductionStep
    prod_step_1.Body = job1.workflow.toXML()
    # return ProductionStep object
    return prod_step_1
# Example n. 5
def build_evndisp_step(DL0_data_set, nsb=1, name_tag=''):
    ''' Define a new EventDisplay analysis production step

    Parameters:
    DL0_data_set -- DL0 dataset name (NSB1x naming convention)
    nsb -- night-sky-background scaling factor, 1 or 5
    name_tag -- optional suffix appended to the step name

    @return ProductionStep object
    @raise ValueError for an unsupported nsb value
    '''
    if nsb == 1:
        DIRAC.gLogger.notice('NSB1x Analysis')
        DL0_data_set_NSB = DL0_data_set
    elif nsb == 5:
        DIRAC.gLogger.notice('NSB5x Analysis')
        DL0_data_set_NSB = DL0_data_set.replace('NSB1x', 'NSB5x')
    else:
        # fail fast: the original code fell through and crashed below
        # with a NameError on DL0_data_set_NSB
        raise ValueError('nsb must be 1 or 5, got %s' % nsb)

    prod_step_2 = ProductionStep()
    prod_step_2.Name = 'Analysis_' + DL0_data_set_NSB.replace(
        'AdvancedBaseline_', '').replace('DL0', 'DL1')
    prod_step_2.Name += '%s' % name_tag
    prod_step_2.Type = 'DataReprocessing'  # This corresponds to the Transformation Type
    prod_step_2.Inputquery = get_dataset_MQ(DL0_data_set_NSB)
    prod_step_2.Outputquery = get_dataset_MQ(DL0_data_set_NSB.replace('DL0', 'DL1'))

    # Here define the job description to be associated to the second ProductionStep
    job2 = EvnDispProd5SingJob(cpuTime=259200.)
    job2.version = 'eventdisplay-cta-dl1-prod5.v03'
    job2.setName('Prod5b_EvnDisp_Singularity')
    # collect the log files in the output sandbox
    job2.setOutputSandbox(['*Log.txt'])
    # refine output meta data if needed
    output_meta_data = copy(prod_step_2.Outputquery)
    job2.set_meta_data(output_meta_data)
    job2.set_file_meta_data(nsb=output_meta_data['nsb']['='])

    job2.ts_task_id = '@{JOB_ID}'  # dynamic
    job2.group_size = 5  # for the input files verification
    job2.setupWorkflow(debug=False)
    job2.setType('EvnDisp3')  # mandatory *here*
    prod_step_2.Body = job2.workflow.toXML()
    # NOTE(review): looks like GroupSize must match job2.group_size -- confirm
    prod_step_2.GroupSize = 5
    # return ProductionStep object
    return prod_step_2
def runEvnDisp3(args):
    """ Simple wrapper to create a EvnDisp3RefJob and setup parameters
        from positional arguments given on the command line.

        Parameters:
        args -- mode (dataset_name group_size)
    """
    DIRAC.gLogger.notice('runEvnDisp3')
    # get arguments
    mode = args[0]

    # TS mode carries two extra positional arguments
    if mode == 'TS':
        dataset_name = args[1]
        group_size = int(args[2])

    ################################
    job = EvnDisp3RefJobC7(cpuTime=432000)  # to be adjusted!!

    job.setName('EvnDisp3')
    job.setType('EvnDisp3')
    job.setOutputSandbox(['*Log.txt'])

    job.version = "prod3b_d20200521"
    # change here for Paranal or La Palma
    job.prefix = "CTA.prod4S"  # don't mind reference to prod4, it's prod3
    #  set calibration file and parameters file
    job.calibration_file = "prod3b.Paranal-20171214.ped.root"
    job.configuration_id = 6

    if mode == 'WMS':
        # direct WMS submission, used for debugging
        job.base_path = '/vo.cta.in2p3.fr/user/a/arrabito'
        job.ts_task_id = '1'
        output_meta_data = {
            'array_layout': 'Baseline',
            'site': 'Paranal',
            'particle': 'gamma',
            'phiP': 0.0,
            'thetaP': 20.0,
            job.program_category + '_prog': 'simtel',
            job.program_category + '_prog_version': job.version,
            'data_level': 0,
            'configuration_id': job.configuration_id
        }
        job.set_meta_data(output_meta_data)
        job.setupWorkflow(debug=True)
        # submit to the WMS for debug
        #job.setDestination('LCG.CNAF.it')
        res = submit_wms(job)
    elif mode == 'TS':
        input_meta_query = get_dataset_MQ(dataset_name)
        output_meta_data = copy(input_meta_query)
        job.set_meta_data(output_meta_data)
        job.ts_task_id = '@{JOB_ID}'  # dynamic
        job.setupWorkflow(debug=False)
        # submit the workflow to the TS
        # NOTE(review): unlike the other wrappers, no trans_name is passed here;
        # presumably this submit_trans overload derives it -- confirm its signature
        res = submit_trans(job, input_meta_query, group_size)
    else:
        # bug fix: the original literal '\n\not' escaped to two newlines + 'ot',
        # mangling the message to "...TS,\n\not %s" -> "ot %s"
        DIRAC.gLogger.error(
            '1st argument should be the job mode: WMS or TS,\nnot %s' % mode)
        return None

    return res
def launch_job(args):
    """ Simple launcher to instanciate a Job and setup parameters
        from positional arguments given on the command line.

        Parameters:
        args -- mode (trans_name dataset_name group_size)
    """
    DIRAC.gLogger.notice('launch_job')
    # the first positional argument selects the submission mode
    mode = args[0]

    # TS mode carries three extra positional arguments
    if mode == 'TS':
        trans_name, dataset_name = args[1], args[2]
        group_size = int(args[3])

    # job setup - 72 hours of CPU time
    dl1_job = Prod3DL1DataHandlerJob(cpuTime=259200.)
    # override for testing
    dl1_job.setName('Prod3_DL1DataHandler')
    # keep the log files in the output sandbox
    dl1_job.setOutputSandbox(['*Log.txt'])
    # version
    dl1_job.version = 'v0.7.4'
    # configuration test or train "grid_config_test_02052019.yml"
    dl1_job.split_md = 'test'
    dl1_job.config_file_name = 'grid_config_%s_02052019.yml' % dl1_job.split_md

    if mode == 'WMS':
        # direct WMS submission, used for debugging
        dl1_job.base_path = '/vo.cta.in2p3.fr/user/b/bregeon'
        dl1_job.ts_task_id = '123'
        md = {
            'array_layout': 'Baseline',
            'site': 'LaPalma',
            'particle': 'proton',
            'phiP': 180.0,
            'thetaP': 20.0,
            'nsb': 1,
            'split': dl1_job.split_md
        }
        dl1_job.set_meta_data(md)
        dl1_job.setupWorkflow(debug=True)
        dl1_job.setType('EvnDisp3')  # mandatory *here*
        # optional pinning for debug:
        # dl1_job.setDestination('LCG.IN2P3-CC.fr')
        res = submit_wms(dl1_job)
    elif mode == 'TS':
        # submission through the Transformation System
        in_mq = get_dataset_MQ(dataset_name)
        # refine output directory meta data if needed
        out_md = copy(in_mq)
        dl1_job.set_meta_data(out_md)
        dl1_job.ts_task_id = '@{JOB_ID}'  # dynamic
        dl1_job.setupWorkflow(debug=False)
        dl1_job.setType('EvnDisp3')  # mandatory *here*
        res = submit_trans(dl1_job,
                           trans_name,
                           json.dumps(in_mq),
                           group_size,
                           with_file_mask=True)
    else:
        DIRAC.gLogger.error('1st argument should be the job mode: WMS or TS,\n\
                             not %s' % mode)
        return None

    return res
# Example n. 8
def launch_job(args):
    """ Simple launcher to instanciate a Job and setup parameters
        from positional arguments given on the command line.

        Parameters:
        args -- mode (trans_name dataset_name group_size)
    """
    DIRAC.gLogger.notice('Launching jobs')
    # the first positional argument selects the submission mode
    mode = args[0]

    # TS mode carries three extra positional arguments
    if mode == 'TS':
        trans_name, dataset_name = args[1], args[2]
        group_size = int(args[3])

    # job setup - 72 hours of CPU time
    ed5_job = EvnDispProd5Job(cpuTime=259200.)
    # override for testing
    ed5_job.setName('Prod5_EvnDisp')
    # keep the log files in the output sandbox
    ed5_job.setOutputSandbox(['*Log.txt'])

    if mode == 'WMS':
        # direct WMS submission, used for debugging
        ed5_job.base_path = '/vo.cta.in2p3.fr/user/b/bregeon'
        ed5_job.ts_task_id = '111'
        md = {'array_layout': 'Baseline-Advanced', 'site': 'LaPalma',
              'particle': 'gamma', 'phiP': 0.0, 'thetaP': 20.0}
        ed5_job.prefix = 'CTA.prod5N'
        ed5_job.layout_list = 'BL-0LSTs05MSTs-MSTF BL-0LSTs05MSTs-MSTN \
                           BL-4LSTs00MSTs-MSTN BL-4LSTs05MSTs-MSTF \
                           BL-4LSTs05MSTs-MSTN BL-4LSTs09MSTs-MSTF \
                           BL-4LSTs09MSTs-MSTN BL-4LSTs15MSTs-MSTF \
                           BL-4LSTs15MSTs-MSTN'
        ed5_job.set_meta_data(md)
        ed5_job.set_file_meta_data({'nsb': 1})
        ed5_job.setupWorkflow(debug=True)
        # optional pinning for debug:
        # ed5_job.setDestination('LCG.IN2P3-CC.fr')
        res = submit_wms(ed5_job)
    elif mode == 'TS':
        # submission through the Transformation System
        in_mq = get_dataset_MQ(dataset_name)
        # output meta data starts as a copy of the input query
        out_md = copy(in_mq)
        ed5_job.set_meta_data(out_md)
        ed5_job.set_file_meta_data(nsb=out_md['nsb']['='])
        if out_md['site'] == 'LaPalma':
            ed5_job.prefix = 'CTA.prod5N'
            ed5_job.layout_list = 'BL-0LSTs05MSTs-MSTF BL-0LSTs05MSTs-MSTN \
                               BL-4LSTs00MSTs-MSTN BL-4LSTs05MSTs-MSTF \
                               BL-4LSTs05MSTs-MSTN BL-4LSTs09MSTs-MSTF \
                               BL-4LSTs09MSTs-MSTN BL-4LSTs15MSTs-MSTF \
                               BL-4LSTs15MSTs-MSTN'
            DIRAC.gLogger.notice('LaPalma layouts:\n', ed5_job.layout_list.split())
        elif out_md['site'] == 'Paranal':
            DIRAC.gLogger.notice('Paranal layouts:\n', ed5_job.layout_list.split())
        # adjust calibration file for the NSB5x datasets
        if out_md['nsb']['='] == 5:
            ed5_job.calibration_file = 'prod5/prod5-halfmoon-IPR.root'
        ed5_job.ts_task_id = '@{JOB_ID}'  # dynamic
        ed5_job.setupWorkflow(debug=False)
        ed5_job.setType('EvnDisp3')  # mandatory *here*
        res = submit_trans(ed5_job, trans_name, in_mq, group_size)
    else:
        DIRAC.gLogger.error('1st argument should be the job mode: WMS or TS,\n\
                             not %s' % mode)
        return None

    return res
# Example n. 9
def launch_job(args):
    """ Simple launcher to instanciate a Job and setup parameters
        from positional arguments given on the command line.

        Parameters:
        args -- mode (trans_name dataset_name group_size)
    """
    DIRAC.gLogger.notice('run_evndisp_sst')
    # the first positional argument selects the submission mode
    mode = args[0]

    # TS mode carries three extra positional arguments
    if mode == 'TS':
        trans_name, dataset_name = args[1], args[2]
        group_size = int(args[3])

    # job setup - 72 hours of CPU time
    sst_job = EvnDisp4SSTJob(cpuTime=259200.)
    # override for testing
    sst_job.setName('Prod4_Evndisp')
    # keep the log files in the output sandbox
    sst_job.setOutputSandbox(['*Log.txt'])

    if mode == 'WMS':
        # direct WMS submission, used for debugging
        sst_job.base_path = '/vo.cta.in2p3.fr/user/b/bregeon'
        sst_job.ts_task_id = '123'
        md = {
            'array_layout': 'Baseline-SST-only',
            'site': 'Paranal',
            'particle': 'proton',
            'phiP': 0.0,
            'thetaP': 20.0
        }
        sst_job.set_meta_data(md)
        sst_job.set_file_meta_data({'tel_config': 'sst-1m'})
        sst_job.setupWorkflow(debug=True)
        # submit to the WMS for debug
        sst_job.setDestination('LCG.IN2P3-CC.fr')
        res = submit_wms(sst_job)
    elif mode == 'TS':
        # submission through the Transformation System
        in_mq = get_dataset_MQ(dataset_name)
        # output meta data starts as a copy of the input query
        out_md = copy(in_mq)
        sst_job.set_meta_data(out_md)
        # only the telescope configuration is file-level meta data
        file_md = {'tel_config': out_md['tel_config']}
        sst_job.set_file_meta_data(file_md)
        # ASTRI telescopes use a dedicated layout
        if 'astri' in file_md['tel_config']:
            sst_job.layout = '3HB9-SST-A'
        sst_job.ts_task_id = '@{JOB_ID}'  # dynamic
        sst_job.setupWorkflow(debug=False)
        sst_job.setType('EvnDisp3')  # mandatory *here*
        res = submit_trans(sst_job, trans_name, json.dumps(in_mq),
                           group_size)
    else:
        DIRAC.gLogger.error('1st argument should be the job mode: WMS or TS,\n\
                             not %s' % mode)
        return None

    return res
def launch_job(args):
    """ Simple launcher to instanciate a Job and setup parameters
        from positional arguments given on the command line.

        Parameters:
        args -- mode (trans_name dataset_name group_size)
    """
    DIRAC.gLogger.notice('Launching jobs')
    # the first positional argument selects the submission mode
    mode = args[0]

    # TS mode carries three extra positional arguments
    if mode == 'TS':
        trans_name, dataset_name = args[1], args[2]
        group_size = int(args[3])

    # job setup - 72 hours of CPU time
    s1_job = Prod3Stage1Job(cpuTime=259200.)
    s1_job.stage1_config = 'stage1_config_Prod3_LaPalma_Baseline_NSB1x.json'
    # override for testing
    s1_job.setName('Prod3_ctapipe_stage1')
    # keep the log files in the output sandbox
    s1_job.setOutputSandbox(['*Log.txt'])

    if mode == 'WMS':
        # direct WMS submission, used for debugging
        s1_job.base_path = '/vo.cta.in2p3.fr/user/b/bregeon'
        s1_job.ts_task_id = '2'
        md = {
            'array_layout': 'Baseline',
            'site': 'LaPalma',
            'particle': 'gamma',
            'phiP': 180.0,
            'thetaP': 20.0
        }
        s1_job.set_meta_data(md)
        s1_job.set_file_meta_data({'nsb': 1})
        s1_job.setupWorkflow(debug=True)
        # submit to the WMS for debug
        s1_job.setDestination('LCG.IN2P3-CC.fr')
        res = submit_wms(s1_job)
    elif mode == 'TS':
        s1_job.base_path = '/vo.cta.in2p3.fr/MC/PROD3_Test'
        in_mq = get_dataset_MQ(dataset_name)
        # output meta data starts as a copy of the input query
        out_md = copy(in_mq)
        s1_job.set_meta_data(out_md)
        s1_job.set_file_meta_data(nsb=out_md['nsb']['='],
                                  split=out_md['split'])
        # NOTE(review): the input meta query is emptied before submission, so the
        # transformation receives no input query -- confirm this is intended
        in_mq = {}
        s1_job.ts_task_id = '@{JOB_ID}'  # dynamic
        s1_job.setupWorkflow(debug=False)
        s1_job.setType('EvnDisp3')  # mandatory *here*
        res = submit_trans(s1_job, trans_name, in_mq, group_size)
    else:
        DIRAC.gLogger.error('1st argument should be the job mode: WMS or TS,\n\
                             not %s' % mode)
        return None

    return res