Example #1
#!/usr/bin/env python

"""
Create ROOT DST output from LCIO recon files using the HPS DST Maker.
"""

from hpsmc.job import Job
from hpsmc.tools import DST 

# Initialize the job.
job = Job(name="DST job")
job.initialize()
params = job.params
input_files = job.params.input_files
output_files = job.params.output_files

# Run DST maker.
dst = DST(inputs=input_files.keys(),
          outputs=output_files.keys())

# Add job components.
job.components.append(dst)

# Run the job.
job.run()
Example #2
#!/usr/bin/env python

import os
from hpsmc.job import Job
from hpsmc.tools import LCIOCount

job = Job(name="LCIO count job")
job.initialize()

output_files = sorted(job.params.output_files.keys())
if len(output_files) < 1:
    raise Exception(
        "Not enough output files were provided (at least 1 required).")

nevents = job.params.nevents

count = LCIOCount(minevents=nevents, inputs=output_files)
job.components = [count]
job.enable_copy_output_files = False
job.run()
Example #3
"""
Python script for generating 'wab-beam-tri' events.
"""

import sys, random

import hpsmc.func as func
from hpsmc.job import Job
from hpsmc.run_params import RunParameters
from hpsmc.generators import MG5, StdHepConverter
from hpsmc.tools import StdHepTool, SLIC, JobManager

# job init
job = Job(name="tritrig job")
job.initialize()
params = job.params

# expected input files
tri_input = "tri.lhe.gz"
wab_input = "wab.lhe.gz"
beam_input = "beam.stdhep"

# check for required inputs
if beam_input not in params.input_files:
    raise Exception("Missing '%s' input file." % beam_input)
if wab_input not in params.input_files:
    raise Exception("Missing '%s' input file." % wab_input)
if tri_input not in params.input_files:
    raise Exception("Missing '%s' input file." % tri_input)

# get run params
run_params = RunParameters(key=params.run_params)
Example #4
from hpsmc.generators import MG4, StdHepConverter
from hpsmc.job import Job
from hpsmc.run_params import RunParameters
from hpsmc.tools import Unzip, StdHepTool

ap = MG4(description="Generate A-prime events with APMASS param",
         name="ap",
         run_card="run_card_1pt05.dat",
         params={"APMASS": 40.0},
         outputs=["ap"],
         rand_seed=1,
         nevents=500)

unzip = Unzip(inputs=["ap_events.lhe.gz"])

rand_seed = 1
z = 1.0
displ = StdHepTool(name="lhe_tridents_displacetime",
                   inputs=["ap_events.lhe"],
                   outputs=["ap.stdhep"],
                   args=["-s", str(rand_seed), "-l",
                         str(z)])

dump = StdHepTool(name="print_stdhep", inputs=["ap.stdhep"])

job = Job(name="AP test", components=[ap, unzip, displ, dump])
job.params = {}

job.run()
Example #5
"""
Python script for running the SLIC detector simulation on stdhep input files.
"""

import os

from hpsmc.job import Job
from hpsmc.tools import SLIC, LCIOConcat

# job init
job = Job(name="SLIC job")
job.initialize()
params = job.params

if not len(params.input_files):
    raise Exception("Input file list is empty!")

if (len(params.input_files) != len(params.output_files)) and len(
        params.output_files) != 1:
    raise Exception("Input and output file lists must be the same length, "
                    "or there must be exactly one output file!")

# create a SLIC component for each input file
output_files = []
for io in zip(sorted(params.input_files.keys()),
              sorted(params.output_files.keys())):
    # each output file is named from the JSON list
    fname = os.path.splitext(io[1])[0] + ".slcio"
    output_files.append(fname)

    # generate events in slic using the input stdhep file
    slic = SLIC(description="Run detector simulation using SLIC",
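                # The original listing is cut off here; the remaining arguments
                # are an assumption, following the inputs/outputs convention of
                # the other components in these examples.
                inputs=[io[0]],
                outputs=[fname])
    job.components.append(slic)

# Assumed ending: if a single output file was given, the imported LCIOConcat
# component presumably merges the per-input SLIC outputs into it.
if len(params.output_files) == 1:
    concat = LCIOConcat(inputs=output_files,
                        outputs=list(params.output_files.keys()))
    job.components.append(concat)

job.run()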
Example #6
from hpsmc.job import Job
from hpsmc.tools import HPSTR

# Initialize the job.
job = Job(name="HPSTR job")
job.initialize()

params = job.params
input_files = params.input_files
output_files = params.output_files

# Figure out input and output file lists depending on whether
# JSON data is a dict or list.
if isinstance(params.input_files, dict):
    infiles = params.input_files.keys()
else:
    infiles = params.input_files
if isinstance(params.output_files, dict):
    outfiles = params.output_files.keys()
else:
    outfiles = params.output_files

hpstr = HPSTR(cfg="recoTuple_cfg.py",
              run_mode=params.run_mode,
              inputs=infiles,
              outputs=outfiles)

job.components.append(hpstr)

job.run()
Example #7
#!/usr/bin/env python
"""
Python script for generating 'wab-beam' events.
"""
import sys, random

import hpsmc.func as func
from hpsmc.job import Job
from hpsmc.run_params import RunParameters
from hpsmc.generators import MG5, StdHepConverter
from hpsmc.tools import StdHepTool, SLIC

# job init
job = Job(name="tritrig job")
job.initialize()
params = job.params

# expected input files
wab_input = "wab.lhe.gz"
beam_input = "beam.stdhep"

# check for required inputs
if beam_input not in params.input_files:
    raise Exception("Missing '%s' input file in job params." % beam_input)
if wab_input not in params.input_files:
    raise Exception("Missing '%s' input file in job params." % wab_input)

# get run params
run_params = RunParameters(key=params.run_params)

# calculate mu for wab sampling
Example #8
#!/usr/bin/env python
"""
Python script for generating 'simp' events in MG5 and running them through simulation, readout and reconstruction.
"""

import sys, random

from hpsmc.job import Job
from hpsmc.run_params import RunParameters
from hpsmc.generators import MG5, StdHepConverter
from hpsmc.tools import SLIC, JobManager, FilterMCBunches, StdHepTool

job = Job(name="simp job")
job.initialize()

params = job.params

# used for intermediate file names
procname = "simp"

# generate tritrig in MG5
mg = MG5(
    name=procname,
    #run_card="run_card_"+params.run_params+".dat",
    run_card="run_card.dat",
    param_card="param_card.dat",
    params={
        "map": params.map,
        "mpid": params.mpid,
        "mrhod": params.mrhod
    },
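    # Assumed completion; the original listing is truncated here. The remaining
    # arguments follow the MG5 usage in the 'tritrig' example later in this
    # collection.
    outputs=[procname],
    nevents=params.nevents)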
Example #9
#!/usr/bin/env python

import sys, os, argparse

from hpsmc.job import Job
from hpsmc.generators import MG4, StdHepConverter
from hpsmc.tools import Unzip, StdHepTool, SLIC, FilterMCBunches, JobManager, LCIOTool

job = Job(name="AP job")
job.initialize()

params = job.params
filename = "aprime"

# generate A-prime events using Madgraph 4
ap = MG4(name="ap",
         run_card="run_card_" + params.run_params + ".dat",
         params={"APMASS": params.apmass},
         outputs=[filename],
         nevents=params.nevents)

# unzip the LHE events to local file
unzip = Unzip(inputs=[filename + "_events.lhe.gz"])

# displace the time of decay using ctau param
displ = StdHepTool(name="lhe_tridents_displacetime",
                   inputs=[filename + "_events.lhe"],
                   outputs=[filename + ".stdhep"],
                   args=["-l", str(params.ctau)])

# rotate events into beam coordinates and move vertex by 5 mm
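# Assumed continuation; the original listing is truncated here. The rotation
# presumably uses the 'beam_coords' StdHepTool seen in the beam example later
# in this collection; the arguments for the 5 mm vertex shift are not shown.
rot = StdHepTool(name="beam_coords",
                 inputs=[filename + ".stdhep"],
                 outputs=[filename + "_rot.stdhep"])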
Example #10
#!/usr/bin/env python

from hpsmc.job import Job
from hpsmc.generators import EGS5
from hpsmc.run_params import RunParameters

job = Job(name="EGS5 beam_v3 job")
job.initialize()

params = job.params

egs5 = EGS5(name="beam_v3",
            bunches=params.bunches,
            run_params=RunParameters(key=params.run_params),
            outputs=["beam.stdhep"])

job.components = [egs5]

job.run()
Example #11
#!/usr/bin/env python
"""
Python script for generating 'tritrig' events in MG5 and running them through simulation, readout and reconstruction.
"""

import sys, random

from hpsmc.job import Job
from hpsmc.run_params import RunParameters
from hpsmc.generators import MG5, StdHepConverter
from hpsmc.tools import SLIC, JobManager, FilterMCBunches, StdHepTool

job = Job(name="tritrig job")
job.initialize()

params = job.params

# used for intermediate file names
procname = "tritrig"

# generate tritrig in MG5
mg = MG5(name=procname,
         run_card="run_card_" + params.run_params + ".dat",
         outputs=[procname],
         nevents=params.nevents)

# convert LHE output to stdhep
stdhep_cnv = StdHepConverter(run_params=RunParameters(key=params.run_params),
                             inputs=[procname + "_events.lhe.gz"],
                             outputs=[procname + ".stdhep"])
Example #12
from hpsmc.job import Job
from hpsmc.tools import JobManager

job = Job(name="Job Manager Test")
job.initialize()
params = job.params

mgr = JobManager(steering_file=params.steering_file, inputs=params.input_files)
job.components = [mgr]
job.run()
Example #13
#!/usr/bin/env python
"""
Python script for running 'simp' readout events through reconstruction.
"""

import sys, random

from hpsmc.job import Job
from hpsmc.tools import SLIC, JobManager, FilterMCBunches

job = Job(name="simp job")
job.initialize()

params = job.params
input_files = params.input_files

simp_files = []
for input_file in input_files:
    simp_files.append(input_file)

procname = "simp"
# run physics reconstruction
recon = JobManager(
    steering_resource=params.recon_steering,
    java_args=["-DdisableSvtAlignmentConstants"],
    defs={
        "detector": params.detector,
        "run": params.run
    },
    #inputs=[procname+"_readout.slcio"],
    inputs=simp_files,
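    # Assumed completion; the original listing is truncated here. The output
    # name is a guess that follows the intermediate-file naming used above.
    outputs=[procname + "_recon.slcio"])

job.components = [recon]
job.run()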
Example #14
#!/usr/bin/env python

from hpsmc.job import Job
from hpsmc.generators import EGS5
from hpsmc.run_params import RunParameters

# default params with 1.05 GeV beam params and 500 bunches of electrons
default_params = {"run_params": "1pt05", "bunches": 500}

job = Job(name="EGS5 beam_v5 job")
job.set_default_params(default_params)
job.initialize()

params = job.params

# generates a file called 'beam.stdhep'
egs5 = EGS5(name="beam_v5",
            bunches=params['bunches'],
            run_params=RunParameters(key=params['run_params']),
            outputs=["beam.stdhep"])

job.components = [egs5]

job.run()
Example #15
import hpsmc.func as func
from hpsmc.job import Job
from hpsmc.run_params import RunParameters
from hpsmc.generators import MG5, StdHepConverter
from hpsmc.tools import StdHepTool, SLIC

def_params = {
    "run_params": "1pt05",
    "z": 0.0,
    "wab_filename": "wab.lhe.gz",
    "beam_filename": "beam.stdhep",
    "nevents": 500000
}

# job init with default params
job = Job(name="wab-beam job")
job.set_default_params(def_params)
job.initialize()

# get job params
params = job.params

# expected input files
wab_input = params["wab_filename"]
beam_input = params["beam_filename"]

# check for required input file names
if beam_input not in params.input_files:
    raise Exception("Missing expected beam file '%s' in input files." %
                    beam_input)
if wab_input not in params.input_files:
    raise Exception("Missing expected wab file '%s' in input files." %
                    wab_input)
Example #16
"""
Script to generate 'tritrig-wab-beam' events (Oh my!) from sets of input LCIO files.

Based on this Auger script:

https://github.com/JeffersonLab/hps-mc/blob/master/scripts/mc_scripts/readout/tritrig-wab-beam_1pt05_Nominal.xml

This script accepts an arbitrary number of input files per job but the two lists for 'tritrig' and 'wab-beam'
must be the same length or an error will be raised.

"""

from hpsmc.job import Job
from hpsmc.tools import FilterMCBunches, LCIOTool, JobManager

job = Job(name="tritrig job")
job.initialize()

params = job.params
input_files = params.input_files

tritrig_files = []
wab_files = []
for input_file in input_files:
    if "tritrig" in input_file:
        tritrig_files.append(input_file)
    elif "wab-beam" in input_file:
        wab_files.append(input_file)

if len(tritrig_files) != len(wab_files):
    raise Exception(
        "The 'tritrig' and 'wab-beam' input file lists are not the same length.")
Example #17
#!/usr/bin/env python
"""
Python script for running generated 'simp' events through pile-up simulation and readout.
"""

import sys, random

from hpsmc.job import Job
from hpsmc.tools import SLIC, JobManager, FilterMCBunches

job = Job(name="simp job")
job.initialize()

params = job.params
input_files = params.input_files

simp_files = []
for input_file in input_files:
    simp_files.append(input_file)

procname = "simp"

# insert empty bunches expected by pile-up simulation
filter_bunches = FilterMCBunches(java_args=["-DdisableSvtAlignmentConstants"],
                                 inputs=simp_files,
                                 outputs=[procname + "_filt.slcio"],
                                 ecal_hit_ecut=0.05,
                                 enable_ecal_energy_filter=True,
                                 nevents=2000000,
                                 event_interval=250)
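
# Assumed ending; the original listing is truncated here. The description
# suggests a readout JobManager step would normally follow the bunch filter.
job.components = [filter_bunches]
job.run()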
Example #18
from hpsmc.generators import MG5, StdHepConverter
from hpsmc.job import Job
from hpsmc.run_params import RunParameters

mg5 = MG5(description="Generate tritrig events using MG5",
          name="tritrig",
          run_card="run_card_1pt05.dat",
          outputs=["tritrig"],
          nevents=1000)

stdhep_cnv = StdHepConverter(
    description="Convert LHE events to StdHep using EGS5",
    run_params=RunParameters(key="1pt05"),
    inputs=["tritrig_events.lhe.gz"],
    outputs=["tritrig.stdhep"])

job = Job(name="MG5 test", components=[mg5, stdhep_cnv])
job.params = {}

job.run()
Example #19
from hpsmc.job import Job
from hpsmc.generators import EGS5
from hpsmc.run_params import RunParameters

egs5 = EGS5(name="moller_v3",
            bunches=5000,
            run_params=RunParameters(key="1pt05"),
            outputs=["events.stdhep"])

job = Job(name="EGS5 Test", components=[egs5])
job.params = {}

job.run()
Example #20
#!/usr/bin/env python
"""
Creates a tar archive with ROOT tuple files from LCIO recon file inputs.
"""

import sys, random, os.path

from hpsmc.job import Job
from hpsmc.tools import JobManager, TarFiles, MakeTree

# Initialize the job.
job = Job(name="Make tuples job")
job.initialize()
params = job.params
input_files = sorted(job.params.input_files.keys())
output_files = job.params.output_files

output_base, ext = os.path.splitext(input_files[0])
if (len(input_files) > 1):
    # Strip off extension from name if multiple file inputs.
    output_base, ext = os.path.splitext(output_base)

# Job parameters may optionally specify number of events to read from LCIO file.
if hasattr(params, "nevents"):
    nevents = params.nevents
else:
    nevents = -1

# Make text tuple outputs.
make_tuples = JobManager(steering_resource=params.tuple_steering,
                         run=params.run,
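                         # Assumed completion; the original listing is
                         # truncated here. How the event limit and the tuple
                         # outputs are passed, and the MakeTree/TarFiles steps
                         # that presumably follow, are not shown.
                         inputs=input_files)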
Example #21
from hpsmc.job import Job
from hpsmc.generators import MG4, StdHepConverter
from hpsmc.tools import Unzip, StdHepTool, MoveFiles

# define default job parameters
def_params = {
    "nevents": 10000,
    "run_params": "1pt05",
    "apmass": 40.0,
    "ctau": 1.0,
    "z": -5.0,
}

# setup job including default parameters
job = Job(name="AP event gen job")
job.set_default_params(def_params)
job.initialize()

# get job params
params = job.params

# base file name
filename = "aprime"

# generate A-prime events using Madgraph 4
ap = MG4(name="ap",
         run_card="run_card_" + params['run_params'] + ".dat",
         params={"APMASS": params['apmass']},
         outputs=[filename],
         nevents=params['nevents'])
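
# Assumed continuation based on the 'AP job' example earlier in this collection
# (the original listing is truncated here): unzip the LHE output and displace
# the decay time using the ctau parameter. A MoveFiles step may also follow in
# the original script, but its arguments are not shown.
unzip = Unzip(inputs=[filename + "_events.lhe.gz"])

displ = StdHepTool(name="lhe_tridents_displacetime",
                   inputs=[filename + "_events.lhe"],
                   outputs=[filename + ".stdhep"],
                   args=["-l", str(params['ctau'])])

job.components = [ap, unzip, displ]
job.run()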
Example #22
"""
This script ignores the provided output destination and automatically names the output
file from the input source, appending the sequence numbers of the processed input files.
"""

import os
from hpsmc.job import Job
from hpsmc.tools import LCIOConcat

def split_file_number(filename):
    basefile = os.path.basename(os.path.splitext(filename)[0])
    file_number = basefile[basefile.rfind('_')+1:]
    basefile = basefile[:basefile.rfind('_')]
    return basefile, file_number

job = Job(name="LCIO concat job")
job.initialize()

input_files = sorted(job.params.input_files.keys())
if len(input_files) < 2:
    raise Exception("Not enough input files were provided (at least 2 required).")

"""
If no output file mapping is provided explicitly, then an auto-naming scheme is used which
concatenates the start and end file numbers onto the base name of the first input file.
"""
if not len(job.params.output_files):
    
    output_basename, start_filenum = split_file_number(job.params.input_files[input_files[0]])
    dontcare, end_filenum = split_file_number(job.params.input_files[input_files[-1]])
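    # Assumed continuation; the original listing is truncated here. Build the
    # auto-generated output name described above from the base name and the
    # first and last input file numbers.
    output_files = ["%s_%s-%s.slcio" % (output_basename, start_filenum, end_filenum)]
else:
    output_files = sorted(job.params.output_files.keys())

# Concatenate all inputs into the output file. LCIOConcat taking 'inputs' and
# 'outputs' lists is an assumption based on the other tools in these examples.
concat = LCIOConcat(inputs=input_files, outputs=output_files)
job.components = [concat]
job.run()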
    
Example #23
from hpsmc.job import Job
from hpsmc.tools import JobManager

# engrun2015 default settings
def_params = {
    "detector": "HPS-EngRun2015-Nominal-v6-0-fieldmap",
    "run": 5772,
    "readout_steering": "/org/hps/steering/readout/EngineeringRun2015TrigPairs1_Pass2.lcsim"
}

# job init
job = Job(name="engrun2015 readout job")
job.set_default_params(def_params)
job.initialize()
params = job.params

# set number of events if present in params
nevents = -1
if "nevents" in params:
    nevents = params['nevents']

# run readout on all input files, assigning input files to output files from JSON names
for io in zip(sorted(params.input_files.keys()),
              sorted(params.output_files.keys())):

    # run simulated events in readout to generate triggers
    readout = JobManager(
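        # Assumed completion; the original listing is truncated here. The
        # arguments mirror the JobManager usage in the recon examples; how the
        # 'nevents' limit is passed is not shown.
        steering_resource=params.readout_steering,
        defs={"detector": params.detector, "run": params.run},
        inputs=[io[0]],
        outputs=[io[1]])
    job.components.append(readout)

job.run()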
Example #24
"""

import os
from hpsmc.job import Job
from hpsmc.tools import StdHepTool, MoveFiles

# default parameters
def_params = { 
    "beam_sigma_x": 0.300,
    "beam_sigma_y": 0.030,
    "target_z": 0.0,
    "beam_rotation": 0.0305
}

# define job with defaults
job = Job(name="beam job")
job.set_default_params(def_params)
job.initialize()

# get params
params = job.params

# create component to rotate into beam coords for each input
rotated_files = []
for i in job.input_files.keys():
   
    fname = os.path.splitext(i)[0] + "_rot.stdhep"
    rotated_files.append(fname)

    rot = StdHepTool(name="beam_coords",
                 inputs=[i],
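                 # Assumed completion; the original listing is truncated here.
                 # How the beam-parameter defaults above are passed as 'args'
                 # is not shown, so they are omitted from this sketch.
                 outputs=[fname])
    job.components.append(rot)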
Example #25
from hpsmc.job import Job
from hpsmc.tools import JobManager

# engrun2015 default parameters
def_params = {
    "nevents": -1,
    "detector": "HPS-EngRun2015-Nominal-v6-0-fieldmap",
    "run": 5772,
    "recon_steering": "/org/hps/steering/recon/EngineeringRun2015FullReconMC.lcsim",
}

# job init
job = Job(name="MC reconstruction using EngRun2015 configuration")
job.set_default_params(def_params)
job.initialize()
params = job.params

# at least one input file is required
if not len(params.input_files):
    raise Exception("Input file list is empty!")

# at least one output file is required
if not len(params.output_files):
    raise Exception("Output file list is empty!")

# if using multiple input and multiple output files, the lists have to be same length
if (len(params.input_files) != len(params.output_files)) and len(
        params.output_files) != 1: