Code example #1
File: iobench.py  Project: rushioda/PIXELVALID_athena
    def run( self ):

        self.data = []
        self.chronoStats = []
        self.athena.jobOptions += [
            ChapPy.JobOptions( "TestTools/IoAuditor_fragment.py" ),
            ChapPy.JobOptionsCmd( "svcMgr.ChronoStatSvc.AsciiStatsOutputFile = \"%s\"" % self.ioStatsFileName )
            ]
        for i in range(self.nTimes):
            self.athena.logFile = open( "%s.%s" % (self.logFileName,i), "w" )
            iStart = getResources()
            out = self.athena.run()
            self.data.append( getResources() - iStart )
            self.chronoStats.append( ChronoStatReport(self.ioStatsFileName) )
            pass

        return
Code example #2
def testPerfMon(jobOptName="PerfMonTests/test_perfMonSvc_noopalg.py",
                perfMonFileName="perfmon.noopalg.pmon.gz",
                evtMax=50000):
    print ""
    print "#" * 80
    print "## testing PerfMonSvc [%s]" % jobOptName
    print "                  ==> [%s]... (%i)" % (perfMonFileName, evtMax)

    refPerfMonFileName = "ref." + os.path.basename(perfMonFileName)
    chkPerfMonFileName = "chk." + os.path.basename(perfMonFileName)
    outPerfMonFileName = "ana." + os.path.basename(perfMonFileName)

    ## create the reference file
    athena = ChapPy.Athena(jobOptions=[ChapPy.JobOptions(jobOptName)],
                           checkLeak=False,
                           logFile=refPerfMonFileName + ".log")
    athena.EvtMax = evtMax
    athena.run()
    os.rename("perfmon.pmon.gz", refPerfMonFileName)

    ## create the to-be-checked file
    athena = ChapPy.Athena(jobOptions=[ChapPy.JobOptions(jobOptName)],
                           checkLeak=False,
                           logFile=chkPerfMonFileName + ".log")
    athena.EvtMax = evtMax
    athena.run()
    os.rename("perfmon.pmon.gz", chkPerfMonFileName)

    print " :::running [perfmon.py]..."
    cmd = "perfmon.py -o %s %s %s --labels 'chk,ref'" % \
          ( outPerfMonFileName, chkPerfMonFileName, refPerfMonFileName )
    sc, out = commands.getstatusoutput(cmd)
    if sc != 0:
        print "## Problem while doing [perfmon] !!"
        print out
    else:
        print "## [DONE]"

    return sc
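
A minimal, hedged usage sketch for testPerfMon() above, assuming an Athena release where the PerfMonTests job options and perfmon.py are available; the event count and file names are illustrative only:

import sys

if __name__ == "__main__":
    # run the noop-algorithm benchmark with a reduced event count
    sc = testPerfMon(jobOptName="PerfMonTests/test_perfMonSvc_noopalg.py",
                     perfMonFileName="perfmon.noopalg.pmon.gz",
                     evtMax=500)
    sys.exit(sc)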
Code example #3
def testPerfMon(jobOptName="PerfMonTests/test_perfMonSvc_noopalg.py",
                perfMonFileName="perfmon.noopalg.root",
                evtMax=50000):
    print("")
    print("#" * 80)
    print("## testing PerfMonSvc [%s]" % jobOptName)
    print("                  ==> [%s]... (%i)" % (perfMonFileName, evtMax))

    perfMonFileName = workDir(perfMonFileName)
    refPerfMonFileName = "ref." + os.path.basename(perfMonFileName)
    chkPerfMonFileName = "chk." + os.path.basename(perfMonFileName)
    outPerfMonFileName = "ana." + os.path.basename(perfMonFileName)

    refPerfMonFileName = refPerfMonFileName.replace(".root", ".pmon.gz")
    chkPerfMonFileName = chkPerfMonFileName.replace(".root", ".pmon.gz")

    ## create the reference file
    athena = ChapPy.Athena(jobOptions=[
        ChapPy.JobOptions(jobOptName),
        ChapPy.JobOptionsCmd("jobproperties.PerfMonFlags.OutputFile = '%s'" %
                             refPerfMonFileName)
    ],
                           checkLeak=False,
                           logFile=refPerfMonFileName + ".log")
    athena.EvtMax = evtMax
    sc = athena.run()
    if sc != 0:
        print("ERROR: could not create the 'ref' perfmon file !!")
        return ScOutput(sc, "ERROR")

    ## create the to-be-checked file
    athena = ChapPy.Athena(jobOptions=[
        ChapPy.JobOptions(jobOptName),
        ChapPy.JobOptionsCmd("jobproperties.PerfMonFlags.OutputFile = '%s'" %
                             chkPerfMonFileName)
    ],
                           checkLeak=False,
                           logFile=chkPerfMonFileName + ".log")
    athena.EvtMax = evtMax
    sc = athena.run()
    if sc != 0:
        print("ERROR: could not create the 'chk' perfmon file !!")
        return ScOutput(sc, "ERROR")

    #outPerfMonFileName = "ana." + os.path.basename( perfMonFileName )
    print(" :::running [perfmon.py]...")
    cmd = "perfmon.py %s %s -o %s" % \
          ( chkPerfMonFileName, refPerfMonFileName, outPerfMonFileName )
    sc, out = subprocess.getstatusoutput(cmd)
    if sc != 0:
        print("## Problem while doing [perfmon] !!")
        print(out)
        out = "ERROR"
        return ScOutput(sc, out)

    out = "OK"
    print("## [DONE]")
    return ScOutput(sc, out)
Code example #4
uuid = "%s_%s" % (os.getpid(),timeofday())
genEventFile = workDir("mc.event.%s.pool"%uuid)
mcAodFile    = workDir("mc.aod.%s.pool"%uuid)

print "#"*80
print "## testing TruthParticleContainer I/O..."
print "## Job uuid:",uuid
benchSequence = BenchSequence( "TruthParticleContainer I/O" )

###-----------------------------------------------------
if os.path.exists(genEventFile):
    print "## Re-using input data..."
else:
    print "## Preparing input data..."
    jobOptions = [
        ChapPy.JobOptionsCmd( "OUTPUT=\"%s\"" % genEventFile ),
        ChapPy.JobOptionsCmd( "DUMP=False" ),
        ChapPy.JobOptions( "McParticleTests/iotest_WriteGenEvent_jobOptions.py" ),
        ]
    athena = ChapPy.Athena( jobOptions = jobOptions,
                            checkLeak  = False )
    athena.EvtMax = EVTMAX
    athena.run()
    pass

###-----------------------------------------------------
print "\n\n"
print "#"*80
print "## Testing [writing]..."
jobOptions = [
    ChapPy.JobOptionsCmd( "INPUT=[\"%s\"]" % genEventFile ),
Code example #5
DUMP = True;
include( 'McParticleTests/rbtest_ReadGenEvent_jobOptions.py' );
outStream.OutputFile = '%(OutputFile)s';
topSequence.GenEventAsciiWriter.McWriter.McEvents = 'TruthEvent';
topSequence.GenEventAsciiWriter.McWriter.Output = '%(AsciiChk)s';
"""

if 0:
    ###-----------------------------------------------------
    print "\n"
    print "#" * 80
    print "## Testing [reading 12.0.4 data]..."
    jobOptions = [
        ChapPy.JobOptionsCmd(
            templateJobO % {
                'InputFile': asciiFiles['rel_1204']['in'],
                'OutputFile': workDir("reaccessed.mc.events_rel12.pool"),
                'AsciiChk': asciiFiles['rel_1204']['chk'],
            })
    ]
    athena = ChapPy.Athena(jobOptions=jobOptions,
                           logFile=workDir("read.genevent.rel_1204.log"),
                           checkLeak=False)
    athena.EvtMax = 10
    athena.run()

    ###-----------------------------------------------------
    print "\n"
    print "#" * 80
    bench += doValidation(asciiFiles, 'rel_1204')
    pass
Code example #6
## first, create an input BS file
input_file_list = ['my1.data', 'my2.data', 'my3.data']
output_file_name = 'reaccessed.my.data'

## import PyUtils.AthFile as af
## af.server.flush_cache()

import os
import AthenaCommon.ChapPy as accp
num_evt = 0
run_num = 0
pos = 44
for input_file_name in input_file_list:
    num_evt += 5
    run_num += 1
    app = accp.AthenaApp()
    app << """
    import AthenaCommon.AtlasUnixGeneratorJob
    theApp.EvtMax = %(num_evt)s
    svcMgr.EventSelector.RunNumber = %(run_num)s
    include( "ByteStreamCnvSvc/RDP_ByteStream_jobOptions.py" )
    from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
    athenaCommonFlags.BSRDOOutput = '%(input_file_name)s'
    from ByteStreamCnvSvc import WriteByteStream
    StreamBSFileOutput = WriteByteStream.getStream( "EventStorage", "StreamBSFileOutput" )
    """ % globals()

    print "=== create an elephantino file..."
    rc = app.run(stdout=os.devnull)
    if rc:
        raise RuntimeError(rc)
Code example #7
import commands
from AthenaCommon import ChapPy

###-----------------------------------------------------
## For compatibility with ATN tests
from TestTools.iobench import workDir

###-----------------------------------------------------
## Little helper to validate output of jobs
from TestTools.iobench import ScOutput
from TestTools.iobench import BenchSequence

print "#" * 80
print "## StoreGate test... [producer/consumer-bench]"
print "#" * 80
athena = ChapPy.Athena(jobOptions=[
    ChapPy.JobOptions("StoreGateTests/test_sgProducerConsumer_jobOptions.py")
])
athena.EvtMax = 100
sc = athena.run()
if sc != 0: print "ERROR"
else:
    commands.getstatusoutput("perfmon.py %s -o %s" %
                             ("perfmon.pmon.gz", "out.sg.perfmon.root"))
    print "All tests SUCCESSFULLY completed"

print ""
print "#" * 80
print "## Bye."
print "#" * 80
Code example #8
def merge_pool_files(input_files, output_file,
                     nevts=-1,
                     msg=None,
                     logfile=None):
    """take a bunch of input pool files and produce a single one.
    autoconfiguration (through RecExCommon) is (attempted to be) performed.
    """
    if msg is None:
        from .Logging import logging
        msg = logging.getLogger('pool-merge')
        msg.setLevel(logging.INFO)

    import sys
    if logfile is None:
        logfile = sys.stdout
    else:
        logfile = open(logfile, 'w')

    """
    # XXX: should we get rid of duplicates ?
    #input_files = list(set(input_files))
    if len(input_files) <= 1:
        msg.error('not enough input files: %s', input_files)
        return 2
        
    import PyUtils.AthFile as af
    try:
        af.server
    except (RuntimeError,), err:
        # FIXME: we should not rely on such fragile error detection
        if err.message == "AthFileServer already shutdown":
            af.restart_server()
            
    # optimization...
    try:
        _af_cache_fname = 'recexcommon-afserver-cache.ascii'
        af.server.load_cache(_af_cache_fname)
    except (IOError,), err:
        msg.info('could not load AthFile.server cache from [%s]:\n%s',
                 _af_cache_fname, err)

    # another optimization
    _af_cache_fname = 'merge-poolfiles-afserver-cache.ascii'
    fi = af.fopen(input_files[0])
    af.server.save_cache(_af_cache_fname)

    # make sure we do deal with POOL files
    if fi.infos['file_type'] != 'pool':
        msg.error('all input files are not POOL ones !')
        return 3

    # guess input file type...
    input_type = af._guess_file_type(input_files[0], msg)
    """
    
    import AthenaCommon.ChapPy as api
    app = api.AthenaApp()

    import textwrap
    app << textwrap.dedent("""\
    # automatically generated joboptions file

    # input files configuration
    from AthenaCommon.AthenaCommonFlags import athenaCommonFlags as acf
    input_files = %(input-files)s

    import AthenaCommon.Logging as _L
    msg = _L.log

    # events to process
    acf.EvtMax = EvtMax = theApp.EvtMax = %(evts)s

    # configure the copy job
    import AthenaPython.ConfigLib as apcl
    cfg = apcl.AutoCfg(name='merge-files',
                       input_files=input_files,
                       output_file='%(output-file)s')
    cfg.configure_job()

    if cfg.is_rdo() or cfg.is_esd() or cfg.is_aod() or cfg.is_tag():
        # main jobos
        include ('RecExCond/RecExCommon_flags.py')
        include ('RecExCommon/RecExCommon_topOptions.py')
    elif cfg.is_hits():
        import AthenaCommon.DetFlags as acdf
        acdf.DetFlags.detdescr.all_setOn()
        import AtlasGeoModel.SetGeometryVersion
        import AtlasGeoModel.GeoModelInit
        import AtlasGeoModel.SetupRecoGeometry
    else:
        pass
        
    # adding the merged output-stream
    
    """) % {
        #'input-type'   : input_type.upper(),
        'input-files'  : input_files,
        'output-file'  : output_file,
        'evts'         : nevts,
        #'af-cache-name': _af_cache_fname,
        }
    
    msg.info(':'*40)
    msg.info('input files: %s', input_files)
    #msg.info('input type:  %s', input_type)
    msg.info('events:      %s', nevts)
    msg.info('output file: %s', output_file)
    msg.info(':'*40)
    msg.info('running merger...')
    
    import AthenaCommon.ExitCodes as ath_codes
    sc = app.run(stdout=logfile)
    
    msg.info('running merger... [done]')
    msg.info('athena status-code: sc=[%s] (%s)', sc, ath_codes.what(sc))

    if logfile not in (sys.stdout, sys.stderr):
        logfile.close()
        pass

    return sc
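
A hedged usage sketch for merge_pool_files(); the input and output names are illustrative, and an Athena release providing ChapPy and RecExCommon is assumed:

import sys

if __name__ == '__main__':
    sc = merge_pool_files(
        input_files=['aod.1.pool', 'aod.2.pool'],  # hypothetical inputs
        output_file='merged.aod.pool',             # hypothetical output
        nevts=-1,                                  # process all events
        logfile='merge-poolfiles.log')             # athena log goes here
    sys.exit(sc)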
Code example #9
# @file mp_genevt_test.py
# @purpose: simple file to create a few ttbar events and read them back
#           with athena-mp

from __future__ import print_function

input_file_name = 'mc.event.pool'
output_file_name = 'reaccessed.mc.event.pool'

import PyUtils.AthFile as af
af.server.flush_cache()

import os
import AthenaCommon.ChapPy as accp
app = accp.AthenaApp()
app << """
EVTMAX=1000
OUTPUT='%(input_file_name)s'
""" % globals()
app.include('McParticleTests/iotest_WriteGenEvent_jobOptions.py')

evt_logfile = open('mp.evgen.logfile.txt', 'w+')
print("=== create an EVGEN file...")
rc = app.run(stdout=evt_logfile)
if rc:
    raise RuntimeError(rc)
print("=== create an EVGEN file... [ok]")

app = accp.AthenaApp(cmdlineargs=['--nprocs=-1'])
app << """
###-----------------------------------------------------
## For compatibility with ATN tests
from TestTools.iobench import workDir

###-----------------------------------------------------
## Little helper to validate output of jobs
from TestTools.iobench import ScOutput
from TestTools.iobench import BenchSequence

print "#" * 80
print "## StoreGate test... [producer/consumer-DataPool-bench]"
print "#" * 80
print ":::   No DataPool ",
athena = ChapPy.Athena(jobOptions=[
    ChapPy.JobOptionsCmd("SGDATAPOOL=False"),
    ChapPy.JobOptions("StoreGateTests/test_sgProducerConsumer_jobOptions.py"),
    ChapPy.JobOptionsCmd("jp.PerfMonFlags.OutputFile = 'nodatapool.pmon.gz'"),
])
athena.EvtMax = 100
sc = athena.run()
if sc != 0:
    print "ERROR"
    sys.exit(sc)

print "::: With DataPool ",
athena = ChapPy.Athena(jobOptions=[
    ChapPy.JobOptionsCmd("SGDATAPOOL=True"),
    ChapPy.JobOptions("StoreGateTests/test_sgProducerConsumer_jobOptions.py"),
    ChapPy.JobOptionsCmd(
        "jp.PerfMonFlags.OutputFile = 'withdatapool.pmon.gz'"),
])
Code example #11
def doReadWriteTest(genName="pythia", evtMax=100):
    """A simple wrapper around the read/write tests..."""
    genName = genName.lower()
    ###-----------------------------------------------------
    print("")
    print("#" * 80)
    print("## Testing [writing-%s]..." % genName)
    templateJobO = """
OUTPUT='%(OutputFile)s';
DUMPTUPLE=True;
GENERATOR='%(Generator)s';
TUPLEFILENAME='%(TupleFile)s';
include( 'McParticleTests/iotest_WriteGenEvent_jobOptions.py' );
jobproperties.PerfMonFlags.OutputFile = '%(PmonFile)s';
"""
    jobOptions = [
        ChapPy.JobOptionsCmd( templateJobO % {
        'OutputFile' : workDir("mc.event.%s.%s.pool" % (genName,uuid)),
        'Generator'  : genName.upper(),
        'TupleFile'  : outFiles['gen_%s' % genName]['ref']\
                       .replace(".pool",".tuple.root"),
        'PmonFile'   : workDir("write.genevent.%s.%s.pmon.gz"%(genName,uuid)),
        } )
        ]

    athena = ChapPy.Athena(jobOptions=jobOptions,
                           logFile=workDir("write.genevent.%s.%s.log" %
                                           (genName, uuid)),
                           checkLeak=False)
    athena.EvtMax = evtMax
    sc = athena.run()
    if sc != 0:
        return ScOutput(sc, "ERROR")
    else:
        inFile = workDir("write.genevent.%s.%s.pmon.gz" % (genName, uuid))
        outFile = workDir("out.write.genevent.%s.%s.pmon.root" %
                          (genName, uuid))
        print(subprocess.getoutput("perfmon.py %s -o %s" % (inFile, outFile)))

    ###-----------------------------------------------------
    print("\n")
    print("#" * 80)
    print("## Testing [reading-%s]..." % genName)
    templateJobO = """
INPUT=['%(InputFile)s'];
DUMPTUPLE=True;
TUPLEFILENAME='%(TupleFile)s';
include( 'McParticleTests/iotest_ReadGenEvent_jobOptions.py' );
jobproperties.PerfMonFlags.OutputFile = '%(PmonFile)s';
"""
    jobOptions = [
        ChapPy.JobOptionsCmd( templateJobO % {
        'InputFile' : workDir("mc.event.%s.%s.pool" % (genName,uuid)),
        'TupleFile' : outFiles['gen_%s' % genName]['chk']\
                      .replace(".pool",".tuple.root"),
        'PmonFile'  : workDir("read.genevent.%s.%s.pmon.gz" % (genName,uuid)),
        } )
    ]
    athena = ChapPy.Athena(jobOptions=jobOptions,
                           logFile=workDir("read.genevent.%s.%s.log" %
                                           (genName, uuid)),
                           checkLeak=False)
    athena.EvtMax = evtMax
    sc = athena.run()
    if sc != 0:
        return ScOutput(sc, "ERROR")
    else:
        inFile = workDir("read.genevent.%s.%s.pmon.gz" % (genName, uuid))
        outFile = workDir("out.read.genevent.%s.%s.pmon.root" %
                          (genName, uuid))
        print(subprocess.getoutput("perfmon.py %s -o %s" % (inFile, outFile)))

    ###-----------------------------------------------------
    print("\n")
    print("#" * 80)
    return doTupleValidation(
        outFiles['gen_%s' % genName]['ref'].replace(".pool", ".tuple.root"),
        outFiles['gen_%s' % genName]['chk'].replace(".pool", ".tuple.root"))
Code example #12
            '--out %(output)s',
            '%(files)s',
        ])
        evt_list = [str(i) for _, i in args.selection]
        run_list = [str(i) for i, _ in args.selection if i is not None]
        cmd = cmd % {
            'evt-list': ','.join(evt_list),
            'run-list':
            '' if len(run_list) == 0 else '-r ' + ','.join(run_list),
            'output': args.output,
            'files': ' '.join(args.files),
        }
        return subprocess.call(cmd.split())

    import AthenaCommon.ChapPy as api
    app = api.AthenaApp()
    app << """
    import AthenaCommon.Constants as Lvl
    from AthenaCommon.AthenaCommonFlags import jobproperties as jp
    acf = jp.AthenaCommonFlags
    acf.FilesInput = %(files)s

    # events to process
    acf.EvtMax = EvtMax = theApp.EvtMax = -1

    # configuration
    import AthenaPython.ConfigLib as apcl
    cfg = apcl.AutoCfg(
        name='filter-files',
        input_files=acf.FilesInput(),
        output_file='%(output)s')
Code example #13
print("#" * 80)
print("## testing Thinning exercize...")
print("## installing reference files...")
installRefFiles([
    "AthExThinning_makeData.ref",
    "WriteThinnedData.ref",
    "ReadThinnedData.ref",
    "ReadNonThinnedData.ref",
])

###-----------------------------------------------------
print("\n")
print("#" * 80)
print("## Preparing input data...")
jobOptions = [
    ChapPy.JobOptionsCmd("OUTPUT=\"%s\"" % workDir("my.data.tothin.pool")),
    ChapPy.JobOptions("AthExThinning/AthExThinning_makeData.py"),
]

athena = ChapPy.Athena(jobOptions=jobOptions,
                       logFile=workDir("my.data.tothin.pool.log"),
                       checkLeak=False)
athena.EvtMax = EVTMAX
athena.run()

###-----------------------------------------------------
print("\n")
print("#" * 80)
sc, out = doValidation("Input Data", workDir("AthExThinning_makeData.ref"),
                       workDir("my.data.tothin.pool.log"),
                       "grep '^CreateData' | grep INFO ")
Code example #14
    def _process_call(self, fname, evtmax, projects=['AtlasCore']):
        msg = self.msg()
        f = _create_file_infos()
        protocol, _ = self.server.fname(fname)
        f_raw  = self._root_open(fname, raw=True)
        if f_raw is None or not f_raw:
            raise IOError(
                errno.ENOENT,
                'No such file or directory',
                fname)
        f_root = f_raw
        try:
            file_type, file_name = self.server.ftype(f_raw)

            protocol,file_name = self.server.fname(fname)
            f['file_md5sum'] = self.server.md5sum(f_raw)
            f['file_name'] = file_name
            f['file_type'] = file_type
            f['file_size'] = f_raw.GetSize()
            if file_type == 'pool':
                f_root = self._root_open(fname, raw=False)
                # POOL files are most nutritious when known to PoolFileCatalog.xml
                # FIXME: best would be to do that in athfile_peeker.py but
                #        athena.py closes sys.stdin when in batch, which confuses
                #        PyUtils:subprocess.getstatusoutput
                #
                # ATEAM-192: avoid the PoolFileCatalog.xml conflict
                #cmd = ['pool_insertFileToCatalog.py',
                #       file_name,]
                #subprocess.call(cmd, env=self._sub_env)
                #
                if True:
                    is_tag, tag_ref, tag_guid, nentries, runs, evts = self._is_tag_file(f_root, evtmax)
                    if is_tag:
                        f['stream_names'] = ['TAG']
                        f['file_guid'] = tag_guid
                        f['nentries'] = nentries
                        f['run_number'] = runs
                        f['evt_number'] = evts
                    else:
                        import tempfile
                        fd_pkl,out_pkl_fname = tempfile.mkstemp(suffix='.pkl')
                        os.close(fd_pkl)
                        if os.path.exists(out_pkl_fname):
                            os.remove(out_pkl_fname)
                        print ("\n  ---------   running Athena peeker")
                        print (os.environ.get('CMTPATH',''))

                        import AthenaCommon.ChapPy as api
                        app = api.AthenaApp(cmdlineargs=["--nprocs=0"])
                        app << """
                            FNAME = %s
                            """ % str([file_name])
                        app << """
                            import os
                            # prevent from running athena in interactive mode (and freeze)
                            if 'PYTHONINSPECT' in os.environ:
                                del os.environ['PYTHONINSPECT']
            

                            include('AthenaPython/athfile_peeker.py')
                            from AthenaCommon.AlgSequence import AlgSequence
                            job = AlgSequence()
                            # we don't really need this...
                            job.peeker.outfname='%(outfname)s'
                            job.peeker.infname='%(infname)s'

                            # metadata + taginfo
                            import IOVDbSvc.IOVDb

                            # evt-max
                            theApp.EvtMax = %(evtmax)i
                            """ % {
                            'infname' : file_name,
                            'outfname': out_pkl_fname,
                            'evtmax': evtmax,
                            }
                        import uuid
                        stdout_fname = (
                            'athfile-%i-%s.log.txt' %
                            (os.getpid(), uuid.uuid4())
                            )
                        stdout = open(stdout_fname, "w")
                        print ("="*80, file=stdout)
                        print (self._sub_env, file=stdout)
                        print ("="*80, file=stdout)
                        stdout.flush()
                        if DEFAULT_AF_RUN:
                            sc = app.run(stdout=stdout, env=self._sub_env)
                        else:
                            import PyUtils.FilePeekerTool as fpt
                            fp = fpt.FilePeekerTool(f_root)
                            sc, fp_pkl_fname = fp.run()
                            # revert to athena sub-process in case of file with old schema
                            if sc == 0:
                                out_pkl_fname = fp_pkl_fname
                            else:
                                sc = app.run(stdout=stdout, env=self._sub_env)
                        stdout.flush()
                        stdout.close()
                        import AthenaCommon.ExitCodes as ath_codes
                        if sc == 0:
                            #import shelve
                            import PyUtils.dbsqlite as dbsqlite
                            msg.info('extracting infos from [%s]...',
                                     out_pkl_fname)
                            db = dbsqlite.open(out_pkl_fname)
                            msg.info('keys: %s',db.keys())
                            f.update(db['fileinfos'])
                            db.close()
                            msg.info('extracting infos from [%s]... [ok]',
                                     out_pkl_fname)
                            os.remove(stdout.name)
                        else:
                            # maybe an empty file
                            # trust but verify
                            if not self._is_empty_pool_file(f_root):
                                # actually a problem in athena !
                                from textwrap import dedent
                                err = dedent("""
                                %s
                                problem running chappy!
                                code: [%s (%s)]
                                what: [%s]
                                => corrupted input file ?
                                %s
                                logfile: [%s]
                                """% (":"*25,
                                      sc,errno.errorcode.get(sc,sc),
                                      ath_codes.codes.get(sc,sc),
                                      ":"*25,
                                      stdout.name
                                      ))
                                msg.error(err)
                                raise IOError(sc, err)
                            msg.info('athena failed to initialize.')
                            msg.info('=> probably an empty input POOL file')
                    # TAG-file
            else: # bytestream
                bs_fileinfos = self._process_bs_file(file_name,
                                                     evtmax=evtmax,
                                                     full_details=False)
                del bs_fileinfos['file_name']
                del bs_fileinfos['file_size']
                del bs_fileinfos['file_type']
                del bs_fileinfos['file_md5sum']
                f.update(bs_fileinfos)
        finally:
            try:
                f_raw.Close()
                f_root.Close()
                del f_raw
                del f_root
            except Exception as err:
                msg.warning(
                    'problem while closing raw and root file handles:\n%s',
                    err
                    )
        return f
Code example #15
print "#" * 80
print "## testing slimming exercize..."
print "## installing reference files..."
installRefFiles([
    "AthExThinning_makeData.ref",
    "pyWriteSlimmedData.ref",
    "ReadSlimmedData.ref",
    "ReadNonSlimmedData.ref",
])

###-----------------------------------------------------
print "\n"
print "#" * 80
print "## Preparing input data..."
jobOptions = [
    ChapPy.JobOptionsCmd("OUTPUT=\"%s\"" % workDir("my.pydata.toslim.pool")),
    ChapPy.JobOptions("AthExThinning/AthExThinning_makeData.py"),
]

athena = ChapPy.Athena(jobOptions=jobOptions,
                       logFile=workDir("my.pydata.toslim.pool.log"),
                       checkLeak=False)
athena.EvtMax = EVTMAX
athena.run()

###-----------------------------------------------------
print "\n"
print "#" * 80
sc, out = doValidation("Input Data", workDir("AthExThinning_makeData.ref"),
                       workDir("my.pydata.toslim.pool.log"),
                       "grep '^CreateData' | grep INFO ")
Code example #16
def main(args):
    """filter multiple input (pool/bs) files"""

    exitcode = 0

    import PyUtils.Logging as L
    msg = L.logging.getLogger('filter-files')
    msg.setLevel(L.logging.INFO)

    msg.info(':' * 40)

    import os.path as osp
    args.files = [
        osp.expandvars(osp.expanduser(fname)) for fname in args.files
    ]

    args.selection = osp.expandvars(osp.expanduser(args.selection))

    msg.info('input files: %s', args.files)
    msg.info('output file: %s', args.output)
    msg.info('selection:   %s', args.selection)

    import os
    if os.path.exists(args.selection):
        selection = []
        with open(args.selection, 'r') as s:
            for line in s:
                if line.strip().startswith('#'):
                    continue
                l = line.strip().split()
                if len(l) == 1:  # assume this is only the event number
                    runnbr, evtnbr = None, int(l[0])
                elif len(l) == 2:  # a pair (run,evt) number
                    runnbr, evtnbr = int(l[0]), int(l[1])
                else:
                    raise RuntimeError(
                        'file [%s] has invalid format at line:\n%r' %
                        (args.selection, line))
                selection.append((runnbr, evtnbr))
    else:
        try:
            args.selection = eval(args.selection)
        except Exception as err:
            msg.error('caught:\n%s', err)
            msg.error('.. while trying to parse selection-string')
            import traceback
            traceback.print_exc()
            return 1

        selection = []
        for item in args.selection:
            if not isinstance(item, (tuple, list) + six.integer_types):
                raise TypeError('type: %r' % type(item))

            if isinstance(item, (tuple, list)):
                if len(item) == 1:
                    runnbr, evtnbr = None, int(item[0])
                elif len(item) == 2:
                    runnbr, evtnbr = int(item[0]), int(item[1])
                else:
                    raise RuntimeError('item [%s] has invalid arity (%s)' %
                                       (item, len(item)))
            else:
                runnbr, evtnbr = None, int(item)
            selection.append((runnbr, evtnbr))

    # put back the massaged selection into our workspace
    args.selection = selection[:]

    import PyUtils.AthFile as af
    fi = af.fopen(args.files[0]).infos
    af.save_cache()

    if fi['file_type'] == 'bs':
        # optimization: run directly 'AtlCopyBSEvent.exe
        import subprocess
        cmd = ' '.join([
            'AtlCopyBSEvent',
            '-e %(evt-list)s',
            '%(run-list)s',
            '--out %(output)s',
            '%(files)s',
        ])
        evt_list = [str(i) for _, i in args.selection]
        run_list = [str(i) for i, _ in args.selection if i is not None]
        cmd = cmd % {
            'evt-list': ','.join(evt_list),
            'run-list':
            '' if len(run_list) == 0 else '-r ' + ','.join(run_list),
            'output': args.output,
            'files': ' '.join(args.files),
        }
        return subprocess.call(cmd.split())

    import AthenaCommon.ChapPy as api
    app = api.AthenaApp()
    app << """
    import AthenaCommon.Constants as Lvl
    from AthenaCommon.AthenaCommonFlags import jobproperties as jp
    acf = jp.AthenaCommonFlags
    acf.FilesInput = %(files)s

    # events to process
    acf.EvtMax = EvtMax = theApp.EvtMax = -1

    # configuration
    import AthenaPython.ConfigLib as apcl
    cfg = apcl.AutoCfg(
        name='filter-files',
        input_files=acf.FilesInput(),
        output_file='%(output)s')

    for type_name in ('evgen',
                      'hits',
                      'rdo',
                      'esd',
                      'aod',
                      'tag',
                      'usr',):
        res = getattr(cfg, 'is_' + type_name)()
        if res:
            cfg.msg.info('input file type is ['+type_name+']')
            break
    else:
        cfg.msg.info('input file stream is of unknown type')
        cfg.msg.info('autoconfiguration might not work...')
        pass
        
    # add the filtering algorithm
    # get a handle on the job main sequence
    import AthenaCommon.AlgSequence as acas
    job = acas.AlgSequence()
    
    ## filter configuration ##
    ##  -> we use the special sequence 'AthMasterSeq' which
    ##      is run before any other algorithm (which are usually in the
    ##      'TopAlg' sequence
    seq = acas.AthSequencer('AthMasterSeq')
    
    import GaudiSequencer.PyComps as gspc
    seq += gspc.PyEvtFilter(
       'filter_pyalg',
       # the store-gate key. leave as an empty string to take any eventinfo instance
       evt_info=None,
       OutputLevel=Lvl.INFO)
    seq.filter_pyalg.evt_list = %(selection)s   


    cfg.configure_job()

    if (cfg.is_rdo() or
        cfg.is_esd() or
        cfg.is_aod()):
        # main jobos
        include ('RecExCond/RecExCommon_flags.py')
        include ('RecExCommon/RecExCommon_topOptions.py')

    """ % args.__dict__

    stdout = None
    exitcode = app.run(stdout=stdout)
    return exitcode
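
A hedged driver sketch for main() above; the attribute names mirror what the function actually reads (files, output, selection), while the values themselves are purely illustrative:

import argparse

args = argparse.Namespace(
    files=['input.aod.pool'],          # hypothetical input file(s)
    output='filtered.aod.pool',        # hypothetical output file
    selection='[(1234, 5678)]',        # "(run, evt)" pairs, given as a string
)
rc = main(args)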
Code example #17
def main(args):
    """take a bunch of input (pool/bs) files and produce a single one
    """
    exitcode = 0

    import PyUtils.Logging as L
    msg = L.logging.getLogger('merge-files')
    msg.setLevel(L.logging.INFO)

    msg.info(':'*40)
    msg.info('welcome to merge-files version %s', __version__)

    import os.path as osp
    args.files = [ osp.expandvars(osp.expanduser(fname))
                   for fname in args.files ]

    msg.info('input files: %s', args.files)
    msg.info('output file: %s', args.output)
    msg.info('evts to process: %s', args.evts)
    msg.info('log-files: %s', args.logfile)

    import AthenaCommon.ChapPy as api
    app = api.AthenaApp()
    app << """
    from AthenaCommon.AthenaCommonFlags import jobproperties as jp
    acf = jp.AthenaCommonFlags
    acf.FilesInput = %(files)s

    # events to process
    acf.EvtMax = EvtMax = theApp.EvtMax = %(evts)s

    # configuration
    import AthenaPython.ConfigLib as apcl
    cfg = apcl.AutoCfg(
        name='merge-files',
        input_files=acf.FilesInput(),
        output_file='%(output)s')

    for type_name in ('evgen',
                      'hits',
                      'rdo',
                      'esd',
                      'aod',
                      'tag',
                      'usr',):
        res = getattr(cfg, 'is_' + type_name)()
        if res:
            cfg.msg.info('input file type is ['+type_name+']')
            break
    else:
        cfg.msg.info('input file stream is of unknown type')
        cfg.msg.info('autoconfiguration might not work...')
        pass
        
    cfg.configure_job()

    if (cfg.is_rdo() or
        cfg.is_esd() or
        cfg.is_aod()):
        # main jobos
        include ('RecExCond/RecExCommon_flags.py')
        # FIXME: work-around for bug #56185
        from AthenaCommon.DetFlags import DetFlags
        DetFlags.makeRIO.all_setOff()
        # FIXME -- end
        include ('RecExCommon/RecExCommon_topOptions.py')
        
    """ % args.__dict__

    stdout = args.logfile
    if stdout.lower() in ('<stdout>', 'stdout',):
        stdout = None
    else:
        stdout = open(stdout, 'w')

    exitcode = app.run(stdout=stdout)
    return exitcode
Code example #18
def doReadWriteTest(genName="pythia", evtMax=100):
    """A simple wrapper around the read/write tests..."""
    genName = genName.lower()
    ###-----------------------------------------------------
    print ""
    print "#" * 80
    print "## Testing [writing-%s]..." % genName
    templateJobO = """
OUTPUT='%(OutputFile)s';
DUMPTUPLE=True;
GENERATOR='%(Generator)s';
TUPLEFILENAME='%(TupleFile)s';
include( 'McParticleTests/iotest_WriteGenEvent_jobOptions.py' );
jobproperties.PerfMonFlags.OutputFile = '%(PmonFile)s';
"""
    jobOptions = [
        ChapPy.JobOptionsCmd( templateJobO % {
        'OutputFile' : workDir("mc.event.%s.pool" % genName),
        'Generator'  : genName.upper(),
        'TupleFile'  : outFiles['gen_%s' % genName]['ref']\
                       .replace(".pool",".tuple.root"),
        'PmonFile'   : workDir("write.genevent.%s.pmon.gz" % genName),
        } )
        ]

    athena = ChapPy.Athena(jobOptions=jobOptions,
                           logFile=workDir("write.genevent.%s.log" % genName),
                           checkLeak=False)
    athena.EvtMax = evtMax
    sc = athena.run()
    if sc != 0:
        return ScOutput(sc, "ERROR")
    else:
        inFile = workDir("write.genevent.%s.pmon.gz" % genName)
        outFile = workDir("out.write.genevent.%s.pmon.root" % genName)
        print commands.getoutput("perfmon.py %s -o %s" % (inFile, outFile))

    ###-----------------------------------------------------
    print ""
    print "#" * 80
    print "## Testing [ASCII-writing-%s (1)]..." % genName
    templateJobO = """
INPUT=%(InputFile)s;
OUTPUT='%(OutputFile)s';
DUMPTUPLE=True;
GENERATOR='%(Generator)s';
include( 'McParticleAlgs/GenEventAsciiWriter_jobOptions.py' );
"""
    jobOptions = [
        ChapPy.JobOptionsCmd(
            templateJobO % {
                'InputFile': [
                    workDir("mc.event.%s.pool" % genName),
                ],
                'OutputFile': workDir("mc.event.%s.1.ascii" % genName),
                'Generator': genName.upper(),
            })
    ]

    athena = ChapPy.Athena(jobOptions=jobOptions,
                           logFile=workDir("write.genevent.ascii.1.%s.log" %
                                           genName),
                           checkLeak=False)
    athena.EvtMax = evtMax
    sc = athena.run()
    if sc != 0:
        return ScOutput(sc, "ERROR")

    ###-----------------------------------------------------
    print ""
    print "#" * 80
    print "## Testing [ASCII-writing-%s (2)]..." % genName
    templateJobO = """
INPUT=%(InputFile)s;
OUTPUT='%(OutputFile)s';
DUMPTUPLE=True;
GENERATOR='%(Generator)s';
include( 'McParticleAlgs/GenEventAsciiWriter_jobOptions.py' );
"""
    jobOptions = [
        ChapPy.JobOptionsCmd(
            templateJobO % {
                'InputFile': [
                    workDir("mc.event.%s.pool" % genName),
                ],
                'OutputFile': workDir("mc.event.%s.2.ascii" % genName),
                'Generator': genName.upper(),
            })
    ]

    athena = ChapPy.Athena(jobOptions=jobOptions,
                           logFile=workDir("write.genevent.ascii.2.%s.log" %
                                           genName),
                           checkLeak=False)
    athena.EvtMax = evtMax
    sc = athena.run()
    if sc != 0:
        return ScOutput(sc, "ERROR")

    ###-----------------------------------------------------
    print "\n"
    print "#" * 80
    print "## Testing [ASCII-reading-%s]..." % genName
    templateJobO = """
INPUT=%(InputFiles)s;
include( 'McAsciiEventSelector/Example_McAsciiReader_jobOptions.py' );
"""
    jobOptions = [
        ChapPy.JobOptionsCmd( templateJobO % {
        'InputFiles' : [ workDir("mc.event.%s.1.ascii" % genName),
                         workDir("mc.event.%s.2.ascii" % genName),],
        'TupleFile' : outFiles['gen_%s' % genName]['chk']\
                      .replace(".pool",".tuple.root"),
        'PmonFile'  : workDir("read.genevent.%s.pmon.gz" % genName),
        } )
    ]
    athena = ChapPy.Athena(jobOptions=jobOptions,
                           logFile=workDir("read.genevent.%s.log" % genName),
                           checkLeak=False)
    athena.EvtMax = evtMax
    sc = athena.run()
    if sc != 0:
        return ScOutput(sc, "ERROR")

    ###-----------------------------------------------------
    print "\n"
    print "#" * 80
    return ScOutput(0, "OK")
Code example #19
def doReadWriteTest(genName="TruthParticles", evtMax=100):
    ###-----------------------------------------------------
    print("\n")
    print("#" * 80)
    print("## Preparing input data... [%s]" % genName)
    templateJobO = """
OUTPUT='%(OutputFile)s';
DUMP=True;
include( 'McParticleTests/iotest_WriteGenEvent_jobOptions.py' );
"""
    jobOptions = [
        ChapPy.JobOptionsCmd(templateJobO % {
            'OutputFile': workDir("mc.event.%s.pool" % uuid),
        })
    ]
    athena = ChapPy.Athena(jobOptions=jobOptions,
                           logFile=workDir("mc.event.%s.pool.log" % uuid),
                           checkLeak=False)
    athena.EvtMax = evtMax
    sc = athena.run()
    if sc != 0:
        return ScOutput(sc, "ERROR")

    ###-----------------------------------------------------
    print("\n")
    print("#" * 80)
    print("## Testing [writing-%s]..." % genName)
    templateJobO = """
INPUT=['%(InputFile)s'];
OUTPUT= '%(OutputFile)s';
DUMPTUPLE=True;
TUPLEFILENAME='%(TupleFile)s';
include( 'McParticleTests/iotest_WriteTruthParticles_jobOptions.py' );
jobproperties.PerfMonFlags.OutputFile = '%(PmonFile)s';
"""
    jobOptions = [
        ChapPy.JobOptionsCmd(
            templateJobO % {
                'InputFile':
                workDir("mc.event.%s.pool" % uuid),
                'OutputFile':
                outFiles['ref'],
                'TupleFile':
                outFiles['ref'].replace(".pool", ".tuple.root"),
                'PmonFile':
                workDir("write.mcaod.%s.%s.perfmon.pmon.gz" % (genName, uuid)),
            })
    ]

    athena = ChapPy.Athena(jobOptions=jobOptions,
                           logFile=outFiles['ref'] + ".log",
                           checkLeak=False)
    athena.EvtMax = evtMax
    sc = athena.run()
    if sc != 0:
        return ScOutput(sc, "ERROR")
    else:
        inFile = workDir("write.mcaod.%s.%s.perfmon.pmon.gz" % (genName, uuid))
        outFile = workDir("out.write.mcaod.%s.%s.perfmon.root" %
                          (genName, uuid))
        subprocess.getoutput("perfmon.py %s -o %s" % (inFile, outFile))

    ###-----------------------------------------------------
    print("\n")
    print("#" * 80)
    print("## Testing [reading-%s]..." % genName)
    templateJobO = """
INPUT=['%(InputFile)s'];
OUTPUT= '%(OutputFile)s';
DUMPTUPLE=True;
TUPLEFILENAME='%(TupleFile)s';
include( 'McParticleTests/iotest_ReadTruthParticles_jobOptions.py' );
jobproperties.PerfMonFlags.OutputFile = '%(PmonFile)s';
"""
    jobOptions = [
        ChapPy.JobOptionsCmd(
            templateJobO % {
                'InputFile':
                outFiles['ref'],
                'OutputFile':
                outFiles['chk'],
                'TupleFile':
                outFiles['chk'].replace(".pool", ".tuple.root"),
                'PmonFile':
                workDir("read.mcaod.%s.%s.perfmon.pmon.gz" % (genName, uuid)),
            })
    ]
    athena = ChapPy.Athena(jobOptions=jobOptions,
                           logFile=outFiles['chk'] + ".log",
                           checkLeak=False)
    athena.EvtMax = evtMax
    sc = athena.run()
    if sc != 0:
        return ScOutput(sc, "ERROR")
    else:
        inFile = workDir("read.mcaod.%s.%s.perfmon.pmong.gz" % (genName, uuid))
        outFile = workDir("out.read.mcaod.%s.%s.perfmon.root" %
                          (genName, uuid))
        subprocess.getoutput("perfmon.py %s -o %s" % (inFile, outFile))

    ###-----------------------------------------------------
    print("\n")
    print("#" * 80)
    return doMcAodTupleValidation(
        outFiles['ref'].replace(".pool", ".tuple.root"),
        outFiles['chk'].replace(".pool", ".tuple.root"))
Code example #20

uuid = "%s_%s" % (os.getpid(), timeofday())

print("#" * 80)
print("## testing McEventCollection I/O...")
print("## Job uuid:", uuid)
benchSequence = BenchSequence("McEventCollection I/O")

EVTMAX = 100
NTIMES = 1

###-----------------------------------------------------
print("## Testing [writing]...")
jobOptions = [
    ChapPy.JobOptionsCmd("OUTPUT=\"%s\"" %
                         workDir("mc.io.event.%s.pool" % uuid)),
    ChapPy.JobOptionsCmd("DUMP=False"),
    ChapPy.JobOptions("McParticleTests/iotest_WriteGenEvent_jobOptions.py"),
]

athena = ChapPy.Athena(
    jobOptions=jobOptions,
    #logFile = "/dev/stdout",
    checkLeak=False)
athena.EvtMax = EVTMAX

bench = iobench.AthBench(
    athena,
    nTimes=NTIMES,
    logFile=workDir("write.%s.log" %
                    os.path.basename(workDir("mc.io.event.%s.pool" % uuid))))
Code example #21
print("#" * 80)
print("## testing symlinks for TruthParticleContainer...")
bench = BenchSequence("Symlinks for TruthParticleContainer")

print("## installing reference files...")
installRefFiles([
    "mc.aod.symlinks.ref",
    "mc.aod.pysymlinks.ref",
])

evtMax = 5

###-----------------------------------------------------
print("## Testing [writing]...")
jobOptions = [
    ChapPy.JobOptionsCmd("OUTPUT=\"%s\"" % workDir("mc.aod.symlinks.pool")),
    ChapPy.JobOptions("McParticleTests/symlinkTest_mcAod_jobOptions.py"),
]

athena = ChapPy.Athena(jobOptions=jobOptions,
                       logFile=workDir("mc.aod.symlinks.pool.log"),
                       checkLeak=False)
athena.EvtMax = evtMax
athena.run()

###-----------------------------------------------------
print("\n")
print("#" * 80)
bench += doPostCheck("McAod symlinks", workDir("mc.aod.symlinks.ref"),
                     workDir("mc.aod.symlinks.pool.log"),
                     "grep \"^McAodSymLinkTests\"")
Code example #22
# @file:    test_iobench.py
# @purpose: unit test file for the iobench module
# @author:  Sebastien Binet <*****@*****.**>
# @date:    July 2006

import user
import sys
from TestTools import iobench
from AthenaCommon import ChapPy

if __name__ == "__main__":
    print "#" * 80
    print "## testing iobench ..."

    jobOptions = [
        ChapPy.JobOptionsCmd("OUTPUT=\"/tmp/slimmed.aod.pool\""),
        ChapPy.JobOptions("McParticleAlgs/test_WriteMcAod_jobOptions.py")
    ]
    athena = ChapPy.Athena(jobOptions=jobOptions, checkLeak=True)
    athena.EvtMax = 100

    bench = iobench.AthBench(athena, nTimes=10)
    print "## bench:"
    print bench.athena
    bench.run()

    bench.ioStats(["GEN_AOD", "GEN_EVENT", "SpclMC"], "w")
    bench.save("iobench-%ievts.log" % athena.EvtMax)
    bench.stats()

    print ""