Example #1
    logger.info('Run  info for #run=%d ' % 169064)
    logger.info(80 * '*')
    rinfo = run_info(169064)
    for k in rinfo:
        logger.info("  %30s : %-35s " % (k, rinfo[k]))

    logger.info(80 * '*')
    logger.info('Fill info for #fill=%d ' % 4691)
    logger.info(80 * '*')

    finfo = fill_info(4691)
    for k in finfo:
        logger.info("  %30s : %-35s " % (k, finfo[k]))

    logger.info(80 * '*')

    from Ostap.Utils import timing
    runs = [0, 1, 169064, 5, 6, 98241980, 169064, 2334, 2334, 524387]
    for run in runs:

        with timing():

            fill = fill_number(run)
            logger.info('Run/Fill#:%12d/%-10d ' % (run, fill))

    logger.info(80 * '*')

# =============================================================================
# The END
# =============================================================================
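#
## The `timing` helper used above comes from Ostap.Utils; as a rough,
## hypothetical stand-in (not the Ostap implementation), a context manager
## that reports the elapsed wall-clock time can be sketched like this:
#
import time
from contextlib import contextmanager

@contextmanager
def simple_timing(label=''):
    """Print the wall-clock time spent inside the `with` block."""
    start = time.time()
    try:
        yield
    finally:
        print('%s %.3f s' % (label, time.time() - start))

## usage, mirroring the loop above:
with simple_timing('fill lookup:'):
    time.sleep(0.1)   ## placeholder for the fill_number(run) call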
Example #2
data7 = DataAndLumi('Y/Y', patterns[:2])
data8 = DataAndLumi('Y/Y', patterns[2:])
dataY = DataAndLumi('Y/Y', patterns)
logger.info('DATA@  7TeV %s' % data7)
logger.info('DATA@  8TeV %s' % data8)
logger.info('DATA@7+8TeV %s' % dataY)

logger.info('TChain  %s' % data7.chain)

h1 = ROOT.TH1D('h1', '', 200, 8.5, 12.5)
h2 = h1.clone()

chain = dataY.chain

with timing('SEQUENTIAL(1M):'):
    chain.project(h1, 'mass', 'minann_mu>0.5 && 8.5<=mass && mass<12.5', '',
                  1000000)

print(h1.dump(40, 20))

import Ostap.Kisa

with timing('PARALLEL(%dM):' % int(len(chain) / 1.e+6)):
    chain._project(h2, 'mass', 'minann_mu>0.5 && 8.5<=mass && mass<12.5')

print(h2.dump(40, 20))

# =============================================================================
# The END
# =============================================================================
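#
## The parallel `_project` call above is, conceptually, a chunk-and-merge
## pattern: each worker fills its own partial histogram and the partial
## results are summed at the end (in the real case the partial ROOT
## histograms would be added, e.g. with TH1::Add).  A minimal, hypothetical
## sketch with the standard multiprocessing module, not the Ostap.Kisa
## implementation:
#
import random
import multiprocessing

def fill_chunk(seed):
    """Fill a crude 10-bin histogram from one chunk of pseudo-data."""
    rng   = random.Random(seed)
    histo = [0] * 10
    for _ in range(100000):
        x = rng.gauss(5.0, 1.5)
        if 0.0 <= x < 10.0:
            histo[int(x)] += 1
    return histo

if __name__ == '__main__':
    pool     = multiprocessing.Pool(4)
    partials = pool.map(fill_chunk, range(8))            ## one task per chunk
    merged   = [sum(bins) for bins in zip(*partials)]    ## merge the partial histograms
    pool.close()
    print(merged)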
Example #3
    model_bw.signal.gamma.release()
    result, frame = model_bw.fitTo(dataset0)

if 0 != result.status() or 3 != result.covQual():
    logger.warning('Fit is not perfect MIGRAD=%d QUAL=%d ' %
                   (result.status(), result.covQual()))
    print(result)
else:
    print('Signal & Background are: %s %s' % (result('S')[0], result('B')[0]))
    ##print 'Mean   & Gamma      are: ', result ( 'mean_Gauss')[0] , result( 'gamma_BW' )[0]

models.append(model_bw)

#
## check that everything is serializable
#
logger.info('Saving all objects into DBASE')
import Ostap.ZipShelve as DBASE
from Ostap.Utils import timing
with timing(), DBASE.tmpdb() as db:
    db['mass,vars'] = mass, varset0
    db['dataset'] = dataset0
    db['models'] = models
    db['result'] = result
    db['frame'] = frame
    db.ls()

# =============================================================================
# The END
# =============================================================================
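#
## The block above checks that all fit ingredients survive a round trip
## through a persistent store.  The same idea with the standard-library
## shelve module, as a hypothetical stand-in for Ostap.ZipShelve:
#
import os
import shelve
import tempfile

tmpname = os.path.join(tempfile.mkdtemp(), 'tmpdb')
db = shelve.open(tmpname)
db['numbers']  = [1, 2, 3]                ## anything picklable can be stored
db['settings'] = {'mass': (9.0, 11.0)}
db.close()

db = shelve.open(tmpname, 'r')
print(sorted(db.keys()))                  ## the analogue of db.ls() above
db.close()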
Example #4
    h2.Fill ( f2.GetRandom() )
    h3.Fill ( f3.GetRandom() )
    h4.Fill ( f4.GetRandom() )
    h5.Fill ( f5.GetRandom() )
    h6.Fill ( f6.GetRandom() )

    
# h1 - decreasing convex
# h2 - increasing convex
# h3 - increasing concave
# h4 - decreasing concave 
# h5 - non-monotonic convex
# h6 - non-monotonic concave


with timing ( 'Bernstein' ) :
    rB1 = h1.bernstein  ( 3 )
    rB2 = h2.bernstein  ( 3 )
    rB3 = h3.bernstein  ( 3 )
    rB4 = h4.bernstein  ( 3 )
    rB5 = h5.bernstein  ( 3 )
    rB6 = h6.bernstein  ( 3 )
    
with timing ( 'Chebyshev' ) : 
    rC1 = h1.chebyshev  ( 3 )
    rC2 = h2.chebyshev  ( 3 )
    rC3 = h3.chebyshev  ( 3 )
    rC4 = h4.chebyshev  ( 3 )
    rC5 = h5.chebyshev  ( 3 )
    rC6 = h6.chebyshev  ( 3 )
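
#
## What a degree-3 parameterisation of a histogram amounts to, illustrated
## with numpy's Chebyshev tools as a hypothetical stand-in (not the Ostap
## .bernstein()/.chebyshev() histogram methods):
#
import numpy as np
from numpy.polynomial import chebyshev as C

edges       = np.linspace(0.0, 1.0, 51)                  ## a 50-bin histogram
centres     = 0.5 * (edges[1:] + edges[:-1])
contents, _ = np.histogram(np.random.beta(2.0, 5.0, 10000), bins=edges)

coeffs = C.chebfit(centres, contents, deg=3)             ## degree-3 Chebyshev fit
fitted = C.chebval(centres, coeffs)

chi2 = np.sum((contents - fitted) ** 2 / np.maximum(contents, 1.0))
print('chi2/ndf ~ %.2f' % (chi2 / (len(centres) - 4)))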
Example #5
increasing = Models.Monothonic_pdf('PI', mass4, power=3, increasing=True)
decreasing = Models.Monothonic_pdf('PD', mass4, power=3, increasing=False)
inc_convex = Models.Convex_pdf('PIX',
                               mass4,
                               power=3,
                               increasing=True,
                               convex=True)
dec_convex = Models.Convex_pdf('PDX',
                               mass4,
                               power=3,
                               increasing=False,
                               convex=True)
convex = Models.ConvexOnly_pdf('PX', mass4, power=3, convex=True)
concave = Models.ConvexOnly_pdf('PC', mass4, power=3, convex=False)

with timing('Positive   5'), rooSilent():
    r5, f = positive.fitTo(dataset5)
with timing('Increasing 5'), rooSilent():
    i5, f = increasing.fitTo(dataset5)
with timing('Convex     5'), rooSilent():
    x4, f = inc_convex.fitTo(dataset5)
with timing('ConvexOnly 5'), rooSilent():
    c4, f = convex.fitTo(dataset5)

## logger.info ( 'Positive   pars: %s' % positive  .pdf.function().pars() )
## logger.info ( 'Increasing pars: %s' % increasing.pdf.function().pars() )
## logger.info ( 'Convex     pars: %s' % inc_convex.pdf.function().pars() )

for i in (positive, increasing, inc_convex, convex):
    pars = i.pdf.function().bernstein().pars()
    pars = list(pars)
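
#
## The loop above extracts the underlying Bernstein coefficients; as a quick,
## hypothetical illustration (plain numpy, not Ostap code) of why they are
## interesting: a Bernstein sum with non-decreasing coefficients is itself
## non-decreasing on [0,1].
#
import numpy as np
from math import factorial

def bernstein_sum(coeffs, x):
    """Evaluate sum_k c_k * B_{k,n}(x) in the Bernstein basis of degree n."""
    n = len(coeffs) - 1
    binom = lambda m, j: factorial(m) // (factorial(j) * factorial(m - j))
    return sum(c * binom(n, k) * x**k * (1.0 - x)**(n - k)
               for k, c in enumerate(coeffs))

x = np.linspace(0.0, 1.0, 201)
y = bernstein_sum([0.1, 0.3, 0.7, 1.0], x)    ## non-decreasing coefficients
print(bool(np.all(np.diff(y) >= 0)))          ## True: the shape is monotonic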
Example #6
logger.info('ZipShelve    keys: %s' % db_zip .keys() )
logger.info('RootShelve   keys: %s' % db_root.keys() )

db_sql .close() 
db_zip .close()
db_root.close()

logger.info('SQLiteShelve size: %d ' % os.path.getsize( db_sql_name  ) )
logger.info('ZipShelve    size: %d ' % os.path.getsize( db_zip_name  ) )
logger.info('RootShelve   size: %d ' % os.path.getsize( db_root_name ) )

db_sql  = SQLiteShelve.open    ( db_sql_name  , 'r' )
db_zip  = ZipShelve.open       ( db_zip_name  , 'r' )
db_root = RootShelve.open      ( db_root_name , 'r' )

with timing ( 'h2-read/SQL'  ) : h2_sql  = db_sql  [ 'histo-2D']
with timing ( 'h2_read/ZIP'  ) : h2_zip  = db_zip  [ 'histo-2D']
with timing ( 'h2_read/ROOT' ) : h2_root = db_root [ 'histo-2D']
with timing ( 'tu-read/SQL'  ) : tu_sql  = db_sql  [ 'both'    ]
with timing ( 'tu_read/ZIP'  ) : tu_zip  = db_zip  [ 'both'    ] 
with timing ( 'tu_read/ROOT' ) : tu_root = db_root [ 'both'    ]
with timing ( 'h1-read/SQL'  ) : h1_sql  = db_sql  [ 'histo-1D']
with timing ( 'h1-read/ZIP'  ) : h1_zip  = db_zip  [ 'histo-1D']
with timing ( 'h1-read/ROOT' ) : h1_root = db_root [ 'histo-1D']

for i in h1_sql : 
    v = h1_sql  [i] - h1_zip[i] 
    if not iszero ( v.value() ) :
        logger.error('Large difference for 1D histogram(1)!')
    v = h1_sql [i] - h1    [i] 
    if not iszero ( v.value() ) :
Example #7
data8 = DataAndLumi('Y/Y', patterns[2:])
dataY = DataAndLumi('Y/Y', patterns)
logger.info('DATA@  7TeV %s' % data7)
logger.info('DATA@  8TeV %s' % data8)
logger.info('DATA@7+8TeV %s' % dataY)

logger.info('TChain  %s' % data7.chain)

variables = {('mass', 'mass(mu+mu-)', 8.5, 11.5, lambda s: s.mass)}

import Ostap.Kisa as Kisa

ppservers = ()  ## ( 'lxplus051' , )

one_file = dataY.chain[:1]
with timing('One file %s' % len(one_file)):
    ds0 = Kisa.fillDataSet(one_file,
                           variables,
                           '8.5<=mass && mass<11.5 && -0.1<c2dtf && c2dtf<5',
                           ppservers=ppservers)
    logger.info('Dataset: %s' % ds0)

with timing('All files %s' % len(dataY.chain)):
    dsa = Kisa.fillDataSet(dataY.chain,
                           variables,
                           '8.5<=mass && mass<11.5 && -0.1<c2dtf && c2dtf<5',
                           ppservers=ppservers)

    logger.info('Dataset: %s' % dsa)

# =============================================================================
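#
## What a variable definition like the one above encodes, shown with a plain,
## hypothetical stand-in (not Kisa): a (name, title, low, high, accessor)
## tuple selects one value per event and keeps it if it falls in range.
#
import collections

Event  = collections.namedtuple('Event', ['mass', 'c2dtf'])
events = [Event(9.46, 1.2), Event(12.30, 0.4), Event(10.02, 3.3)]

var = ('mass', 'mass(mu+mu-)', 8.5, 11.5, lambda s: s.mass)
name, title, low, high, accessor = var

values = [accessor(e) for e in events if low <= accessor(e) < high]
print('%s : %d entries in [%.1f, %.1f)' % (name, len(values), low, high))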