Example #1
0
# Create a column-wise ntuple in an HBOOK file.  (column_wise=1 below;
# "cwn" is HBOOK jargon for a column-wise ntuple.)
schema = hep.table.Schema()
schema.addColumn("index", "int32")
schema.addColumn("value32", "float32")
schema.addColumn("value64", "float64")
hbook_file = create("cwn1.hbook")
table = createTable("cwn", hbook_file, schema, column_wise=1)

# Fill it with random values, recording each one for later comparison.
recorded = []
for i in xrange(100):
    v = random()
    recorded.append(v)
    table.append(index=i, value32=v, value64=v)
del table, hbook_file

# Reopen the ntuple and check its layout and length.
table = open("cwn1.hbook")["cwn"]
assert_(table.column_wise)
compare(len(table), 100)
# The stored values must agree with the recorded ones, to within the
# precision appropriate to each column's type.
for i, row in enumerate(table):
    compare(row["index"], i)
    compare(row["value32"], recorded[i], precision=1e-6)
    compare(row["value64"], recorded[i], precision=1e-9)
del row, table

Example #2
0
# Write one table without metadata and one with it, each carrying a
# 'notes' attribute and a single row.
for path, with_md, x_value in (
        ("metadata2-1.table", False, 10),
        ("metadata2-2.table", True, 12)):
    t = hep.table.create(path, schema, with_metadata=with_md)
    t.notes = "more stuff"
    t.append(x=x_value)
    del t

# No metadata file should have been written for the first table.
assert_(not os.path.isfile("metadata2-1.table.metadata"))

# Reopening the first table must not recover any metadata attributes.
table = hep.table.open("metadata2-1.table", with_metadata=False)
assert_(not hasattr(table, "notes"))
assert_(not hasattr(table.schema, "description"))
assert_(not hasattr(table.schema["x"], "units"))
compare(len(table), 1)
compare(table[0]["x"], 10)
del table

# Opening the second table with metadata disabled must likewise recover
# no metadata attributes, even though a metadata file exists for it.
table = hep.table.open("metadata2-2.table", with_metadata=False)
assert_(not hasattr(table, "notes"))
assert_(not hasattr(table.schema, "description"))
Example #3
0
def callback(value, weight):
    """Accumulate 'value' into the module-level running total 'sum'.

    Requires every 'weight' to be exactly 1.
    """
    # NOTE: 'sum' here is a module-level accumulator that shadows the
    # builtin of the same name.
    global sum
    assert_(weight == 1)
    sum = sum + value
Example #4
0
from   hep import test
import hep.lorentz
from   hep.lorentz import lab
from   math import sqrt

# A frame boosted along z at beta = 0.5 relative to the lab frame.
cm_frame = hep.lorentz.Frame(lab.Boost(0, 0, 0.5))
# Two back-to-back four-momenta specified in the CM frame.
p4 = cm_frame.Momentum( 4.0,  1.0,  0.0, -1.0)
q4 = cm_frame.Momentum( 4.0, -1.0,  0.0,  1.0)

# In the CM frame the total four-momentum is purely timelike.
t_cm, x_cm, y_cm, z_cm = cm_frame.coordinatesOf(p4 + q4)
test.compare(t_cm, 8.0)
test.compare(x_cm, 0.0)
test.compare(y_cm, 0.0)
test.compare(z_cm, 0.0)
# Each momentum has invariant mass sqrt(4**2 - 1 - 1) = sqrt(14).
test.compare(p4.mass, sqrt(14.0), precision=1e-8)
test.compare(q4.mass, sqrt(14.0), precision=1e-8)
test.compare(p4 ^ q4, 18.0, precision=1e-8)

# Viewed from the lab, the time component grows under the boost while
# the transverse components stay zero.
t_lab, x_lab, y_lab, z_lab = lab.coordinatesOf(p4 + q4)
test.assert_(t_lab > t_cm)
test.compare(x_lab, 0.0)
test.compare(y_lab, 0.0)
Example #5
0
#        mu+
#        nu_mu
#      pi+
#     gamma

particle = hep.mctruth.parseDecay(
    "[Upsilon(4S),[B-,[D*0,[D0,[pi0,[gamma],[gamma]],[pi0,[gamma],[gamma]],[pi+,[mu+],[nu_mu]],[K-,[n0]]],[gamma]],[a_20,[rho+,[pi0,[gamma],[gamma]],[pi+]],[pi-]],[pi-,[n0]]],[B+,[K'*+,[K*0,[pi-],[K+,[mu+],[nu_mu]]],[pi+]],[gamma]]]"
)

# -----------------------------------------------------------------------
# tests
# -----------------------------------------------------------------------

# Test CC mode.  The expectations below assert that cc_mode=1 matches the
# pattern as written, cc_mode=-1 matches only its charge conjugate, and
# cc_mode=0 accepts either.
for decay, expectations in (
        ("[Upsilon(4S),[B+,K'*+,gamma],B-]", (True, False, True)),
        ("[Upsilon(4S),[B-,K'*-,gamma],B+]", (False, True, True))):
    pattern = hep.mctruth.parseDecay(decay)
    for mode, should_match in zip((1, -1, 0), expectations):
        match = hep.mctruth.matchTree(pattern, particle, cc_mode=mode)
        if should_match:
            assert_(match)
        else:
            assert_(not match)
Example #6
0
# Draw two unit-weight 2D samples: sample 1 centered at the origin,
# sample 2 offset from it (and wider in its second coordinate).
samples1 = [ ((random.normalvariate(0, 1), random.normalvariate(0, 1)), 1)
             for _ in xrange(1000) ]
samples2 = [ ((random.normalvariate(2, 1), random.normalvariate(3, 2)), 1)
             for _ in xrange(1000) ]

# Construct the Fisher discriminant from the two samples.
fisher = FisherDiscriminant(samples1, samples2)

# Evaluate the discriminant for every sample point.
f1 = [ fisher(point) for (point, weight) in samples1 ]
f2 = [ fisher(point) for (point, weight) in samples2 ]
# Find the cut on the discriminant that best separates the samples.
cut, fom = optimize(f1, f2, ">")

# Most of sample 1 should be classified correctly (above the cut)...
right1 = len([ value for value in f1 if value > cut ])
assert_(right1 > 900)

# ...and most of sample 2 should fall below it.
right2 = len([ value for value in f2 if value < cut ])
assert_(right2 > 800)
Example #7
0
#       K+
#        mu+
#        nu_mu
#      pi+
#     gamma

particle = hep.mctruth.parseDecay("[Upsilon(4S),[B-,[D*0,[D0,[pi0,[gamma],[gamma]],[pi0,[gamma],[gamma]],[pi+,[mu+],[nu_mu]],[K-,[n0]]],[gamma]],[a_20,[rho+,[pi0,[gamma],[gamma]],[pi+]],[pi-]],[pi-,[n0]]],[B+,[K'*+,[K*0,[pi-],[K+,[mu+],[nu_mu]]],[pi+]],[gamma]]]")

#-----------------------------------------------------------------------
# tests
#-----------------------------------------------------------------------

# Test checking of subdecays.  Only the first pattern lists the B+
# daughters exactly; every other variant adds, drops, or omits a
# daughter somewhere and so must fail to match.
pattern = hep.mctruth.parseDecay("[Upsilon(4S),[B+,K'*+,gamma],B-]")
assert_(hep.mctruth.matchTree(pattern, particle))

for wrong_decay in (
        "[Upsilon(4S),[B+,K'*+,gamma,gamma],B-]",
        "[Upsilon(4S),[B+,K'*+],B-]",
        "[Upsilon(4S),[B+,K'*+,gamma],pi0,B-]",
        "[Upsilon(4S),[B+,K'*+,gamma]]",
        "[Upsilon(4S),[B+],B-]"):
    pattern = hep.mctruth.parseDecay(wrong_decay)
    assert_(not hep.mctruth.matchTree(pattern, particle))
Example #8
0
#-----------------------------------------------------------------------

import hep.expr
import hep.hist
import hep.hist.fit
from   hep.test import compare, assert_
from   math import sqrt
import random

#-----------------------------------------------------------------------
# test
#-----------------------------------------------------------------------

histogram = hep.hist.Histogram1D(20, (0.0, 4.0), bin_type=float)
contents = [ 6.8, 5.8, 4.1, 5.1, 3.9, 4.8, 4.3, 4.5, 4.0, 5.1, 5.7, 5.9,
             7.0, 8.4, 9.4, 10.7, 12.4, 14.2, 15.7, 18.0 ]
for i in range(20):
    histogram.setBinContent(i, contents[i])

expr = hep.expr.parse("a * x ** 2 + b * x + c")
result = hep.hist.fit.chiSquareFit1D(
    histogram, expr, "x", (("a", 1), ("b", 1), ("c", 1), ))
print result

compare(result.minuit_status, 3)
assert_(result.minimum < 40)
# Despite the noise, the parameter values shouldn't be *too* far off.
compare(result.values["a"],  10, precision=1)
compare(result.values["b"], -25, precision=1)
compare(result.values["c"],  35, precision=1)