# Beispiel #1
# 0
 def __init__(self, desc):
     """Build a histogram filler from a compiled descriptor.

     `desc` is the dict produced by HBHistogramCompiler.descriptor().
     """
     from histbook import Hist
     # NOTE(review): the descriptor key is spelled "historgam" (sic) by the
     # producer; the misspelling must be kept so lookups keep matching.
     self.H = Hist.fromjson(desc["historgam"])
     # Maps each histogram input name to the stream name it is read from.
     self.Mapping = desc["mapping"]
     self.Constants = desc["constants"]
     # The set of input names this histogram consumes.
     self.Inputs = set(self.Mapping)
     self.T = None
    # --- plotting fragment (truncated by the scrape): overlays one stepped
    # histogram of `var` per signal mass point; relies on args, var, df_sig,
    # df_sig_masses, linewidth and plt defined elsewhere in the file.
    temp_i = 0
    if args.signal:
        for index, row in df_sig_masses.iterrows():
            # Stagger z-order so successive mass points draw beneath earlier ones.
            temp_i += 5
            label = '$M_{\mathrm{Squark}}$ = ' + str(
                row["M_sq"]) + ', $M_{\mathrm{LSP}}$ = ' + str(row["M_lsp"])
            print(label, var)
            # Select this (M_sq, M_lsp) mass point; .compute() suggests df_sig
            # is a dask frame — TODO confirm.
            df_temp = df_sig.loc[(df_sig['M_sq'] == row['M_sq'])
                                 & (df_sig['M_lsp'] == row['M_lsp'])]
            df_temp = df_temp.compute()
            if args.NMinusOne:
                df_temp = df_NMinusOne(df_temp, var, args.region)
            else:
                df_temp = df_temp[[var, 'weight']]
            if df_temp[var].shape[0] > 0:
                # NOTE(review): `dict[var]` subscripts the *builtin* dict unless
                # a variable named `dict` shadows it elsewhere in the file —
                # verify; as written this raises TypeError.
                h = Hist(dict[var]['bin'], weight='weight')
                h.fill(df_temp)
                # [1:-1] drops the under/overflow rows of the histbook table.
                df = h.pandas(normalized=args.norm).reset_index()[1:-1]
                # Use each bin interval's left edge as the x value.
                df[var] = df[var].apply(lambda x: x.left)
                plt.hist(df[var],
                         bins=df[var],
                         weights=df['count()'],
                         normed=args.norm,  # NOTE(review): `normed` removed in matplotlib>=3.1; may need density=
                         label=label,
                         log=True,
                         histtype="step",
                         linewidth=linewidth,
                         zorder=35 - temp_i)

    if args.MSSM:
        label = 'MSSM-like: $M_{\mathrm{Squark}}$ = ' + str(
# Beispiel #3
# 0
import time, socket

class Worker(object):

    Columns = ["A.p", "n"]

    def run(self, events, job, database):
        job.fill(n = events.n)
        job.fill(p = events.A.p)
"""

# --- Beispiel #3 driver (Python 2): submit the worker source above to a
# striped session and print throughput plus one filled histogram.
dataset = "Sample"

session = Session("striped_dev.yaml")

# One 20-bin histogram for each quantity filled by the worker (n and p).
h_n = Hist(hbin("n", 20, 0, 20))
h_p = Hist(hbin("p", 20, 0, 20))

job = session.createJob(dataset,
                        user_params={"param": {
                            "hello": "world"
                        }},
                        worker_class_source=worker_class,
                        histograms=[h_n, h_p])
job.run()
# Wall-clock duration and event count as reported by the framework.
runtime = job.TFinish - job.TStart
nevents = job.EventsProcessed
print "%s: %.6fM events, %.6fM events/second" % (
    dataset, float(nevents) / 1000000, nevents / runtime / 1000000)

print h_n.pandas()
# Beispiel #4
# 0
# --- Beispiel #4 driver (Python 2): fill a 2-D muon histogram remotely
# via the striped framework and print it as a pandas table.
import json
from QWorker import DistributedStripedSession as Session
from QWorker import Histogram

from datasets import Datasets

from histbook import Hist, beside
# NOTE(review): `bin` shadows the builtin of the same name; renaming would
# break the Hist(...) construction below, so it is left as-is.
from histbook import bin
#import vegascope; canvas = vegascope.LocalCanvas()

registry_url = "http://ifdb01.fnal.gov:9867"
data_server_url = "http://dbweb7.fnal.gov:9091/striped/app"

session = Session(data_server_url, registry_url)

# 2-D histogram of |p| (10 bins, 10-1000) vs sqrt(E^2 - p^2) (15 bins,
# 0.1-0.15); both axes are string expressions evaluated by histbook.
muon_e = Hist(bin("sqrt(mu_px**2+mu_py**2+mu_pz**2)", 10, 10.0, 1000.0),
              bin("sqrt(mu_e**2-mu_px**2-mu_py**2-mu_pz**2)", 15, 0.1, 0.15))

job = session.createJob(
    "Summer16.SMS-T5qqqqZH-mGluino1700_TuneCUETP8M1_13TeV-madgraphMLM-pythia8",
    worker_class_file="worker_new.py",
    display=False)
# The listed columns are streamed to workers so the Hist expressions can
# be evaluated event by event.
job.addHistogram(muon_e, ["mu_e", "mu_px", "mu_py", "mu_pz"])

job.start()
job.waitDone()

print muon_e.pandas()
# Beispiel #5
# 0
 def add(self, dump):
     """Merge one collector's serialized histogram delta into self.H.

     `dump` is the JSON string returned by HBHistogramCollector.dump.
     """
     from histbook import Hist
     delta = Hist.fromjson(json.loads(dump))
     self.H += delta
# Beispiel #6
# 0
# Worker source shipped to remote workers as a string.  The `x = 5/0` is a
# deliberate division by zero used to demonstrate per-worker exception
# reporting via Callback.on_exception below.
worker_class = """
import cloudpickle

class Worker(object):

    Columns = ["NJets"]

    def run(self, events, job):
        job.message("%d events" % (len(events),))
	x = 5/0
"""

session = Session(("ifdb01.fnal.gov", 8765))

# NJets in 20 bins over [0, 20), with a separate sub-histogram per dataset.
h_by_dataset = Hist(hbin("NJets", 20, 0, 20), groupby("dataset"))

datasets = [
        "Summer16.TTHH_TuneCUETP8M2T4_13TeV-madgraph-pythia8"          		# 100000 events
]

# Python 2 callbacks: the framework delivers worker messages and exception
# tracebacks to these hooks.
class Callback:
	def on_message(self, wid, nevents, message):
		print "Message received from worker %d after seeing %d events: <%s>" % (wid, nevents, message)

	def on_exception(self, wid, info):
		print "Worker %d failed with exception:\n%s" % (wid, info)

callback = Callback()
        
# Beispiel #7
# 0
# Worker source shipped as a string.  NOTE(review): the run() body mixes tab
# and space indentation; a Python 3 worker would reject it with TabError —
# verify what interpreter executes this source.
worker_class = """
class Worker(object):

    Columns = ["nJet","nMuon","nElectron","Jet.pt", "Muon.pt"]

    def run(self, events, job):
        job.fill(nJet=events.nJet)
	job.fill(nElectron=events.nElectron)
	job.fill(nMuon=events.nMuon)
        job.fill(JetPt = events.Jet.pt)
	job.fill(MuonPt = events.Muon.pt)
"""

session = Session()

# One 1-D histogram per quantity filled by Worker.run().
njets = Hist(hbin("nJet", 20, 0, 20))
nmuon = Hist(hbin("nMuon", 20, 0, 20))
nelectron = Hist(hbin("nElectron", 20, 0, 20))
muon_pt = Hist(hbin("MuonPt", 70, 0., 700.))
jet_pt = Hist(hbin("JetPt", 70, 0., 700.))


dataset = "QCD_HT200to300_PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1_NANOAODSIM"
#dataset = "JetHT_Run2016H_05Feb2018_ver2-v1_NANOAOD"

# Python 2 callback: print any exception raised inside a worker.
class Callback:
	def on_exception(self, wid, info):
		print "Exception:", info
        

job = session.createJob(dataset,
# Beispiel #8
# 0
import numpy as np

class Worker(object):

    Columns = ["NJets"]

    def run(self, events, job, db):
	data = np.frombuffer(db["calib200"], "<f4")
        job.fill(x = data)
	job.message("average=%f" % (np.mean(data),))
"""

# --- Beispiel #8 driver (Python 2): fill histogram `x` from a calibration
# blob ("calib200") read out of the worker-side database (see the worker
# source above), then report throughput.
job_server = ("ifdb02.fnal.gov", 8765)
session = Session(job_server)

# 20 bins over [0, 1) for the values decoded from the calibration buffer.
h = Hist(hbin("x", 20, 0, 1))

dataset = "Summer16.TTHH_TuneCUETP8M2T4_13TeV-madgraph-pythia8"

job = session.createJob(dataset,
                        worker_class_source=worker_class,
                        histograms=[h])
job.run()
# Wall-clock duration and event count as reported by the framework.
runtime = job.TFinish - job.TStart
nevents = job.EventsProcessed
print "%s: %.6fM events, %.6fM events/second" % (
    dataset, float(nevents) / 1000000, nevents / runtime / 1000000)

data_frame = h.pandas()

print data_frame