def generate_sim_histograms(degrader, g4bl, exec_d4_d5):
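  """Build '+', '-' and combined histograms for one degrader setting from
  simulated truth trees and write them to a new ROOT file.

  With g4bl set, both charges are filled from a single g4bl truth file;
  otherwise one file per charge is read and the negative-charge histogram is
  weighted by the g4bl decay ratio when forming the combined histogram.
  exec_d4_d5 is passed straight through to the fill routines.
  """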
  print "Starting simulation of", degrader, "g4bl:", g4bl
  if g4bl:
    out_file_name = out_sim_g4bl_file_fmt.format(degrader=degrader)
  else:
    out_file_name = out_sim_file_fmt.format(degrader=degrader)
  
  out_file = TFile(out_file_name, "RECREATE")
  
  charges = {"+": "pos_" + degrader, "-": "neg_" + degrader}
  hists = {c: make_hist(charges[c], **hist_settings) for c in charges}
  hists['combined'] = make_hist("combined_" + degrader, **hist_settings)
  
  if g4bl:
    in_file_name = in_sim_g4bl_file_fmt.format(degrader=degrader)
    tree = get_tree_from_file("truth", in_file_name)
    fill_g4bl_hists(tree, hists["+"], hists["-"], exec_d4_d5)
    hists['combined'].Add(hists['+'], hists['-'])
  else:
    neg_pos_ratio = get_g4bl_decay_ratio(degrader)
    for c in charges:
      in_file_name = in_sim_file_fmt.format(charge=c, degrader=degrader)
      tree = get_tree_from_file("truth", in_file_name)
      fill_sim_hist(tree, hists[c], c, exec_d4_d5)
    # combined = 1.0 * pos + neg_pos_ratio * neg: weight the negative-charge
    # histogram by the decay ratio taken from the g4bl simulation
    hists['combined'].Add(hists['+'], hists['-'], 1.0, neg_pos_ratio)
    
  save_file(out_file)
def get_hist(Z, A, n_entries=1e6):
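  """Return a time histogram for the given Z and A, filled with n_entries
  samples drawn from get_delay(Z, A)."""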
  name = "Z={Z} A={A}".format(Z=Z, A=A)
  axis_titles = ("Time (ns)", "Count")
  hist = make_hist(name, 0, 5000, axis_titles, 500)
  for i in xrange(int(n_entries)):
    hist.Fill(get_delay(Z,A))
  return hist
def make_ch_hists(tree, channels, l_bound=-20000, u_bound=20000, bins=400, titles=("TDC - TDC0 (ns)", "Count")):
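    """Return a dict mapping each channel to a TDC - TDC0 time-difference
    histogram, tagged with the tree's file_id and the channel."""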
    res = {}
    for ch in channels:
        name = "dt_file:{}_ch:{}".format(tree.file_id, ch)
        res[ch] = make_hist(name, mins=l_bound, maxs=u_bound, bins=bins, titles=titles)
        res[ch].file_id = tree.file_id
        res[ch].ch = ch
    return res
def make_count_hist(name, target, data_dict):
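  """Return a histogram of the muon count (sum_integrals[target]) for each
  file in data_dict, with one bin per file ordered by degrader thickness
  (deg_dz) and labelled with that thickness."""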
  titles = ("Degrader", "Muon count")
  res = make_hist(name, mins=0, maxs=len(data_dict), bins=len(data_dict), titles=titles)
  
  dz_ordered_keys = [(k,v.deg_dz) for k,v in data_dict.items()]
  # sort by degrader thickness
  dz_ordered_keys.sort(key=lambda x:x[1])
  for bin_id, (file_id, degrader) in enumerate(dz_ordered_keys, 1):
    count = data_dict[file_id].sum_integrals[target]
    set_hist_bin_contents_and_er(res, bin_id, count, name=str(degrader))
  return res
def create_histogram_from_tree(name, tree, bin_width, mu_type, fast=False):
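  """Return a time histogram of the mu-e time differences in the tree, binned
  with the requested bin_width over [-20000, 20000] ns. With fast=True only
  the first ~2000 entries are processed."""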
  lb, ub = -20000, 20000
  nbins = int((ub - lb) / bin_width)  # ROOT expects an integer bin count
  hist = make_hist(name, mins=lb, maxs=ub, titles=("Time (ns)", "Count"), bins=nbins)
  hist.bin_width = bin_width
  for entry_id, entry in enumerate(tree):
    if fast and entry_id > 2000:  # fast mode: stop after the first ~2000 entries
      break
    time = get_mu_e_dt_for_entry(entry, mu_type)
    for t in time:
      hist.Fill(t)
    # if time:
    #   hist.Fill(time)
  return hist
def generate_data_histograms(run_id):
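  """Produce per-channel histograms for one data run and save them to a new
  ROOT file."""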
  # Open the input file & init the tree
  print "Starting run", run_id
  in_file_name = in_data_file_fmt.format(run_id=run_id)
  tree = get_tree_from_file("Trigger", in_file_name)
  assign_leaves(tree, [c[0] for c in channels]) # pull out just the channel names from the channel tuples
  
  # Create the file to write to
  out_file_name = out_data_file_fmt.format(run_id=run_id)
  out_file = TFile(out_file_name, "RECREATE")
  
  # Make the histograms & fill them
  hists = {ch[0]:make_hist(ch[0], **hist_settings) for ch in channels}
  fill_data_hists(tree, hists)
  save_file(out_file)
def get_sec_count_hist_for_file(data, file_id):
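    """Return a histogram of SEC count versus time for one file; data maps a
    time to the count recorded at that time."""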
    name = "SEC count, run: %i"%int(file_id)
    # half the minimum time was intended as the lower edge to align the bins
    # nicely, but the histogram below actually starts at 0 (xmin is unused)
    xmin = float(min(data.keys()))/2.0
    # max returns the largest time, which is what we want for the upper edge
    xmax = max(data.keys())
    # the time divisions are not exactly constant, but the resulting binning
    # error is very small
    xbins = len(data)
    titles = ("Time (ns)", "SEC count")
    res = make_hist(name, 0, xmax, titles, xbins)
    times = data.keys()
    times.sort()
    for time in times:
        val = data[time]
        res.Fill(time, val)
    return res
def get_potential_trigger_count_hist_for_file(data, file_id):
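    """Return a histogram of potential trigger count versus time for one file;
    data maps a (time, error) key to a tuple whose third element is the
    count."""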
    name = "Potential trigger count, run: %i"%int(file_id)
    # half the minimum time was intended as the lower edge to align the bins
    # nicely, but the histogram below actually starts at 0 (xmin is unused)
    xmin = float(min(data.keys(), key=lambda x: x[0])[0])/2
    # max will return the time,er pair with largest time; which is what we want
    xmax = max(data.keys(),key=lambda x:x[0])[0]
    # the time divisions are not exactly constant, but the resulting binning
    # error is very small
    xbins = len(data)
    titles = ("Time (ns)", "Trigger Count")
    # res = make_hist(name, xmin, xmax, titles, xbins)
    res = make_hist(name, 0, xmax, titles, xbins)
    times = data.keys()
    times.sort(key=lambda x:x[0])
    for time in times:
        # index 2 of the value tuple holds the potential-trigger count
        val = data[time][2]
        res.Fill(time[0], val)
    return res