Example #1
from ROOT import TFile, TGraphErrors, TH1F, TH2D, TObject
from management.soudan_database import SoudanServer

def analyze_pulser_data(outputfile='temp.root', force_overwrite=False):
    server = SoudanServer()
    file_list = server.get_accepted_runs()

    time_list = []
    mean_list = []
    sigma_list = []
    # collect, for each accepted run, the pulser mean and sigma (with errors)
    # together with the run's start time relative to the first run
    for record in file_list:
        print record
        rundoc = server.get_run(record.id)
        if len(time_list) == 0:
            first_time = rundoc.time_of_start_of_run
        time_list.append(rundoc.time_of_start_of_run - first_time)
        pd = rundoc.pulser_data
        mean_list.append((pd.mean, pd.mean_err))
        sigma_list.append((pd.sigma, pd.sigma_err))

    file_to_output = TFile(outputfile, 'recreate')
    objects_to_write = []
    # generate final plots
    list_to_analyze = [ ("Mean of pulser signal", "Mean (keV)", mean_list),\
                        ("Sigma of pulser signal", "Sigma (keV)", sigma_list) ]
    for name, axis_name, data_list in list_to_analyze:
         
        file_to_output.cd()
        new_graph = TGraphErrors(len(data_list))
        new_graph.SetNameTitle(name.replace(' ' , ''),\
                               name.replace(' ' , ''))
        new_hist = TH1F(name.replace(' ', '') + "hist",\
                        name, 100, time_list[0].days, \
                        time_list[-1].days + 1)
        new_hist.GetXaxis().SetTitle("Run start time (days)")
        new_hist.GetYaxis().SetTitle(axis_name)
        new_hist.GetYaxis().SetTitleOffset(1.17)

        maximum = data_list[0][0]
        minimum = data_list[0][0]
        for i in range(len(data_list)):
            new_graph.SetPoint(i, time_list[i].days + time_list[i].seconds/(24*3600.),\
                               data_list[i][0])
            new_graph.SetPointError(i, 0,\
                               data_list[i][1])
            if minimum > data_list[i][0]: minimum = data_list[i][0]
            if maximum < data_list[i][0]: maximum = data_list[i][0]

        ten_percent = (maximum - minimum)*0.1
        new_hist.SetMaximum(maximum + ten_percent)
        new_hist.SetMinimum(minimum - ten_percent)
        #new_graph.SetHistogram(new_hist)
        objects_to_write.append(new_graph)
        objects_to_write.append(new_hist)

    file_to_output.cd()
    for obj in objects_to_write:
        obj.Write(obj.GetName(), TObject.kOverwrite)
    file_to_output.Close()
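# A minimal sketch of the time-axis conversion used above, factored into a
# helper purely for illustration (the helper name is not part of the original
# module): each graph point sits at the run's start time in fractional days
# since the first accepted run.
def fractional_days(delta):
    # convert a datetime.timedelta to fractional days, ignoring microseconds,
    # exactly as the inline expression delta.days + delta.seconds/(24*3600.) does
    return delta.days + delta.seconds / (24 * 3600.)

# e.g. fractional_days(datetime.timedelta(days=2, seconds=43200)) == 2.5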
def analyze_risetime_vs_energy(outputfile='temp.root'):

    # channel -> (energy calibration string, tier-3 file attribute, maximum energy in keV)
    channels_to_analyze = {0 : ("*0.0125749", "low_energy", 80),
                           8 : ("*0.602356 -0.303815", "high_energy", 2800)}
    histograms = {}

    server = SoudanServer()
    file_list = server.get_accepted_runs()
    
    hist_2d = None
    file_to_output = TFile(outputfile, 'recreate')
    objects_to_write = []
    for chan, (energy_scale, pfn_dir, maximum) in channels_to_analyze.items():
        histograms[chan] = TH2D("RiseTimeVsEnergyChannel%i" % chan,
                                "RiseTimeVsEnergyChannel%i" % chan,
                                400, 0, maximum, 100, 0, 10)
        hist_2d = histograms[chan]
        hist_2d.GetXaxis().SetTitle("Energy (keV)")
        hist_2d.GetYaxis().SetTitle("10-90 Risetime (#mus)")
        objects_to_write.append(hist_2d)
        

    for record in file_list:
        print record
        rundoc = server.get_run(record.id)
        # average baseline fit constant and rms for this run, used in the
        # fitConstant cut below
        real_value = rundoc.baseline_dict.average_fit_constant
        real_value_rms = rundoc.baseline_dict.average_fit_rms
        for chan, tmp_tuple in channels_to_analyze.items():
            (energy_scaler, pfn_dir, max_energy) = tmp_tuple
            file_name = getattr(rundoc.output_data_file_tier_3, pfn_dir).pfn
            open_file = TFile(file_name)
            main_tree = open_file.Get("wf_analysis") 
            coinc_tree = open_file.Get("event_coincidence") 
            main_tree.AddFriend(coinc_tree)
            file_to_output.cd()
            hist_2d = histograms[chan]
            main_tree.Draw("(endRiseTime-startRiseTime)/100:(-energy%s)>> +%s" % \
               (energy_scaler,hist_2d.GetName()), \
               "channel==%i && !(event_coincidence.coincidence & 0x280) && \
                lastInhibitTimeDif > 1e5 && abs(fitConstant-%f) <= 3*%f" % \
               (chan, real_value, real_value_rms), "goff")
            open_file.Close()

    file_to_output.cd()
    for obj in objects_to_write:
        obj.Write(obj.GetName(), TObject.kOverwrite)

    file_to_output.Close()
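# Neither analysis function above is shown being invoked; a plausible driver,
# assuming both live in the same module (the output file names and the ROOT
# batch-mode call are illustrative choices, not taken from the original code):
if __name__ == '__main__':
    from ROOT import gROOT
    gROOT.SetBatch(True)   # suppress graphics windows while histograms are filled
    analyze_pulser_data(outputfile='pulser_monitor.root')
    analyze_risetime_vs_energy(outputfile='risetime_vs_energy.root')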
from management.soudan_database import SoudanServer

server = SoudanServer()

# loop over all the accepted runs
for record in server.get_accepted_runs():
    doc = server.get_run(record.id)
    print "Run number: %s, gretina file: %s" %(doc._get_id(), doc.root_data_file_tier_1.pfn)
Example #4
from ROOT import TFile, TGraph, TGraphErrors, TH1D, TH1F, TObject
from management.soudan_database import SoudanServer

def analyze_channel_rates(outputfile='temp.root', force_overwrite=False):
    server = SoudanServer()
    file_list = server.get_accepted_runs()
    channel_to_analyze = { 0 : "Low-energy channel",
                           1 : "Low-energy channel trigger",
                           2 : "Muon veto",
                           7 : "Pulser channel",
                           8 : "High-energy channel",
                           9 : "Reset inhibit"}

    
    #c1 = TCanvas()
    graph_list = {}
    time_list = []
    for channel in channel_to_analyze.keys(): graph_list[channel] = []
    for record in file_list:
        print record
        rundoc = server.get_run(record.id)
        open_file = TFile(rundoc.output_data_file_tier_2.pfn)
        if len(time_list) == 0:
            first_time = rundoc.time_of_start_of_run
        time_list.append(rundoc.time_of_start_of_run - first_time)
        main_tree = open_file.Get("wf_analysis") 
        for channel in channel_to_analyze.keys():
            # TTree::Draw with the "goff" option returns the number of entries passing the cut
            num_events = main_tree.Draw(">> eventList", "channel==%i" % channel, "goff")
            graph_list[channel].append(num_events/float(rundoc.livetime.run_seconds))
        open_file.Close()

    file_to_output = TFile(outputfile, 'recreate')
    objects_to_write = []
    # generate final plots
    last_time = time_list[-1]
    for channel, data_list in graph_list.items():
         
        file_to_output.cd()
        new_graph = TGraph(len(data_list))
        new_graph.SetNameTitle(channel_to_analyze[channel].replace(' ' , ''),\
                               channel_to_analyze[channel].replace(' ' , ''))
        new_hist = TH1D(channel_to_analyze[channel].replace(' ', '') + "hist",\
                        channel_to_analyze[channel], 100, 0, \
                        last_time.days + 1)
        new_hist.GetXaxis().SetTitle("Run start time (days)")
        new_hist.GetYaxis().SetTitle("Rate (Hz)")

        maximum = data_list[0]
        minimum = data_list[0]
        for i in range(len(data_list)):
            new_graph.SetPoint(i, time_list[i].days + time_list[i].seconds/(24*3600.),\
                               data_list[i])
            if minimum > data_list[i]: minimum = data_list[i]
            if maximum < data_list[i]: maximum = data_list[i]

        ten_percent = (maximum - minimum)*0.1
        new_hist.SetMaximum(maximum + ten_percent)
        new_hist.SetMinimum(minimum - ten_percent)
        #new_graph.SetHistogram(new_hist)
        objects_to_write.append(new_graph)
        objects_to_write.append(new_hist)

    file_to_output.cd()
    for obj in objects_to_write:
        obj.Write(obj.GetName(), TObject.kOverwrite)
    file_to_output.Close()
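# The per-channel counts in analyze_channel_rates come from the return value of
# TTree::Draw with the "goff" option.  An equivalent count can be obtained from
# TTree::GetEntries with a selection string; a hypothetical helper, not part of
# the original module:
def count_channel_events(tree, channel):
    # number of entries recorded on the given channel, without building a TEventList
    return tree.GetEntries("channel==%i" % channel)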
def analyze_trigger_efficiency(outputfile='temp.root', force_overwrite=False):
    server = SoudanServer()
    file_list = server.get_accepted_runs()

    time_list = []
    scaling = []
    offset = []
    for record in file_list:
        print record
        rundoc = server.get_run(record.id)
        if len(time_list) == 0:
            first_time = rundoc.time_of_start_of_run
        time_list.append(rundoc.time_of_start_of_run - first_time)
        scaling.append((rundoc.trigger_efficiency.scaling, \
                       rundoc.trigger_efficiency.scaling_err))
        offset.append((rundoc.trigger_efficiency.offset, \
                       rundoc.trigger_efficiency.offset_err))

    file_to_output = TFile(outputfile, 'recreate')
    objects_to_write = []
    # generate final plots
    # skip any leading runs for which the trigger-efficiency fit has no scaling value
    first_data_point = 0
    for i in range(len(scaling)):
        if scaling[i][0]:
            first_data_point = i
            break

    time_list = time_list[first_data_point:]
    list_to_analyze = [ ("Scaling", "Scaling (keV^{-1})", scaling[first_data_point:]),
                        ("Offset", "Offset (keV)", offset[first_data_point:]) ]
    for name, axis_name, data_list in list_to_analyze:
         
        file_to_output.cd()
        new_graph = TGraphErrors(len(data_list))
        new_graph.SetNameTitle(name.replace(' ' , ''),\
                               name.replace(' ' , ''))
        new_hist = TH1F(name.replace(' ', '') + "hist",\
                        name, 100, time_list[0].days, \
                        time_list[-1].days + 1)
        new_hist.GetXaxis().SetTitle("Run start time (days)")
        new_hist.GetYaxis().SetTitle(axis_name)
        new_hist.GetYaxis().SetTitleOffset(1.17)

        maximum = data_list[0][0]
        minimum = data_list[0][0]
        for i in range(len(data_list)):
            new_graph.SetPoint(i, time_list[i].days + time_list[i].seconds/(24*3600.),\
                               data_list[i][0])
            new_graph.SetPointError(i, 0,\
                               data_list[i][1])
            if minimum > data_list[i][0]: minimum = data_list[i][0]
            if maximum < data_list[i][0]: maximum = data_list[i][0]

        ten_percent = (maximum - minimum)*0.1
        new_hist.SetMaximum(maximum + ten_percent)
        new_hist.SetMinimum(minimum - ten_percent)
        #new_graph.SetHistogram(new_hist)
        objects_to_write.append(new_graph)
        objects_to_write.append(new_hist)

    file_to_output.cd()
    for obj in objects_to_write:
        obj.Write(obj.GetName(), TObject.kOverwrite)
    file_to_output.Close()
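# The leading-run search that sets first_data_point above can also be written
# as a small helper with a generator expression (the name is illustrative; the
# explicit loop in analyze_trigger_efficiency is behaviour-equivalent):
def first_nonzero_index(values):
    # index of the first (value, error) pair whose value is non-zero, or 0 if none is
    return next((i for i, pair in enumerate(values) if pair[0]), 0)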