def get_merged_bin_resolution(res_file, var, low_bin, high_bin):
    '''
    Return the mean of the fine-bin resolution values contained in the merged bin.
    '''

    bin_contents = []

    f = File(res_file)
    res_hist = f.Get('res_r_' + var).Clone()
    # change scope from file to memory
    res_hist.SetDirectory(0)
    f.close()

    low_bin_n = res_hist.GetXaxis().FindBin(low_bin)
    high_bin_n = res_hist.GetXaxis().FindBin(high_bin)

    for bin_i in range(low_bin_n, high_bin_n + 1):
        bin_content = res_hist.GetBinContent(bin_i)
        # resolution couldn't be reconstructed (high GeV bins with low stats)
        # remove these from list of resolutions
        if bin_content == 0: continue
        bin_contents.append(bin_content)

    # print(bin_contents)
    res = np.mean(bin_contents)
    return res
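# Hedged usage sketch (not part of the original source): the function above
# assumes `from rootpy.io import File` and `import numpy as np` at module
# level.  The file name 'resolutions.root' and the variable 'MET' below are
# illustrative assumptions; the only requirement is that the file contains a
# TH1 named 'res_r_' + var.
mean_res = get_merged_bin_resolution('resolutions.root', 'MET', 50, 100)
print('mean resolution in merged bin [50, 100]: {0}'.format(mean_res))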
Example #3
def read_timespan(filename):
    t_file = File(filename, 'read')
    m = ref_time.match(t_file.get('m_utc_span').GetTitle())
    week1, second1, week2, second2 = int(m.group(1)), int(m.group(2)), int(
        m.group(3)), int(m.group(4))
    t_file.close()
    return (week1 * 604800 + second1, week2 * 604800 + second2)
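# Hedged sketch (illustrative, not from the original module): read_timespan
# above relies on a module-level compiled regex `ref_time`.  Judging by the
# span format used elsewhere in these examples
# ('week:second[entry] => week:second[entry]; n/m'), a plausible definition
# would be the following; the exact pattern in the original file may differ.
import re
ref_time = re.compile(r'(\d+):(\d+)\[\d+\] => (\d+):(\d+)\[\d+\]; \d+/\d+')
# 604800 is the number of seconds in a GPS week, so the returned pair is the
# begin/end of the span expressed in GPS seconds.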
def get_histograms( variable, options ):
    config = XSectionConfig( 13 )

    path_electron = ''
    path_muon = ''
    path_combined = ''    
    histogram_name = ''
    if options.visiblePhaseSpace:
        histogram_name = 'responseVis_without_fakes'
    else :
        histogram_name = 'response_without_fakes'

    if variable == 'HT':
        path_electron = 'unfolding_HT_analyser_electron_channel/%s' % histogram_name
        path_muon = 'unfolding_HT_analyser_muon_channel/%s' % histogram_name
        path_combined = 'unfolding_HT_analyser_COMBINED_channel/%s' % histogram_name

    else :
        path_electron = 'unfolding_%s_analyser_electron_channel_patType1CorrectedPFMet/%s' % ( variable, histogram_name )
        path_muon = 'unfolding_%s_analyser_muon_channel_patType1CorrectedPFMet/%s' % ( variable, histogram_name )
        path_combined = 'unfolding_%s_analyser_COMBINED_channel_patType1CorrectedPFMet/%s' % ( variable, histogram_name )

    histogram_information = [
                {'file': config.unfolding_central_raw,
                 'CoM': 13,
                 'path':path_electron,
                 'channel':'electron'},
                {'file':config.unfolding_central_raw,
                 'CoM': 13,
                 'path':path_muon,
                 'channel':'muon'},
                ]
    
    if options.combined:
        histogram_information = [
                    {'file': config.unfolding_central_raw,
                     'CoM': 13,
                     'path': path_combined,
                     'channel':'combined'},
                    ]

    for histogram in histogram_information:
        f = File( histogram['file'] )
        # scale to lumi
        # nEvents = f.EventFilter.EventCounter.GetBinContent( 1 )  # number of processed events 
        # config = XSectionConfig( histogram['CoM'] )
        # lumiweight = config.ttbar_xsection * config.new_luminosity / nEvents

        lumiweight = 1

        histogram['hist'] = f.Get( histogram['path'] ).Clone()
        histogram['hist'].Scale( lumiweight )
        # change scope from file to memory
        histogram['hist'].SetDirectory( 0 )
        f.close()

    return histogram_information
Example #5
def get_histograms(config, variable, args):
    '''
    Return a list of dictionaries with the unfolding histogram information (including the histogram itself)
    '''
    path_electron = ''
    path_muon = ''
    path_combined = ''
    histogram_name = 'response_without_fakes'
    if args.visiblePhaseSpace:
        histogram_name = 'responseVis_without_fakes'

    path_electron = '%s_electron/%s' % (variable, histogram_name)
    path_muon = '%s_muon/%s' % (variable, histogram_name)
    path_combined = '%s_combined/%s' % (variable, histogram_name)

    histogram_information = [
        {
            'file': config.unfolding_central_raw,
            'CoM': 13,
            'path': path_electron,
            'channel': 'electron'
        },
        {
            'file': config.unfolding_central_raw,
            'CoM': 13,
            'path': path_muon,
            'channel': 'muon'
        },
    ]

    if args.combined:
        histogram_information = [
            {
                'file': config.unfolding_central_raw,
                'CoM': 13,
                'path': path_combined,
                'channel': 'combined'
            },
        ]

    for histogram in histogram_information:
        lumiweight = 1
        f = File(histogram['file'])
        histogram['hist'] = f.Get(histogram['path']).Clone()

        # scale to current lumi
        lumiweight = config.luminosity_scale
        if round(lumiweight, 1) != 1.0:
            print("Scaling to {}".format(lumiweight))
        histogram['hist'].Scale(lumiweight)

        # change scope from file to memory
        histogram['hist'].SetDirectory(0)
        f.close()

    return histogram_information
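# Hedged usage sketch (not from the original script): `args` only needs the
# two boolean attributes read above, so a namedtuple can stand in for the
# argparse namespace; XSectionConfig is assumed to be the same configuration
# class used in the other examples in this collection.
from collections import namedtuple

Args = namedtuple('Args', ['visiblePhaseSpace', 'combined'])
hist_info = get_histograms(XSectionConfig(13), 'HT',
                           Args(visiblePhaseSpace=True, combined=False))
for entry in hist_info:
    print('{0}: {1} entries'.format(entry['channel'], entry['hist'].GetEntries()))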
Example #7
def get_histograms( variable ):
    config_7TeV = XSectionConfig( 7 )
    config_8TeV = XSectionConfig( 8 )
    
    path_electron = ''
    path_muon = ''
    histogram_name = 'response_without_fakes'
    if variable == 'MET':
        path_electron = 'unfolding_MET_analyser_electron_channel_patType1CorrectedPFMet/%s' % histogram_name
        path_muon = 'unfolding_MET_analyser_muon_channel_patType1CorrectedPFMet/%s' % histogram_name
    elif variable == 'HT':
        path_electron = 'unfolding_HT_analyser_electron_channel/%s' % histogram_name
        path_muon = 'unfolding_HT_analyser_muon_channel/%s' % histogram_name
    elif variable == 'ST':
        path_electron = 'unfolding_ST_analyser_electron_channel_patType1CorrectedPFMet/%s' % histogram_name
        path_muon = 'unfolding_ST_analyser_muon_channel_patType1CorrectedPFMet/%s' % histogram_name
    elif variable == 'MT':
        path_electron = 'unfolding_MT_analyser_electron_channel_patType1CorrectedPFMet/%s' % histogram_name
        path_muon = 'unfolding_MT_analyser_muon_channel_patType1CorrectedPFMet/%s' % histogram_name
    elif variable == 'WPT':
        path_electron = 'unfolding_WPT_analyser_electron_channel_patType1CorrectedPFMet/%s' % histogram_name
        path_muon = 'unfolding_WPT_analyser_muon_channel_patType1CorrectedPFMet/%s' % histogram_name
        
    histogram_information = [
                {'file': config_7TeV.unfolding_madgraph_raw,
                 'CoM': 7,
                 'path':path_electron,
                 'channel':'electron'},
                {'file':config_7TeV.unfolding_madgraph_raw,
                 'CoM': 7,
                 'path':path_muon,
                 'channel':'muon'},
                {'file':config_8TeV.unfolding_madgraph_raw,
                 'CoM': 8,
                 'path':path_electron,
                 'channel':'electron'},
                {'file':config_8TeV.unfolding_madgraph_raw,
                 'CoM': 8,
                 'path':path_muon,
                 'channel':'muon'},
                   ]
    
    for histogram in histogram_information:
        f = File( histogram['file'] )
        # scale to lumi
        nEvents = f.EventFilter.EventCounter.GetBinContent( 1 )  # number of processed events 
        config = XSectionConfig( histogram['CoM'] )
        lumiweight = config.ttbar_xsection * config.new_luminosity / nEvents

        histogram['hist'] = f.Get( histogram['path'] ).Clone()
        histogram['hist'].Scale( lumiweight )
        # change scope from file to memory
        histogram['hist'].SetDirectory( 0 )
        f.close()
    return histogram_information
Example #8
def convert_unfolding_histograms(file_name,
                                 histograms_to_load=[
                                     'truth',
                                     'fake',
                                     'measured',
                                     'response',
                                     'response_withoutFakes',
                                     'response_without_fakes',
                                     'EventCounter',
                                 ]):

    file_start = Timer()
    print 'Converting', file_name
    histograms = {}
    with File(file_name) as f:
        for path, _, objects in f.walk():
            # keep only unfolding and EventFilter
            if path.startswith('unfolding_') or path == 'EventFilter':
                histograms[path] = {}
                for hist_name in objects:
                    if hist_name in histograms_to_load:
                        hist = f.Get(path + '/' + hist_name).Clone()
                        hist.SetDirectory(0)
                        histograms[path][hist_name] = hist
    new_histograms = {}
    # rebin
    for path, hists in histograms.iteritems():
        new_histograms[path] = {}
        variable = ''
        if not path == 'EventFilter':
            variable = path.split('_')[1]
        for name, hist in hists.iteritems():
            if name == 'EventCounter':
                new_histograms[path][name] = hist.Clone()
            else:
                new_hist = hist.rebinned(bin_edges_vis[variable])
                if 'TH2' in new_hist.class_name():
                    new_hist = new_hist.rebinned(bin_edges_vis[variable],
                                                 axis=1)
                new_histograms[path][name] = new_hist

    # save_to_file
    output = File(file_name.replace('.root', '_asymmetric.root'), 'recreate')
    for path, hists in new_histograms.iteritems():
        directory = output.mkdir(path)
        directory.cd()
        for name, hist in hists.iteritems():
            if name == 'response_withoutFakes':  # fix this name
                hist.Write('response_without_fakes')
            else:
                hist.Write(name)
    output.close()
    secs = file_start.elapsed_time()
    print 'File %s converted in %d seconds' % (file_name, secs)
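# Hedged usage sketch (not from the original script): the function above also
# depends on module-level objects that are not shown here, namely a Timer
# class providing elapsed_time() and the `bin_edges_vis` dict mapping a
# variable name to its visible-phase-space bin edges.  The input file name is
# an illustrative assumption; the output is written alongside the input as
# '<name>_asymmetric.root'.
convert_unfolding_histograms('unfolding_TTJets_13TeV.root')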
Example #9
def read_timespan(filename, dat_type):
    t_file = File(filename, 'read')
    m = ref_time.match(t_file.get(tnamed_dict[dat_type]).GetTitle())
    week1, second1, week2, second2 = int(m.group(1)), int(m.group(2)), int(
        m.group(3)), int(m.group(4))
    t_file.close()
    time_seconds_begin = week1 * 604800 + second1
    time_seconds_end = week2 * 604800 + second2
    beijing_time_begin = datetime(1980, 1, 6, 0, 0, 0) + timedelta(
        seconds=time_seconds_begin - leap_seconds_dict[dat_type] + 28800)
    beijing_time_end = datetime(1980, 1, 6, 0, 0, 0) + timedelta(
        seconds=time_seconds_end - leap_seconds_dict[dat_type] + 28800)
    return (beijing_time_begin, beijing_time_end)
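# Hedged sketch (illustrative, not from the original module): besides the
# `ref_time` regex, this variant also uses `tnamed_dict`, which maps a data
# type to the TNamed object holding its time span, and `leap_seconds_dict`,
# the GPS-to-UTC leap-second offset per data type.  The entries below are
# hypothetical placeholders; 28800 s in the code above is the UTC+8 (Beijing)
# offset and 1980-01-06 is the GPS epoch.
from datetime import datetime, timedelta

tnamed_dict = {'ppd': 'm_utc_span'}   # hypothetical mapping
leap_seconds_dict = {'ppd': 18}       # hypothetical leap-second count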
Example #10
def main(options, args):
    config = XSectionConfig(options.CoM)
    variables = ['MET', 'HT', 'ST', 'WPT']
    channels = ['electron', 'muon', 'combined']
    m_file = 'normalised_xsection_patType1CorrectedPFMet.txt'
    m_with_errors_file = 'normalised_xsection_patType1CorrectedPFMet_with_errors.txt'
    path_template = args[0]
    output_file = 'measurement_{0}TeV.root'.format(options.CoM)
    f = File(output_file, 'recreate')
    for channel in channels:
        d = f.mkdir(channel)
        d.cd()
        for variable in variables:
            dv = d.mkdir(variable)
            dv.cd()
            if channel == 'combined':
                path = path_template.format(variable=variable,
                                            channel=channel,
                                            centre_of_mass_energy=options.CoM)
            else:
                kv = channel + \
                    '/kv{0}/'.format(config.k_values[channel][variable])
                path = path_template.format(variable=variable,
                                            channel=kv,
                                            centre_of_mass_energy=options.CoM)

            m = read_data_from_JSON(path + '/' + m_file)
            m_with_errors = read_data_from_JSON(path + '/' +
                                                m_with_errors_file)

            for name, result in m.items():
                h = make_histogram(result, bin_edges[variable])
                h.SetName(name)
                h.write()

            for name, result in m_with_errors.items():
                if not 'TTJet' in name:
                    continue
                h = make_histogram(result, bin_edges[variable])
                h.SetName(name + '_with_syst')
                h.write()
            dv.write()
            d.cd()
        d.write()
    f.write()
    f.close()
def main(options, args):
    config = XSectionConfig(options.CoM)
    variables = ['MET', 'HT', 'ST', 'WPT']
    channels = ['electron', 'muon', 'combined']
    m_file = 'normalised_xsection_patType1CorrectedPFMet.txt'
    m_with_errors_file = 'normalised_xsection_patType1CorrectedPFMet_with_errors.txt'
    path_template = args[0]
    output_file = 'measurement_{0}TeV.root'.format(options.CoM)
    f = File(output_file, 'recreate')
    for channel in channels:
        d = f.mkdir(channel)
        d.cd()
        for variable in variables:
            dv = d.mkdir(variable)
            dv.cd()
            if channel == 'combined':
                path = path_template.format(variable=variable,
                                            channel=channel,
                                            centre_of_mass_energy=options.CoM)
            else:
                kv = channel + \
                    '/kv{0}/'.format(config.k_values[channel][variable])
                path = path_template.format(variable=variable,
                                            channel=kv,
                                            centre_of_mass_energy=options.CoM)

            m = read_data_from_JSON(path + '/' + m_file)
            m_with_errors = read_data_from_JSON(
                path + '/' + m_with_errors_file)

            for name, result in m.items():
                h = make_histogram(result, bin_edges_full[variable])
                h.SetName(name)
                h.write()

            for name, result in m_with_errors.items():
                if not 'TTJet' in name:
                    continue
                h = make_histogram(result, bin_edges_full[variable])
                h.SetName(name + '_with_syst')
                h.write()
            dv.write()
            d.cd()
        d.write()
    f.write()
    f.close()
Example #13
def get_histograms(variable, options):
    config = XSectionConfig(13)

    path_electron = ""
    path_muon = ""
    path_combined = ""
    histogram_name = ""
    if options.visiblePhaseSpace:
        histogram_name = "responseVis_without_fakes"
    else:
        histogram_name = "response_without_fakes"

    path_electron = "%s_electron/%s" % (variable, histogram_name)
    path_muon = "%s_muon/%s" % (variable, histogram_name)
    path_combined = "%s_combined/%s" % (variable, histogram_name)

    histogram_information = [
        {"file": config.unfolding_central_raw, "CoM": 13, "path": path_electron, "channel": "electron"},
        {"file": config.unfolding_central_raw, "CoM": 13, "path": path_muon, "channel": "muon"},
    ]

    if options.combined:
        histogram_information = [
            {"file": config.unfolding_central_raw, "CoM": 13, "path": path_combined, "channel": "combined"}
        ]

    for histogram in histogram_information:
        f = File(histogram["file"])
        # scale to lumi
        # nEvents = f.EventFilter.EventCounter.GetBinContent( 1 )  # number of processed events
        # config = XSectionConfig( histogram['CoM'] )
        # lumiweight = config.ttbar_xsection * config.new_luminosity / nEvents

        lumiweight = 1

        histogram["hist"] = f.Get(histogram["path"]).Clone()
        histogram["hist"].Scale(lumiweight)
        # change scope from file to memory
        histogram["hist"].SetDirectory(0)
        f.close()

    return histogram_information
Example #14
                max_count_mat[idx][j] = entry.cnts_ps[j]
                max_time_mat[idx][j] = entry.time_sec
                max_index_mat[idx][j] = it

for idx in xrange(25):
    for j in xrange(64):
        begin_time_mat[idx][j] = max_time_mat[idx][j]
        end_time_mat[idx][j] = max_time_mat[idx][j]
        for t in xrange(1, 15):
            if max_index_mat[idx][j] - t < 0: break
            t_rate[idx].get_entry(max_index_mat[idx][j] - t)
            begin_time_mat[idx][j] = t_rate[idx].time_sec
            if t_rate[idx].cnts_ps[j] < max_count_mat[idx][j] * 0.4: break
        for t in xrange(1, 15):
            if max_index_mat[idx][j] + t > t_rate[idx].get_entries() - 1: break
            t_rate[idx].get_entry(max_index_mat[idx][j] + t)
            if t_rate[idx].cnts_ps[j] < max_count_mat[idx][j] * 0.4: break
            end_time_mat[idx][j] = t_rate[idx].time_sec

# ===============================

t_file_out.cd()
begin_time_mat.Write("begin_time_mat")
end_time_mat.Write("end_time_mat")
max_count_mat.Write("max_count_mat")
max_time_mat.Write("max_time_mat")

t_file_out.close()

t_file_in.close()
Example #15
class ppd_file_r:
    def __init__(self):
        self.t_file_name = ''
        self.t_file_in = None
        self.t_tree_ppd = None
        self.begin_entry = 0
        self.end_entry = 0
        self.utc_time_span = ''
        self.first_utc_time_sec = 0.0
        self.last_utc_time_sec = 0.0
        self.begin_utc_time_sec = 0.0
        self.end_utc_time_sec = 0.0

    def __find_entry(self, utc_time_sec):
        head_entry = -1
        head_entry_found = False
        while head_entry < self.t_tree_ppd.get_entries():
            head_entry += 1
            self.t_tree_ppd.get_entry(head_entry)
            if self.t_tree_ppd.flag_of_pos == 0x55 and self.t_tree_ppd.utc_time_sec > 0:
                head_entry_found = True
                break
        if not head_entry_found:
            return -1
        if utc_time_sec < self.t_tree_ppd.utc_time_sec:
            return -1
        tail_entry = self.t_tree_ppd.get_entries()
        tail_entry_found = False
        while tail_entry >= 0:
            tail_entry -= 1
            self.t_tree_ppd.get_entry(tail_entry)
            if self.t_tree_ppd.flag_of_pos == 0x55 and self.t_tree_ppd.utc_time_sec > 0:
                tail_entry_found = True
                break
        if not tail_entry_found:
            return -1
        if utc_time_sec > self.t_tree_ppd.utc_time_sec:
            return -1
        while tail_entry - head_entry > 1:
            center_entry = int((head_entry + tail_entry) / 2)
            found_valid_center = False
            self.t_tree_ppd.get_entry(center_entry)
            if self.t_tree_ppd.flag_of_pos == 0x55 and self.t_tree_ppd.utc_time_sec > 0:
                found_valid_center = True
            tmp_center_entry = center_entry
            while not found_valid_center and tail_entry - tmp_center_entry > 1:
                tmp_center_entry += 1
                self.t_tree_ppd.get_entry(tmp_center_entry)
                if self.t_tree_ppd.flag_of_pos == 0x55 and self.t_tree_ppd.utc_time_sec > 0:
                    found_valid_center = True
            if not found_valid_center: tmp_center_entry = center_entry
            while not found_valid_center and tmp_center_entry - head_entry > 1:
                tmp_center_entry -= 1
                self.t_tree_ppd.get_entry(tmp_center_entry)
                if self.t_tree_ppd.flag_of_pos == 0x55 and self.t_tree_ppd.utc_time_sec > 0:
                    found_valid_center = True
            if not found_valid_center: break
            if utc_time_sec == self.t_tree_ppd.utc_time_sec:
                return tmp_center_entry
            elif utc_time_sec > self.t_tree_ppd.utc_time_sec:
                head_entry = tmp_center_entry
            else:
                tail_entry = tmp_center_entry
        return tail_entry

    def open_file(self, filename, begin,
                  end):  # cut data by utc time, utc_week:utc_second
        self.t_file_name = basename(filename)
        self.t_file_in = File(filename, 'read')
        self.t_tree_ppd = self.t_file_in.get('t_ppd')
        self.t_tree_ppd.create_buffer()
        self.utc_time_span = self.t_file_in.get('m_utc_span').GetTitle()
        m = re.compile(
            r'(\d+):(\d+)\[\d+\] => (\d+):(\d+)\[\d+\]; \d+/\d+').match(
                self.utc_time_span)
        self.first_utc_time_sec = float(m.group(1)) * 604800 + float(
            m.group(2))
        self.last_utc_time_sec = float(m.group(3)) * 604800 + float(m.group(4))
        if begin != 'begin':
            m = re.compile(r'(\d+):(\d+)').match(begin)
            self.begin_utc_time_sec = float(m.group(1)) * 604800 + float(
                m.group(2))
            if self.begin_utc_time_sec - self.first_utc_time_sec < _MIN_DIFF:
                print 'WARNING: begin utc time is out of range: ' + str(
                    self.begin_utc_time_sec - self.first_utc_time_sec)
                return False
        else:
            self.begin_utc_time_sec = -1
        if end != 'end':
            m = re.compile(r'(\d+):(\d+)').match(end)
            self.end_utc_time_sec = float(m.group(1)) * 604800 + float(
                m.group(2))
            if self.last_utc_time_sec - self.end_utc_time_sec < _MIN_DIFF:
                print 'WARNING: end utc time is out of range: ' + str(
                    self.last_utc_time_sec - self.end_utc_time_sec)
                return False
        else:
            self.end_utc_time_sec = -1
        if self.begin_utc_time_sec > 0 and self.end_utc_time_sec > 0 and self.end_utc_time_sec - self.begin_utc_time_sec < _MIN_DIFF:
            print 'WARNING: time span between begin and end utc time is too small: ' + str(
                self.end_utc_time_sec - self.begin_utc_time_sec)
            return False
        if self.begin_utc_time_sec > 0:
            self.begin_entry = self.__find_entry(self.begin_utc_time_sec)
            if self.begin_entry < 0:
                print "WARNING: cannot find begin entry."
                return False
        else:
            self.begin_entry = 0
        if self.end_utc_time_sec > 0:
            self.end_entry = self.__find_entry(self.end_utc_time_sec)
            if self.end_entry < 0:
                print "WARNING: cannot find end entry."
                return False
        else:
            self.end_entry = self.t_tree_ppd.get_entries()
        return True

    def print_file_info(self):
        actual_start_entry = 0
        for idx in xrange(self.begin_entry, self.end_entry):
            actual_start_entry = idx
            self.t_tree_ppd.get_entry(idx)
            if self.t_tree_ppd.flag_of_pos == 0x55 and self.t_tree_ppd.utc_time_sec > 0:
                break
        actual_begin_utc_time_sec = self.t_tree_ppd.utc_time_sec
        actual_end_entry = 0
        for idx in xrange(self.end_entry - 1, self.begin_entry - 1, -1):
            actual_end_entry = idx
            self.t_tree_ppd.get_entry(idx)
            if self.t_tree_ppd.flag_of_pos == 0x55 and self.t_tree_ppd.utc_time_sec > 0:
                break
        actual_end_utc_time_sec = self.t_tree_ppd.utc_time_sec
        utc_time_span_str = '%d:%d[%d] => %d:%d[%d]' % (
            int(actual_begin_utc_time_sec / 604800),
            int(actual_begin_utc_time_sec % 604800), actual_start_entry,
            int(actual_end_utc_time_sec / 604800),
            int(actual_end_utc_time_sec % 604800), actual_end_entry)
        print self.t_file_name
        print ' - UTC time span: { ' + utc_time_span_str + ' }'

    def close_file(self):
        self.t_file_in.close()
        self.t_file_in = None
        self.t_tree_ppd = None
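# Hedged usage sketch (not from the original script): _MIN_DIFF is assumed to
# be a small module-level constant (a minimum time span in seconds) in the
# original module, and the file name below is illustrative.  open_file()
# accepts either the literal 'begin'/'end' or a 'week:second' string as the
# time cuts.
reader = ppd_file_r()
if reader.open_file('ppd_data.root', 'begin', 'end'):
    reader.print_file_info()
    reader.close_file()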
from rootpy.tree import Tree
from rootpy.matrix import Matrix

if len(sys.argv) < 4:
    print "USAGE: " + basename(sys.argv[0]) + " <time_win_mat.root> <decoded_file.root> <merged_file.root>"
    exit(1)

time_win_filename = sys.argv[1]
decoded_data_filename = sys.argv[2]
merged_filename = sys.argv[3]

t_file_time_win = File(time_win_filename, "read")
begin_time_mat = t_file_time_win.get("begin_time_mat")
end_time_mat   = t_file_time_win.get("end_time_mat")
max_count_mat  = t_file_time_win.get("max_count_mat")
t_file_time_win.close()

t_file_merged_out = File(merged_filename, "recreate")
t_beam_event_tree = Tree("t_beam_event", "Beam Event Data")
t_beam_event_tree.create_branches({
    'type': 'I',
    'trig_accepted': 'B[25]',
    'time_aligned': 'B[25]',
    'pkt_count': 'I',
    'lost_count': 'I',
    'trigger_bit': 'B[1600]',
    'trigger_n': 'I',
    'multiplicity': 'I[25]',
    'energy_adc': 'F[1600]',
    'compress': 'I[25]',
    'common_noise': 'F[25]',
Example #17
from rootpy.interactive import wait
from cooconv import ijtox, ijtoy

if len(sys.argv) < 2:
    print "USAGE: show_adc_per_kev.py <adc_per_kev.root>"
    exit(1)

adc_per_kev_fn = sys.argv[1]
adc_per_kev_file = File(adc_per_kev_fn, 'read')
adc_per_kev = [None for i in xrange(25)]
adc_sigma = [None for i in xrange(25)]
for i in xrange(25):
    adc_per_kev[i] = adc_per_kev_file.get("adc_per_kev_vec_ct_%02d" % (i + 1))
    adc_sigma[i] = adc_per_kev_file.get("adc_sigma_vec_ct_%02d" % (i + 1))

adc_per_kev_file.close()

hist2d_adc_per_kev = Hist2D(40, 0, 40, 40, 0, 40)
hist2d_adc_per_kev.SetDirectory(None)
hist2d_adc_per_kev.SetName("hist2d_adc_per_kev")
hist2d_adc_per_kev.SetTitle("ADC/KeV of 1600 Channels")
hist2d_adc_per_kev.GetXaxis().SetNdivisions(40)
hist2d_adc_per_kev.GetYaxis().SetNdivisions(40)
for i in xrange(40):
    if (i % 8 == 0):
        hist2d_adc_per_kev.GetXaxis().SetBinLabel(i + 1, "%02d" % i)
        hist2d_adc_per_kev.GetYaxis().SetBinLabel(i + 1, "%02d" % i)

hist2d_adc_sigma = Hist2D(40, 0, 40, 40, 0, 40)
hist2d_adc_sigma.SetDirectory(None)
hist2d_adc_sigma.SetName("hist2d_adc_sigma")
Example #18
def save_to_root_file(histograms, file_name):
    output = File(file_name, 'recreate')
    output.cd()
    for histogram in histograms:
        histogram.Write()
    output.close()
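# Hedged usage sketch (not from the original script): rootpy's Hist class is
# used to build two example histograms; the names and binning are arbitrary
# placeholders.
from rootpy.plotting import Hist

h_met = Hist(50, 0, 250, name='h_met')
h_ht = Hist(40, 0, 800, name='h_ht')
save_to_root_file([h_met, h_ht], 'histograms.root')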
Example #19
if len(sys.argv) < 4:
    print "USAGE: " + basename(
        sys.argv[0]
    ) + " <time_win_mat.root> <decoded_file.root> <merged_file.root>"
    exit(1)

time_win_filename = sys.argv[1]
decoded_data_filename = sys.argv[2]
merged_filename = sys.argv[3]

t_file_time_win = File(time_win_filename, "read")
begin_time_mat = t_file_time_win.get("begin_time_mat")
end_time_mat = t_file_time_win.get("end_time_mat")
max_count_mat = t_file_time_win.get("max_count_mat")
t_file_time_win.close()

t_file_decoded_data = File(decoded_data_filename, "read")
t_trigger = t_file_decoded_data.get("t_trigger")
t_trigger.deactivate(['status_bit', 'trig_sig_con_bit'], True)
t_trigger.create_buffer()
t_modules = t_file_decoded_data.get("t_modules")
t_modules.deactivate(['status_bit'], True)
t_modules.create_buffer()

t_file_merged_out = File(merged_filename, "recreate")
t_beam_event = Tree("t_beam_event", "Beam Event Data")
t_beam_event.create_branches({
    'type': 'I',
    'trig_accepted': 'B[25]',
    'time_aligned': 'B[25]',
Example #20
def aux_1m_read_timespan(filename):
    t_file = File(filename, 'read')
    m_oboxship = t_file.get('m_oboxship')
    t_file.close()
    return m_oboxship.GetTitle()
Example #21
class ppd_file_w:
    def __init__(self):
        self.__t_file_out = None
        self.__t_tree_ppd = None

    def open_file(self, filename):
        self.__t_file_out = File(filename, "recreate")
        self.__t_tree_ppd = Tree("t_ppd", "platform parameters data")
        self.__t_tree_ppd.create_branches({
            "pitch_angle"          : "D"     ,
            "yaw_angle"            : "D"     ,
            "roll_angle"           : "D"     ,
            "pitch_angle_v"        : "D"     ,
            "yaw_angle_v"          : "D"     ,
            "roll_angle_v"         : "D"     ,
            "orbit_agl_v"          : "D"     ,
            "longitude"            : "D"     ,
            "latitude"             : "D"     ,
            "geocentric_d"         : "D"     ,
            "ship_time_sec"        : "D"     ,
            "utc_time_sec"         : "D"     ,
            "utc_time_str"         : "C[32]" ,
            "flag_of_pos"          : "I"     ,
            "wgs84_x"              : "D"     ,
            "wgs84_y"              : "D"     ,
            "wgs84_z"              : "D"     ,
            "wgs84_x_v"            : "D"     ,
            "wgs84_y_v"            : "D"     ,
            "wgs84_z_v"            : "D"     ,
            "det_z_lat"            : "D"     ,
            "det_z_lon"            : "D"     ,
            "det_z_ra"             : "D"     ,
            "det_z_dec"            : "D"     ,
            "det_x_lat"            : "D"     ,
            "det_x_lon"            : "D"     ,
            "det_x_ra"             : "D"     ,
            "det_x_dec"            : "D"     ,
            "earth_lat"            : "D"     ,
            "earth_lon"            : "D"     ,
            "earth_ra"             : "D"     ,
            "earth_dec"            : "D"     ,
            "sun_ra"               : "D"     ,
            "sun_dec"              : "D"
            })

    def fill_data(self, ppd_obj):
        self.__t_tree_ppd.pitch_angle     = ppd_obj.pitch_angle
        self.__t_tree_ppd.yaw_angle       = ppd_obj.yaw_angle
        self.__t_tree_ppd.roll_angle      = ppd_obj.roll_angle
        self.__t_tree_ppd.pitch_angle_v   = ppd_obj.pitch_angle_v
        self.__t_tree_ppd.yaw_angle_v     = ppd_obj.yaw_angle_v
        self.__t_tree_ppd.roll_angle_v    = ppd_obj.roll_angle_v
        self.__t_tree_ppd.orbit_agl_v     = ppd_obj.orbit_agl_v
        self.__t_tree_ppd.longitude       = ppd_obj.longitude
        self.__t_tree_ppd.latitude        = ppd_obj.latitude
        self.__t_tree_ppd.geocentric_d    = ppd_obj.geocentric_d
        self.__t_tree_ppd.ship_time_sec   = ppd_obj.ship_time_sec
        self.__t_tree_ppd.utc_time_sec    = ppd_obj.utc_time_sec
        self.__t_tree_ppd.utc_time_str    = str(ppd_obj.utc_time_str)
        self.__t_tree_ppd.flag_of_pos     = ppd_obj.flag_of_pos
        self.__t_tree_ppd.wgs84_x         = ppd_obj.wgs84_x
        self.__t_tree_ppd.wgs84_y         = ppd_obj.wgs84_y
        self.__t_tree_ppd.wgs84_z         = ppd_obj.wgs84_z
        self.__t_tree_ppd.wgs84_x_v       = ppd_obj.wgs84_x_v
        self.__t_tree_ppd.wgs84_y_v       = ppd_obj.wgs84_y_v
        self.__t_tree_ppd.wgs84_z_v       = ppd_obj.wgs84_z_v
        self.__t_tree_ppd.det_z_lat       = ppd_obj.det_z_lat
        self.__t_tree_ppd.det_z_lon       = ppd_obj.det_z_lon
        self.__t_tree_ppd.det_z_ra        = ppd_obj.det_z_ra
        self.__t_tree_ppd.det_z_dec       = ppd_obj.det_z_dec
        self.__t_tree_ppd.det_x_lat       = ppd_obj.det_x_lat
        self.__t_tree_ppd.det_x_lon       = ppd_obj.det_x_lon
        self.__t_tree_ppd.det_x_ra        = ppd_obj.det_x_ra
        self.__t_tree_ppd.det_x_dec       = ppd_obj.det_x_dec
        self.__t_tree_ppd.earth_lat       = ppd_obj.earth_lat
        self.__t_tree_ppd.earth_lon       = ppd_obj.earth_lon
        self.__t_tree_ppd.earth_ra        = ppd_obj.earth_ra
        self.__t_tree_ppd.earth_dec       = ppd_obj.earth_dec
        self.__t_tree_ppd.sun_ra          = ppd_obj.sun_ra
        self.__t_tree_ppd.sun_dec         = ppd_obj.sun_dec
        self.__t_tree_ppd.fill()

    def write_tree(self):
        self.__t_file_out.cd()
        self.__t_tree_ppd.write()

    def write_meta(self, key, value):
        self.__t_file_out.cd()
        ROOT.TNamed(key, value).Write()

    def close_file(self):
        self.__t_file_out.close()
        self.__t_file_out = None
        self.__t_tree_ppd = None
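# Hedged usage sketch (not from the original script): `ppd_records` stands in
# for any iterable of decoded objects exposing the attributes copied in
# fill_data() (pitch_angle ... sun_dec); it is a hypothetical placeholder, as
# is the output file name.
writer = ppd_file_w()
writer.open_file('ppd_output.root')
for ppd_obj in ppd_records:          # hypothetical iterable of PPD records
    writer.fill_data(ppd_obj)
writer.write_tree()
writer.write_meta('dattype', 'PLATFORM PARAMETERS DATA')
writer.close_file()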
def aux_1m_read_timespan(filename):
    t_file = File(filename, 'read')
    m_ibox_gps = t_file.get('m_ibox_gps')
    t_file.close()
    return m_ibox_gps.GetTitle()
def sci_1m_read_timespan(filename):
    t_file = File(filename, 'read')
    m_ped_gps_frm = t_file.get('m_ped_gps_frm')
    t_file.close()
    return m_ped_gps_frm.GetTitle()
Example #24
class sci_trigger_r:
    def __init__(self):
        self.t_file_name = ''
        self.t_file_in = None
        self.t_trigger = None
        self.m_phy_gps = ''
        self.begin_entry = 0
        self.end_entry = 0
        self.first_gps_time_sec = 0.0
        self.last_gps_time_sec = 0.0
        self.begin_gps_time_sec = 0.0
        self.end_gps_time_sec = 0.0
        self.start_week = 0
        self.start_second = 0.0
        self.stop_week = 0
        self.stop_second = 0.0
        self.gps_time_length = 0.0

    def __find_entry(self, gps_time_sec):
        head_entry = -1
        head_entry_found = False
        while head_entry < self.t_trigger.get_entries():
            head_entry += 1
            self.t_trigger.get_entry(head_entry)
            if self.t_trigger.abs_gps_valid:
                head_entry_found = True
                break
        if not head_entry_found:
            return -1
        if gps_time_sec < self.t_trigger.abs_gps_week * 604800 + self.t_trigger.abs_gps_second:
            return -1
        tail_entry = self.t_trigger.get_entries()
        tail_entry_found = False
        while tail_entry >= 0:
            tail_entry -= 1
            self.t_trigger.get_entry(tail_entry)
            if self.t_trigger.abs_gps_valid:
                tail_entry_found = True
                break
        if not tail_entry_found:
            return -1
        if gps_time_sec > self.t_trigger.abs_gps_week * 604800 + self.t_trigger.abs_gps_second:
            return -1
        while tail_entry - head_entry > 1:
            center_entry = int((head_entry + tail_entry) / 2)
            found_valid_center = False
            self.t_trigger.get_entry(center_entry)
            if self.t_trigger.abs_gps_valid:
                found_valid_center = True
            tmp_center_entry = center_entry
            while not found_valid_center and tail_entry - tmp_center_entry > 1:
                tmp_center_entry += 1
                self.t_trigger.get_entry(tmp_center_entry)
                if self.t_trigger.abs_gps_valid:
                    found_valid_center = True
            if not found_valid_center: tmp_center_entry = center_entry
            while not found_valid_center and tmp_center_entry - head_entry > 1:
                tmp_center_entry -= 1
                self.t_trigger.get_entry(tmp_center_entry)
                if self.t_trigger.abs_gps_valid:
                    found_valid_center = True
            if not found_valid_center: break
            if gps_time_sec == self.t_trigger.abs_gps_week * 604800 + self.t_trigger.abs_gps_second:
                return tmp_center_entry
            elif gps_time_sec > self.t_trigger.abs_gps_week * 604800 + self.t_trigger.abs_gps_second:
                head_entry = tmp_center_entry
            else:
                tail_entry = tmp_center_entry
        return tail_entry

    def open_file(self, filename, begin, end):
        self.t_file_name = basename(filename)
        self.t_file_in = File(filename, 'read')
        self.t_trigger = self.t_file_in.get('t_trigger')
        self.t_trigger.activate([
            'abs_gps_week', 'abs_gps_second', 'abs_gps_valid', 'trig_accepted'
        ], True)
        self.t_trigger.create_buffer()
        self.m_phy_gps = self.t_file_in.get('m_phy_gps').GetTitle()
        m = re.compile(
            r'(\d+):(\d+)\[\d+\] => (\d+):(\d+)\[\d+\]; \d+/\d+').match(
                self.m_phy_gps)
        self.first_gps_time_sec = float(m.group(1)) * 604800 + float(
            m.group(2))
        self.last_gps_time_sec = float(m.group(3)) * 604800 + float(m.group(4))
        if begin != 'begin':
            m = re.compile(r'(\d+):(\d+)').match(begin)
            self.begin_gps_time_sec = float(m.group(1)) * 604800 + float(
                m.group(2))
            if self.begin_gps_time_sec - self.first_gps_time_sec < _MIN_DIFF:
                print 'WARNING: begin gps time is out of range: ' + str(
                    self.begin_gps_time_sec - self.first_gps_time_sec)
                return False
        else:
            self.begin_gps_time_sec = -1
        if end != 'end':
            m = re.compile(r'(\d+):(\d+)').match(end)
            self.end_gps_time_sec = float(m.group(1)) * 604800 + float(
                m.group(2))
            if self.last_gps_time_sec - self.end_gps_time_sec < _MIN_DIFF:
                print 'WARNING: end gps time is out of range: ' + str(
                    self.last_gps_time_sec - self.end_gps_time_sec)
                return False
        else:
            self.end_gps_time_sec = -1
        if self.begin_gps_time_sec > 0 and self.end_gps_time_sec > 0 and self.end_gps_time_sec - self.begin_gps_time_sec < _MIN_DIFF:
            print 'WARNING: time span between begin and end gps time is too small: ' + str(
                self.end_gps_time_sec - self.begin_gps_time_sec)
            return False
        if self.begin_gps_time_sec > 0:
            self.begin_entry = self.__find_entry(self.begin_gps_time_sec)
            if self.begin_entry < 0:
                print "WARNING: cannot find begin entry."
                return False
        else:
            self.begin_entry = 0
        if self.end_gps_time_sec > 0:
            self.end_entry = self.__find_entry(self.end_gps_time_sec)
            if self.end_entry < 0:
                print "WARNING: cannot find end entry."
                return False
        else:
            self.end_entry = self.t_trigger.get_entries()
        for idx in xrange(self.begin_entry, self.end_entry):
            self.t_trigger.get_entry(idx)
            if self.t_trigger.abs_gps_valid:
                self.start_week = self.t_trigger.abs_gps_week
                self.start_second = self.t_trigger.abs_gps_second
                break
        for idx in xrange(self.end_entry - 1, self.begin_entry - 1, -1):
            self.t_trigger.get_entry(idx)
            if self.t_trigger.abs_gps_valid:
                self.stop_week = self.t_trigger.abs_gps_week
                self.stop_second = self.t_trigger.abs_gps_second
                break
        self.gps_time_length = (self.stop_week - self.start_week) * 604800 + (
            self.stop_second - self.start_second)
        return True

    def print_file_info(self):
        actual_start_entry = 0
        for idx in xrange(self.begin_entry, self.end_entry):
            actual_start_entry = idx
            self.t_trigger.get_entry(idx)
            if self.t_trigger.abs_gps_valid: break
        actual_begin_gps_week = self.t_trigger.abs_gps_week
        actual_begin_gps_second = self.t_trigger.abs_gps_second
        actual_end_entry = 0
        for idx in xrange(self.end_entry - 1, self.begin_entry - 1, -1):
            actual_end_entry = idx
            self.t_trigger.get_entry(idx)
            if self.t_trigger.abs_gps_valid: break
        actual_end_gps_week = self.t_trigger.abs_gps_week
        actual_end_gps_second = self.t_trigger.abs_gps_second
        gps_time_span_str = '%d:%d[%d] => %d:%d[%d]' % (
            int(actual_begin_gps_week), int(actual_begin_gps_second),
            actual_start_entry, int(actual_end_gps_week),
            int(actual_end_gps_second), actual_end_entry)
        print self.t_file_name
        print ' - GPS time span: { ' + gps_time_span_str + ' }'

    def close_file(self):
        self.t_file_in.close()
        self.t_file_in = None
        self.t_trigger = None
Example #25
def sci_1p_read_timespan(filename):
    t_file = File(filename, 'read')
    m_pedship = t_file.get('m_pedship')
    t_file.close()
    return m_pedship.GetTitle()
Example #27
def ppd_1n_read_timespan(filename):
    t_file = File(filename, 'read')
    m_shipspan = t_file.get('m_shipspan')
    t_file.close()
    return m_shipspan.GetTitle()
        t_tree_ppd.wgs84_z            = float(row[17])
        t_tree_ppd.wgs84_x_v          = float(row[18])
        t_tree_ppd.wgs84_y_v          = float(row[19])
        t_tree_ppd.wgs84_z_v          = float(row[20])
        t_tree_ppd.fill()
        if cur_flag_of_pos != 0x55: continue
        if ship_time_is_first:
            ship_time_is_first  = False
            first_ship_time_sec = cur_ship_time_sec
        last_ship_time_sec = cur_ship_time_sec
        if utc_time_is_first:
            utc_time_is_first  = False
            first_utc_time_sec = cur_utc_time_sec
        last_utc_time_sec = cur_utc_time_sec

dattype        = ROOT.TNamed("dattype", "PLATFORM PARAMETERS DATA")
version        = ROOT.TNamed("version", "PPD_Gen1M.py v1.0.0")
gentime        = ROOT.TNamed("gentime", datetime.now().isoformat() + "+0800")
ship_time_span = ROOT.TNamed("ship_time_span", str(first_ship_time_sec) + " => " + str(last_ship_time_sec))
utc_time_span  = ROOT.TNamed("utc_time_span",  str(first_utc_time_sec) + " => " + str(last_utc_time_sec))

t_file_out.cd()
t_tree_ppd.write()
dattype.Write()
version.Write()
gentime.Write()
ship_time_span.Write()
utc_time_span.Write()
t_file_out.close()