Example #1
0
def generate_templates(inputfilepath, series, filename_noise_psds, vec_r_lim,
                       mat_theta_lim, filename_templates):
    """Build NxM pulse templates from triggered events and save them.

    Reads every 'Trigger' event from the given series, feeds the raw traces
    into a TemplateGeneratorNxM binned in (r, theta) by vec_r_lim /
    mat_theta_lim, then writes the resulting templates and draws diagnostics.

    Parameters:
        inputfilepath: directory containing the raw data files.
        series: series identifier(s) passed to DataReader.OpenFile.
        filename_noise_psds: file with the noise PSD matrix V.
        vec_r_lim: radial bin limits for template partitioning.
        mat_theta_lim: per-radial-bin angular limits.
        filename_templates: output file for the generated templates.
    """
    print('generate_templates')

    # Accept essentially the full energy range; partitioning is by (r, theta).
    E_min = 0.
    E_max = 1E12

    V = get_noise_psds(filename_noise_psds)

    gen = TemplateGeneratorNxM(V, calc_r, calc_theta, E_min, E_max, vec_r_lim,
                               mat_theta_lim)

    dr = DataReader()
    dr.OpenFile(inputfilepath, series, 0)

    event_count = 0

    while dr.LoadEvent(trigger='Trigger'):
        gen.IncludeEvent(dr.GetTraces())
        event_count += 1

        # Fixed: was `== 1`, inconsistent with every other stage's `== 0`
        # progress monitor (it printed after events 1, STEP_MONITOR+1, ...).
        if event_count % STEP_MONITOR == 0:
            print('Event', event_count)

    dr.CloseFile()

    templates = gen.GetTemplates()

    # isinstance() instead of type() == list (handles subclasses, idiomatic).
    if isinstance(templates, list):
        map_bins_part = gen.GetMapBinsPart()
        save_templates_nxm(templates, E_min, E_max, map_bins_part,
                           filename_templates)

    gen.Draw(PATH + '/png')
Example #2
0
def generate_noise_psds(inputfilepath, series, filename_data_noise,
                        filename_noise_psds):
    """Accumulate noise PSDs from pulse-free beginning-of-run events.

    Loads 'BOR' events, rejects any whose channel-summed trace shows a
    tilted/elevated baseline or contains a detected peak, feeds the clean
    traces into a NoisePSDGenerator, and saves the resulting PSD list.

    Parameters:
        inputfilepath: directory containing the raw data files.
        series: series identifier(s) passed to DataReader.OpenFile.
        filename_data_noise: currently unused; kept for interface
            compatibility with callers.
        filename_noise_psds: output file for the calculated noise PSDs.
    """
    print('generate_noise_psds')

    gen = NoisePSDGenerator(NUM_CHANNELS, NUM_BINS_T)

    dr = DataReader()
    dr.OpenFile(inputfilepath, series, 0)

    event_count = 0

    while dr.LoadEvent(trigger='BOR'):
        traces = dr.GetTraces()
        summed = np.sum(np.asarray(traces), axis=0)

        # Hoist the window means: the original recomputed
        # np.mean(summed[0:3000]) up to five times per event.
        pre = np.mean(summed[0:3000])
        mid_a = np.mean(summed[3000:10000])
        mid_b = np.mean(summed[10000:15000])
        tail = np.mean(summed[30000:32000])

        # Reject events whose pre-trigger baseline sits above the rest of
        # the trace (likely the falling edge of an earlier pulse).
        if pre > mid_a and pre > mid_b and pre > tail:
            continue
        # Reject tilted baselines: > 5% mismatch between start and end.
        if pre > 1.05 * tail or 1.05 * pre < tail:
            continue

        # .transpose() is a no-op on the 1-D summed trace; kept for parity
        # with the other stages.
        peaks, _ = find_peaks(summed.transpose(), prominence=1, width=20)

        # Only truly peak-free traces contribute to the noise estimate.
        if len(peaks) == 0:
            gen.IncludeEvent(traces)
            event_count += 1
            if event_count % STEP_MONITOR == 0:
                print('Event %d' % event_count)

    dr.CloseFile()

    noise_psds = gen.CalculateNoisePSDs()

    # isinstance() instead of type() == list (handles subclasses, idiomatic).
    if isinstance(noise_psds, list):
        save_noise_psds(noise_psds, filename_noise_psds)

    gen.Draw(PATH + '/png')
Example #3
0
def preview_data(inputfilepath, series, filename_noise_psds, stat_estimate):
    """Preview the (theta, r) distribution of single-pulse events.

    For each triggered event with exactly one detected pulse, estimates
    per-channel amplitudes with a Wiener filter, accumulates (theta, r)
    points, and draws them together with the bin-partition lines suggested
    by estimate_bins_part.

    Parameters:
        inputfilepath: directory containing the raw data files.
        series: series identifier(s) passed to DataReader.OpenFile.
        filename_noise_psds: file with the noise PSD matrix V.
        stat_estimate: target statistics per bin for estimate_bins_part.
    """
    print('preview_data')

    # Accept essentially the full energy range for the preview.
    E_min = 0.
    E_max = 1E12

    V = get_noise_psds(filename_noise_psds)

    vd = vector_distribution()

    dr = DataReader()
    dr.OpenFile(inputfilepath, series)

    event_count = 0

    while dr.LoadEvent(trigger='Trigger'):
        S = dr.GetTraces()
        dataS = np.sum(np.asarray(S), axis=0)
        # After conversion to amperes the numbers are too small for
        # find_peaks to work reliably, so rescale before searching.
        dataS = dataS * 1E7
        peaks, _ = find_peaks(dataS.transpose(),
                              prominence=1,
                              width=200)
        # Keep only single-pulse events (was `== 0 or > 1`).
        if len(peaks) != 1:
            continue

        # Diagonal of the PSD matrix: per-channel noise power for the
        # scipy Wiener filter.
        noise_w = np.array([V[a][a] for a in range(len(S))])

        amps = [
            sum(wiener(S[a], mysize=75, noise=noise_w[a].real))
            for a in range(len(S))
        ]
        event_count += 1

        if event_count % STEP_MONITOR == 0:
            print('Event', event_count)

        # Hoisted: calc_r(amps) was evaluated twice per accepted event.
        r = calc_r(amps)
        if r > 5:
            continue

        E = sum(amps)

        if E_min < E < E_max:
            vd.add(get_angle_std(calc_theta(amps)), r)

    dr.CloseFile()

    if vd.get_size() > 0:
        graph = TGraph(vd.get_size(), vd.get_array_x(), vd.get_array_y())

        lines = estimate_bins_part(stat_estimate, vd)
        for line in lines:
            line.SetLineColor(15)  # grey partition lines

        limits_x = [-math.pi, math.pi]
        # Build the Y1 list once instead of twice (min and max).
        y_values = [line.GetY1() for line in lines]
        limits_y = [min(y_values), max(y_values)]
        filename = PATH + '/png/preview_data.png'
        draw_graphs([graph], [2], 0.5, '#theta', 'r', limits_x, limits_y,
                    filename, lines)

        graph.Delete()
Example #4
0
def apply_filters(filename_noise_psds, filename_templates, inputfilepath,
                  series, filename_root):
    """Run the NxM optimal filter over a series and write results to ROOT.

    Selects clean single-pulse triggered events (flat baseline, exactly one
    peak, half-height width <= 2000 bins), subtracts the per-channel
    pre-trigger baseline, fits each event with OFManagerNxM, and fills a
    'NxM' tree with t0 / chisq / E (sentinel -999999.0 on failed fits).

    Parameters:
        filename_noise_psds: file with the noise PSD matrix V.
        filename_templates: file with templates and partition metadata.
        inputfilepath: directory containing the raw data files.
        series: series identifier(s) passed to DataReader.OpenFile.
        filename_root: output ROOT file (opened with 'recreate').
    """
    print('apply_filters')

    V = get_noise_psds(filename_noise_psds)
    templates, E_min, E_max, map_bins_part = get_templates_nxm(
        filename_templates)

    man = OFManagerNxM(DT, T_PRE, templates, V, calc_r, calc_theta, E_min,
                       E_max, map_bins_part)

    dr = DataReader()
    dr.OpenFile(inputfilepath, series, 0)

    tm_nxm = tree_manager('NxM')

    tm_nxm.Branch('t0')
    tm_nxm.Branch('chisq')
    tm_nxm.Branch('E')

    event_count = 0

    while dr.LoadEvent(trigger='Trigger'):
        S = dr.GetTraces()
        dataS = np.sum(np.asarray(S), axis=0)

        # Hoist the window means: the original recomputed
        # np.mean(dataS[0:3000]) up to five times per event.
        pre = np.mean(dataS[0:3000])
        mid_a = np.mean(dataS[3000:10000])
        mid_b = np.mean(dataS[10000:15000])
        tail = np.mean(dataS[30000:32000])

        # Reject events whose pre-trigger baseline sits above the rest of
        # the trace (likely the falling edge of an earlier pulse).
        if pre > mid_a and pre > mid_b and pre > tail:
            continue
        # Reject tilted baselines: > 5% mismatch between start and end.
        if pre > 1.05 * tail or 1.05 * pre < tail:
            continue

        # Rescale so find_peaks works on the tiny post-conversion values.
        dataS = dataS * 1E7
        peaks, properties = find_peaks(dataS.transpose(),
                                       prominence=1,
                                       width=20)
        width_half = peak_widths(dataS.transpose(), peaks, rel_height=0.5)

        # Fixed bug: `any(width_half) > 2000` compared a bool to 2000 and
        # was always False, so the width cut never fired. peak_widths
        # returns (widths, heights, left_ips, right_ips); test the widths.
        if len(peaks) != 1 or np.any(np.asarray(width_half[0]) > 2000):
            continue

        # Subtract the per-channel pre-trigger baseline (first 5000 bins).
        avg = np.mean(np.array(S)[:, 0:5000], axis=1)
        for i in range(len(S)):
            S[i] = S[i] - avg[i]

        result = man.ProcessEvent(S)

        event_count += 1

        if event_count % STEP_MONITOR == 0:
            print('Event', event_count)
            man.Draw(PATH + '/png', event_count)

        # isinstance() instead of type() == dict.
        if isinstance(result, dict):
            tm_nxm['t0'] = result['t0']
            tm_nxm['chisq'] = result['chisq']
            tm_nxm['E'] = result['E']
        else:
            # Sentinel values mark events the optimal filter could not fit.
            tm_nxm['t0'] = -999999.0
            tm_nxm['chisq'] = -999999.0
            tm_nxm['E'] = -999999.0

        tm_nxm.Fill()

    dr.CloseFile()

    filepointer = TFile(filename_root, 'recreate')
    tm_nxm.Write()
    filepointer.Close()
Example #5
0
integral = []

# path to directory containing data files
filepath = '/gpfs/slac/staas/fs1/g/supercdms//data/CDMS/SLAC/R56/Raw/09190602_1927'

# specifies series to be analyzed
series = ["09190602_1927_F00" + str(i) + ".mid.gz" for i in range(70, 71)]

dr = DataReader()
dr.OpenFile(filepath, series, 0)

V = get_noise_psds('noise_psds.gz')

while dr.LoadEvent(trigger='Trigger'):
    S = dr.GetTraces()
    noise_w = np.array([V[a][a] for a in range(len(S))])
    dataS = np.asarray(S)
    dataS = np.sum(dataS, axis=0)
    if (np.mean(dataS[0:3000]) > np.mean(dataS[3000:10000])
            and np.mean(dataS[0:3000]) > np.mean(dataS[10000:15000])
            and np.mean(dataS[0:3000]) > np.mean(dataS[30000:32000])):
        continue
    if (np.mean(dataS[0:3000]) > 1.05 * np.mean(dataS[30000:32000])): continue
    if (1.05 * np.mean(dataS[0:3000]) < np.mean(dataS[30000:32000])): continue
    dataS = dataS * 1E7
    peaks, properties = find_peaks(dataS.transpose(), prominence=1, width=20)
    width_half = peak_widths(dataS.transpose(), peaks, rel_height=0.5)

    if (len(peaks) == 0 or len(peaks) > 1 or any(width_half) > 2000):
        continue