Example #1
def write_dl0_example(filename, data):
    S = Serializer(filename, overwrite=True)

    # Create table
    for container in data:
        S.write(container.dl0)

    # Print table
    print(S._writer.table)

    # Save table to disk
    S.save()
    return S
Example #2
def write_dl1_tel_example(filename, data):

    t38 = data[0].dl1.tel[38]

    S_cal = Serializer(filename, overwrite=True)
    S_cal.write(t38)

    print(S_cal._writer.table)

    # t11_1 = data[1].dl1.tel[11]
    # S_cal.write(t11_1)  # This will not work: the data shape differs from telescope to telescope.

    S_cal.save()
    return S_cal
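The commented-out lines point at the underlying constraint: a table writer needs every row to share one column layout, so containers whose array fields differ in shape cannot be appended to the same table. A standalone numpy sketch of that constraint (the shapes here are made up):

import numpy as np

row_a = np.zeros((2, 30))  # e.g. 2 gain channels, 30 samples
row_b = np.zeros((2, 50))  # same channels, longer trace

try:
    np.stack([row_a, row_b])  # rows must share one shape to stack
except ValueError as err:
    print('cannot mix shapes in one table:', err)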
Example #3
def context_manager_example(filename, data):
    with Serializer(filename, mode='w') as writer:
        for container in data:
            print(container.dl0)
            writer.write(container.dl0)
            print(writer._writer.table)
    return 0
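The `with` form works because the Serializer behaves as a context manager, flushing its table on exit even if the loop raises. A toy illustration of that protocol, deliberately unrelated to the real class:

class ToyWriter:
    """Minimal context-manager writer, for illustration only."""

    def __init__(self, filename):
        self.filename = filename
        self.rows = []

    def write(self, row):
        self.rows.append(row)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc, tb):
        # Flush on exit, whether or not the body raised.
        with open(self.filename, 'w') as f:
            f.writelines(str(row) + '\n' for row in self.rows)

with ToyWriter('rows.txt') as writer:
    writer.write({'event_id': 408})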
Example #4
def test_fits_context_manager(fits_file_name):
    with Serializer(filename=fits_file_name, format='fits', mode='w') as writer:
        for container in input_containers:
            writer.add_container(container.dl0)

    hdulist = fits.open(fits_file_name)
    assert hdulist[1].data["event_id"][0] == 408
    remove(fits_file_name)
Example #5
def test_pickle_iterator():
    serial = Serializer(filename=binary_filename, format='pickle',
                        mode='w')
    # append every event in the input_containers list to the pickle serializer
    for event in input_containers:
        serial.add_container(event)
    serial.close()

    read_containers = []
    reader = PickleSource(filename=binary_filename)
    for container in reader:
        read_containers.append(container)
    # check that the number of Containers read matches the input
    assert len(read_containers) == len(input_containers)
    # check that the 4th ADC value of telescope 17 HI_GAIN is equal
    assert compare(input_containers[2], read_containers[2])
    reader.close()
    remove(binary_filename)
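Iterating a PickleSource, as above, follows the common pickle-stream idiom: one pickle.dump per object on a single handle, read back with pickle.load until EOFError. A stdlib-only sketch of that idiom (not the library's actual implementation; the dict records stand in for real containers):

import pickle

records = [{'event_id': i} for i in (408, 409, 803)]  # stand-in containers

with open('stream.pickle', 'wb') as f:
    for record in records:
        pickle.dump(record, f)  # one object appended per dump

read_back = []
with open('stream.pickle', 'rb') as f:
    while True:
        try:
            read_back.append(pickle.load(f))
        except EOFError:  # raised once the stream is exhausted
            break

assert read_back == records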
Example #6
def test_pickle_with_statement():
    with Serializer(filename=binary_filename, format='pickle', mode='w') as \
            containers_writer:
        for container in input_containers:
            containers_writer.add_container(container)
        containers_writer.close()

    read_containers = []
    with PickleSource(filename=binary_filename) as reader:
        for container in reader:
            read_containers.append(container)
    # check that the number of Containers read matches the input
    assert len(read_containers) == len(input_containers)
    # check that the 4th ADC value of telescope 17 HI_GAIN is equal
    assert compare(input_containers[2], read_containers[2])
    remove(binary_filename)
Example #7
def test_fits_dl0():
    serial = Serializer(filename=fits_file_name, format='fits', mode='w')
    for container in input_containers:
        serial.add_container(container.dl0)
    serial.close()
    hdu = fits.open(fits_file_name)[1]
    assert hdu.data["event_id"][0] == 408
    assert hdu.data["event_id"][1] == 409
    assert hdu.data["event_id"][2] == 803
    assert hdu.data["run_id"][2] == 31964
    remove(fits_file_name)
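fits.open also supports the with-statement, which releases the file handle deterministically instead of leaving it to the garbage collector. A self-contained sketch (the column name and values are illustrative, not taken from the test data):

import numpy as np
from astropy.io import fits

col = fits.Column(name='event_id', format='J',
                  array=np.array([408, 409, 803]))
fits.BinTableHDU.from_columns([col]).writeto('demo.fits', overwrite=True)

with fits.open('demo.fits') as hdulist:
    assert hdulist[1].data['event_id'][0] == 408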
Example #8
def write_dl0_example(filename, data):
    S = Serializer(filename, mode='w')

    # Create table
    for container in data:
        S.write(container.dl0)

    # Print table
    print(S._writer.table)

    # Save table to disk
    S.save()
    return S
Example #9
def write_dl1_tel_example(filename, data):

    t38 = data[0].dl1.tel[38]

    S_cal = Serializer(filename, overwrite=True)
    S_cal.write(t38)

    print(S_cal._writer.table)

    # t11_1 = data[1].dl1.tel[11]
    # S_cal.write(t11_1)  # This will not work: the data shape differs from telescope to telescope.

    S_cal.save()
    return S_cal
Example #10
def test_pickle_iterator(binary_filename):
    serial = Serializer(filename=binary_filename, format='pickle', mode='w')
    # append every event in the input_containers list to the pickle serializer
    for event in input_containers:
        serial.add_container(event)
    serial.close()

    read_containers = []
    reader = PickleSource(filename=binary_filename)
    for container in reader:
        read_containers.append(container)
    # check that the number of Containers read matches the input
    assert len(read_containers) == len(input_containers)
    # check that the 4th ADC value of telescope 17 HI_GAIN is equal
    assert compare(input_containers[2], read_containers[2])
    reader.close()
    remove(binary_filename)
Example #11
def save_to_pickle_gz(event_stream, file, overwrite=False, max_events=None):
    if isfile(file):
        if overwrite:
            print('removing old file', file)
            remove(file)
        else:
            print(file, 'exists, exiting...')
            return
    writer = Serializer(filename=file, format='pickle', mode='w')
    counter_events = 0
    for event in event_stream:
        writer.add_container(event)
        counter_events += 1

        if max_events is not None and counter_events >= max_events:
            break

    writer.close()
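The manual counter that enforces max_events could equally be written with itertools.islice; a small sketch of that variant (the helper name is made up):

from itertools import islice

def limited(stream, max_events=None):
    """Yield at most max_events items; pass the stream through if None."""
    return stream if max_events is None else islice(stream, max_events)

for event in limited(range(10), max_events=3):
    print(event)  # prints 0, 1, 2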
Example #12
def test_exclusive_mode():
    serial = Serializer(filename=fits_file_name, format='fits', mode='w')
    for container in input_containers:
        serial.add_container(container.dl0)
    serial.close()
    # Try to write to fits_file_name in exclusive mode
    with pytest.raises(OSError):
        serial = Serializer(filename=fits_file_name, format='fits', mode='x')
        serial.add_container(input_containers[2].dl0)
        serial.close()
    remove(fits_file_name)
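Mode 'x' mirrors Python's built-in exclusive-creation semantics: opening an existing path raises FileExistsError, a subclass of OSError, which is why pytest.raises(OSError) catches it. A stdlib-only demonstration:

import os
import tempfile

path = os.path.join(tempfile.mkdtemp(), 'demo.txt')

with open(path, 'x') as f:  # succeeds: the file does not exist yet
    f.write('first write\n')

try:
    open(path, 'x')  # fails: the file now exists
except FileExistsError as err:  # FileExistsError subclasses OSError
    print('exclusive mode refused to overwrite:', err)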
Example #13
def main_pipeline(files,
                  aux_basepath,
                  max_events,
                  dark_filename,
                  integral_width,
                  debug,
                  hillas_filename,
                  parameters_filename,
                  picture_threshold,
                  boundary_threshold,
                  template_filename,
                  saturation_threshold,
                  threshold_pulse,
                  bad_pixels=None,
                  disable_bar=False):
    # get configuration
    with open(parameters_filename) as file:
        # Loader is required by recent versions of PyYAML
        calibration_parameters = yaml.load(file, Loader=yaml.SafeLoader)
    if bad_pixels is None:
        bad_pixels = get_bad_pixels(calib_file=parameters_filename,
                                    dark_histo=dark_filename,
                                    plot=None)
    pulse_template = NormalizedPulseTemplate.load(template_filename)
    pulse_area = pulse_template.integral() * u.ns
    ratio = pulse_template.compute_charge_amplitude_ratio(
        integral_width=integral_width, dt_sampling=4)  # ~ 0.24
    gain = np.array(calibration_parameters['gain'])  # ~ 20 LSB / p.e.
    gain_amplitude = gain * ratio
    crosstalk = np.array(calibration_parameters['mu_xt'])
    bias_resistance = 10 * 1E3 * u.Ohm  # 10 kOhm
    cell_capacitance = 50 * 1E-15 * u.Farad  # 50 fF
    geom = DigiCam.geometry
    dark_histo = Histogram1D.load(dark_filename)
    dark_baseline = dark_histo.mean()

    # define pipeline
    events = calibration_event_stream(files,
                                      max_events=max_events,
                                      disable_bar=disable_bar)
    events = add_slow_data_calibration(
        events,
        basepath=aux_basepath,
        aux_services=('DriveSystem', 'DigicamSlowControl', 'MasterSST1M',
                      'SafetyPLC', 'PDPSlowControl'))
    events = baseline.fill_dark_baseline(events, dark_baseline)
    events = baseline.fill_digicam_baseline(events)
    events = tagging.tag_burst_from_moving_average_baseline(events)
    events = baseline.compute_baseline_shift(events)
    events = baseline.subtract_baseline(events)
    events = filters.filter_clocked_trigger(events)
    events = baseline.compute_nsb_rate(events, gain_amplitude, pulse_area,
                                       crosstalk, bias_resistance,
                                       cell_capacitance)
    events = baseline.compute_gain_drop(events, bias_resistance,
                                        cell_capacitance)
    events = peak.find_pulse_with_max(events)
    events = charge.compute_dynamic_charge(
        events,
        integral_width=integral_width,
        saturation_threshold=saturation_threshold,
        threshold_pulse=threshold_pulse,
        debug=debug,
        pulse_tail=False,
    )
    events = charge.compute_photo_electron(events, gains=gain)
    events = charge.interpolate_bad_pixels(events, geom, bad_pixels)
    events = cleaning.compute_tailcuts_clean(
        events,
        geom=geom,
        overwrite=True,
        picture_thresh=picture_threshold,
        boundary_thresh=boundary_threshold,
        keep_isolated_pixels=False)
    events = cleaning.compute_boarder_cleaning(events, geom,
                                               boundary_threshold)
    events = cleaning.compute_dilate(events, geom)
    events = image.compute_hillas_parameters(events, geom)
    events = charge.compute_sample_photo_electron(events, gain_amplitude)
    events = cleaning.compute_3d_cleaning(events,
                                          geom,
                                          n_sample=50,
                                          threshold_sample_pe=20,
                                          threshold_time=2.1 * u.ns,
                                          threshold_size=0.005 * u.mm)
    # create pipeline output file
    output_file = Serializer(hillas_filename, mode='w', format='fits')
    data_to_store = PipelineOutputContainer()
    for event in events:
        if debug:
            print(event.hillas)
            print(event.data.nsb_rate)
            print(event.data.gain_drop)
            print(event.data.baseline_shift)
            print(event.data.border)
            plot_array_camera(np.max(event.data.adc_samples, axis=-1))
            plot_array_camera(
                np.nanmax(event.data.reconstructed_charge, axis=-1))
            plot_array_camera(event.data.cleaning_mask.astype(float))
            plot_array_camera(event.data.reconstructed_number_of_pe)
            plt.show()
        # fill container
        data_to_store.local_time = event.data.local_time
        data_to_store.event_type = event.event_type
        data_to_store.event_id = event.event_id
        data_to_store.az = event.slow_data.DriveSystem.current_position_az
        data_to_store.el = event.slow_data.DriveSystem.current_position_el

        r = event.hillas.r
        phi = event.hillas.phi
        psi = event.hillas.psi
        alpha = compute_alpha(phi.value, psi.value) * psi.unit
        data_to_store.alpha = alpha
        data_to_store.miss = compute_miss(r=r.value, alpha=alpha.value)
        data_to_store.miss = data_to_store.miss * r.unit
        data_to_store.baseline = np.mean(event.data.digicam_baseline)
        data_to_store.nsb_rate = np.mean(event.data.nsb_rate)
        temp_crate1 = event.slow_data.DigicamSlowControl.Crate1_T
        temp_crate2 = event.slow_data.DigicamSlowControl.Crate2_T
        temp_crate3 = event.slow_data.DigicamSlowControl.Crate3_T
        temp_digicam = np.array(
            np.hstack([temp_crate1, temp_crate2, temp_crate3]))
        temp_digicam_mean = np.mean(temp_digicam[np.logical_and(
            temp_digicam > 0, temp_digicam < 60)])
        data_to_store.digicam_temperature = temp_digicam_mean
        temp_sector1 = event.slow_data.PDPSlowControl.Sector1_T
        temp_sector2 = event.slow_data.PDPSlowControl.Sector2_T
        temp_sector3 = event.slow_data.PDPSlowControl.Sector3_T
        temp_pdp = np.array(
            np.hstack([temp_sector1, temp_sector2, temp_sector3]))
        temp_pdp_mean = np.mean(temp_pdp[np.logical_and(
            temp_pdp > 0, temp_pdp < 60)])
        data_to_store.pdp_temperature = temp_pdp_mean
        target_radec = event.slow_data.MasterSST1M.target_radec
        data_to_store.target_ra = target_radec[0]
        data_to_store.target_dec = target_radec[1]
        status_leds = event.slow_data.SafetyPLC.SPLC_CAM_Status
        # bit 8 of status_LEDs is about on/off, bit 9 about blinking
        data_to_store.pointing_leds_on = bool((status_leds & 1 << 8) >> 8)
        data_to_store.pointing_leds_blink = bool((status_leds & 1 << 9) >> 9)
        hv_sector1 = event.slow_data.PDPSlowControl.Sector1_HV
        hv_sector2 = event.slow_data.PDPSlowControl.Sector2_HV
        hv_sector3 = event.slow_data.PDPSlowControl.Sector3_HV
        hv_pdp = np.array(np.hstack([hv_sector1, hv_sector2, hv_sector3]),
                          dtype=bool)
        data_to_store.all_hv_on = np.all(hv_pdp)
        ghv_sector1 = event.slow_data.PDPSlowControl.Sector1_GHV
        ghv_sector2 = event.slow_data.PDPSlowControl.Sector2_GHV
        ghv_sector3 = event.slow_data.PDPSlowControl.Sector3_GHV
        ghv_pdp = np.array(np.hstack([ghv_sector1, ghv_sector2, ghv_sector3]),
                           dtype=bool)
        data_to_store.all_ghv_on = np.all(ghv_pdp)
        is_on_source = bool(event.slow_data.DriveSystem.is_on_source)
        data_to_store.is_on_source = is_on_source
        is_tracking = bool(event.slow_data.DriveSystem.is_tracking)
        data_to_store.is_tracking = is_tracking
        data_to_store.shower = bool(event.data.shower)
        data_to_store.border = bool(event.data.border)
        data_to_store.burst = bool(event.data.burst)
        data_to_store.saturated = bool(event.data.saturated)
        for key, val in event.hillas.items():
            data_to_store[key] = val
        output_file.add_container(data_to_store)
    try:
        output_file.close()
        print(hillas_filename, 'created.')
    except ValueError:
        print('WARNING: no data to save,', hillas_filename, 'not created.')
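The pointing-LED decoding near the end of the loop is plain bit masking: shift a 1 into position, AND it with the status word, shift back, and cast to bool. A self-contained check of that arithmetic with a made-up status word:

status_leds = 1 << 8  # made-up word: bit 8 (LEDs on) set, bit 9 (blinking) clear

pointing_leds_on = bool((status_leds & (1 << 8)) >> 8)
pointing_leds_blink = bool((status_leds & (1 << 9)) >> 9)
print(pointing_leds_on, pointing_leds_blink)  # True False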
Example #14
def data_quality(
        files, dark_filename, time_step, fits_filename, load_files,
        histo_filename, rate_plot_filename, baseline_plot_filename,
        nsb_plot_filename, parameters_filename, template_filename,
        aux_basepath, threshold_sample_pe=20.,
        bias_resistance=1e4 * u.Ohm, cell_capacitance=5e-14 * u.Farad,
        disable_bar=False, aux_services=('DriveSystem',)):
    input_dir = np.unique([os.path.dirname(file) for file in files])
    if len(input_dir) > 1:
        raise AttributeError("input files must be from the same directories")
    input_dir = input_dir[0]
    if aux_basepath.lower() == "search":
        aux_basepath = input_dir.replace('/raw/', '/aux/')
        print("auxiliary files are expected in", aux_basepath)
    with open(parameters_filename) as file:
        calibration_parameters = yaml.load(file, Loader=yaml.SafeLoader)

    pulse_template = NormalizedPulseTemplate.load(template_filename)
    pulse_area = pulse_template.integral() * u.ns
    gain_integral = np.array(calibration_parameters['gain'])

    charge_to_amplitude = pulse_template.compute_charge_amplitude_ratio(7, 4)
    gain_amplitude = gain_integral * charge_to_amplitude
    crosstalk = np.array(calibration_parameters['mu_xt'])
    pixel_id = np.arange(1296)
    n_pixels = len(pixel_id)
    dark_histo = Histogram1D.load(dark_filename)
    dark_baseline = dark_histo.mean()
    if not load_files:
        events = calibration_event_stream(files, disable_bar=disable_bar)
        events = add_slow_data_calibration(
            events, basepath=aux_basepath, aux_services=aux_services
        )
        events = fill_digicam_baseline(events)
        events = fill_dark_baseline(events, dark_baseline)
        events = subtract_baseline(events)
        events = compute_baseline_shift(events)
        events = compute_nsb_rate(
            events, gain_amplitude, pulse_area, crosstalk, bias_resistance,
            cell_capacitance
        )
        events = compute_gain_drop(events, bias_resistance, cell_capacitance)
        events = compute_sample_photo_electron(events, gain_amplitude)
        events = tag_burst_from_moving_average_baseline(
            events, n_previous_events=100, threshold_lsb=5
        )
        events = compute_3d_cleaning(events, geom=DigiCam.geometry,
                                     threshold_sample_pe=threshold_sample_pe)
        init_time = 0
        baseline = 0
        count = 0
        shower_count = 0
        az = 0
        el = 0
        container = DataQualityContainer()
        file = Serializer(fits_filename, mode='w', format='fits')
        baseline_histo = Histogram1D(
            data_shape=(n_pixels,),
            bin_edges=np.arange(4096)
        )
        for i, event in enumerate(events):
            new_time = event.data.local_time
            if init_time == 0:
                init_time = new_time
            count += 1
            baseline += np.mean(event.data.digicam_baseline)
            az += event.slow_data.DriveSystem.current_position_az
            el += event.slow_data.DriveSystem.current_position_el
            time_diff = new_time - init_time
            if event.data.shower:
                shower_count += 1
            baseline_histo.fill(event.data.digicam_baseline.reshape(-1, 1))
            if time_diff > time_step and i > 0:
                trigger_rate = count / time_diff
                shower_rate = shower_count / time_diff
                baseline = baseline / count
                az = az / count
                el = el / count
                container.trigger_rate = trigger_rate
                container.baseline = baseline
                container.time = (new_time + init_time) / 2
                container.shower_rate = shower_rate
                container.burst = event.data.burst
                nsb_rate = event.data.nsb_rate
                container.nsb_rate = np.nanmean(nsb_rate).value
                container.current_position_az = az
                container.current_position_el = el
                baseline = 0
                count = 0
                init_time = 0
                shower_count = 0
                az = 0
                el = 0
                file.add_container(container)
        output_path = os.path.dirname(histo_filename)
        if not os.path.exists(output_path):
            os.makedirs(output_path)
        baseline_histo.save(histo_filename)
        print(histo_filename, 'created.')
        file.close()
        print(fits_filename, 'created.')

    data = Table.read(fits_filename, format='fits')
    data = data.to_pandas()
    data['time'] = pd.to_datetime(data['time'], utc=True)
    data = data.set_index('time')

    if rate_plot_filename is not None:
        fig1 = plt.figure()
        ax = plt.gca()
        plt.xticks(rotation=70)
        plt.plot(data['trigger_rate'] * 1E9, '.', label='trigger rate')
        plt.plot(data['shower_rate'] * 1E9, '.', label='shower rate')
        plt.ylabel('rate [Hz]')
        plt.legend()  # labels come from the plot calls above
        xlim = plt.xlim()
        plt.xlim(xlim[0] - 1e-3, xlim[1] + 1e-3)  # extra margin on the sides
        if rate_plot_filename == "show":
            plt.show()
        else:
            output_path = os.path.dirname(rate_plot_filename)
            if not (output_path == '' or os.path.exists(output_path)):
                os.makedirs(output_path)
            plt.savefig(rate_plot_filename)
        plt.close(fig1)

    if baseline_plot_filename is not None:
        fig2 = plt.figure(figsize=(8, 6))
        ax = plt.gca()
        data_burst = data[data['burst']]
        data_good = data[~data['burst']]
        plt.xticks(rotation=70)
        plt.plot(data_good['baseline'], '.', label='good', ms=2)
        plt.plot(data_burst['baseline'], '.', label='burst', ms=2)
        plt.ylabel('Baseline [LSB]')
        xlim = plt.xlim()
        plt.xlim(xlim[0] - 1e-3, xlim[1] + 1e-3)  # extra margin on the sides
        if baseline_plot_filename == "show":
            plt.show()
        else:
            output_path = os.path.dirname(baseline_plot_filename)
            if not (output_path == '' or os.path.exists(output_path)):
                os.makedirs(output_path)
            plt.savefig(baseline_plot_filename)
        plt.close(fig2)

    if nsb_plot_filename is not None:
        fig3 = plt.figure()
        ax = fig3.add_subplot(111)
        data.plot(y='nsb_rate', ax=ax)
        ax.set_ylabel('$f_{NSB}$ [GHz]')

        if nsb_plot_filename == "show":
            plt.show()
        else:
            fig3.savefig(nsb_plot_filename)
        plt.close(fig3)

    return
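The event loop above accumulates per-event sums and flushes one averaged record whenever more than time_step has elapsed, then resets its accumulators. A minimal standalone sketch of that windowed-averaging pattern on synthetic (time, value) pairs:

samples = [(t, t % 7) for t in range(0, 60, 3)]  # synthetic (time, value)
time_step = 20

records, init_time, total, count = [], None, 0.0, 0
for t, value in samples:
    if init_time is None:
        init_time = t
    total += value
    count += 1
    if t - init_time > time_step:
        records.append({'time': (t + init_time) / 2, 'mean': total / count})
        init_time, total, count = None, 0.0, 0  # reset the window

print(records)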
Example #15
def context_manager_example(filename, data):
    with Serializer(filename, overwrite=True) as writer:
        for container in data:
            print(container.dl0)
            writer.write(container.dl0)
            print(writer._writer.table)