Example #1
    def test_cluster_correlation(self):  # Check the cluster correlation function
        dut_alignment.correlate_cluster(input_cluster_files=self.data_files,
                                        output_correlation_file=os.path.join(
                                            self.output_folder,
                                            'Correlation.h5'),
                                        n_pixels=self.n_pixels,
                                        pixel_size=self.pixel_size)
        data_equal, error_msg = test_tools.compare_h5_files(
            os.path.join(tests_data_folder, 'Correlation_result.h5'),
            os.path.join(self.output_folder, 'Correlation.h5'),
            exact=True)
        self.assertTrue(data_equal, msg=error_msg)

        # Retest with tiny chunk size to force chunked correlation
        dut_alignment.correlate_cluster(input_cluster_files=self.data_files,
                                        output_correlation_file=os.path.join(
                                            self.output_folder,
                                            'Correlation_2.h5'),
                                        n_pixels=self.n_pixels,
                                        pixel_size=self.pixel_size,
                                        chunk_size=293)
        data_equal, error_msg = test_tools.compare_h5_files(
            os.path.join(tests_data_folder, 'Correlation_result.h5'),
            os.path.join(self.output_folder, 'Correlation_2.h5'),
            exact=True)
        self.assertTrue(data_equal, msg=error_msg)
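
A note on the helper used above: test_tools.compare_h5_files comes from the testbeam_analysis test suite. As a rough mental model only (a minimal sketch, not the library's actual implementation), an exact node-by-node comparison of two HDF5 files can be written with PyTables like this:

import numpy as np
import tables as tb

def compare_h5_files_sketch(file_1, file_2):
    # Read every array/table leaf of both files into memory, keyed by node path
    with tb.open_file(file_1, mode='r') as h5_1, tb.open_file(file_2, mode='r') as h5_2:
        nodes_1 = {n._v_pathname: n.read() for n in h5_1.walk_nodes('/', classname='Leaf')}
        nodes_2 = {n._v_pathname: n.read() for n in h5_2.walk_nodes('/', classname='Leaf')}
    if set(nodes_1) != set(nodes_2):
        return False, 'Node sets differ: %s' % (set(nodes_1) ^ set(nodes_2))
    for path, data in nodes_1.items():
        if not np.array_equal(data, nodes_2[path]):  # Exact comparison, as with exact=True
            return False, 'Data differs in node %s' % path
    return True, ''
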
Example #2
def run_analysis():
    # The location of the example data files, one file per DUT
    data_files = [
        r'H:\Testbeam_05052016_LFCMOS\Telescope_data\kartel-converted-synchronized_plane0.h5',
        r'H:\Testbeam_05052016_LFCMOS\Telescope_data\kartel-converted-synchronized_plane1.h5',
        r'H:\Testbeam_05052016_LFCMOS\Telescope_data\kartel-converted-synchronized_plane2.h5',
        r'H:\Testbeam_05052016_LFCMOS\Telescope_data\fe_dut-converted-synchronized_plane0.h5',
        r'H:\Testbeam_05052016_LFCMOS\Telescope_data\fe_dut-converted-synchronized_plane1.h5',
        r'H:\Testbeam_05052016_LFCMOS\Telescope_data\kartel-converted-synchronized_plane3.h5',
        r'H:\Testbeam_05052016_LFCMOS\Telescope_data\kartel-converted-synchronized_plane4.h5',
        r'H:\Testbeam_05052016_LFCMOS\Telescope_data\kartel-converted-synchronized_plane5.h5'
    ]  # The first device is the reference for the coordinate system

    # Pixel dimensions and matrix size of the DUTs
    pixel_size = [(18.4, 18.4), (18.4, 18.4), (18.4, 18.4), (250, 50),
                  (250, 50), (18.4, 18.4), (18.4, 18.4),
                  (18.4, 18.4)]  # (Column, row) pixel pitch in um
    n_pixels = [(1152, 576), (1152, 576), (1152, 576), (80, 336), (80, 336),
                (1152, 576), (1152, 576),
                (1152, 576)]  # (Column, row) dimensions of the pixel matrix
    z_positions = [
        0., 20000, 40000, 40000 + 101000, 40000 + 101000 + 23000, 247000,
        267000, 287000
    ]  # in um
    dut_names = ("Tel 0", "Tel 1", "Tel 2", "LFCMOS3", "FEI4 Reference",
                 "Tel 3", "Tel 4", "Tel 5")  # Friendly names for plotting

    # Folder where all output data and plots are stored
    output_folder = r'H:\Testbeam_05052016_LFCMOS\output'

    # The following shows a complete test beam analysis by calling the separate functions in the correct order

    # Remove hot pixels, only needed for devices with noisy pixels (here: Mimosa 26)
    # A pool of workers to remove the noisy pixels in all files in parallel
    threshold = (2, 2, 2, 10, 10, 2, 2, 2)
    kwargs = [{
        'input_hits_file': data_files[i],
        'n_pixel': n_pixels[i],
        'pixel_size': pixel_size[i],
        'threshold': threshold[i],
        'dut_name': dut_names[i]
    } for i in range(0, len(data_files))]
    pool = Pool()
    for kwarg in kwargs:
        pool.apply_async(hit_analysis.remove_noisy_pixels, kwds=kwarg)
    pool.close()
    pool.join()

    # Cluster hits of all DUTs
    # A pool of workers to cluster hits in all files in parallel
    kwargs = [{
        'input_hits_file': data_files[i][:-3] + '_noisy_pixels.h5',
        'max_x_distance': 3,
        'max_y_distance': 3,
        'max_time_distance': 2,
        'max_cluster_hits': 5000,
        'dut_name': dut_names[i]
    } for i in range(0, len(data_files))]
    pool = Pool()
    for kwarg in kwargs:
        pool.apply_async(hit_analysis.cluster_hits, kwds=kwarg)
    pool.close()
    pool.join()

    # Correlate the row / column of each DUT
    dut_alignment.correlate_cluster(input_cluster_files=[
        data_file[:-3] + '_noisy_pixels_cluster.h5' for data_file in data_files
    ],
                                    output_correlation_file=os.path.join(
                                        output_folder, 'Correlation.h5'),
                                    n_pixels=n_pixels,
                                    pixel_size=pixel_size,
                                    dut_names=dut_names)

    # Create prealignment relative to the first DUT from the correlation data
    dut_alignment.prealignment(
        input_correlation_file=os.path.join(output_folder, 'Correlation.h5'),
        output_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        z_positions=z_positions,
        pixel_size=pixel_size,
        dut_names=dut_names,
        fit_background=True,
        non_interactive=False
    )  # Tries to find cuts automatically; deactivate to do this manually

    # Merge the cluster tables to one merged table aligned at the event number
    dut_alignment.merge_cluster_data(input_cluster_files=[
        data_file[:-3] + '_noisy_pixels_cluster.h5' for data_file in data_files
    ],
                                     output_merged_file=os.path.join(
                                         output_folder, 'Merged.h5'),
                                     pixel_size=pixel_size)

    # Apply the prealignment to the merged cluster table to create tracklets
    dut_alignment.apply_alignment(
        input_hit_file=os.path.join(output_folder, 'Merged.h5'),
        input_alignment=os.path.join(output_folder, 'Alignment.h5'),
        output_hit_aligned_file=os.path.join(output_folder,
                                             'Tracklets_prealigned.h5'),
        force_prealignment=True)

    # Find tracks from the prealigned tracklets and store them with a quality indicator in the track candidates table
    track_analysis.find_tracks(
        input_tracklets_file=os.path.join(output_folder,
                                          'Tracklets_prealigned.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_track_candidates_file=os.path.join(
            output_folder, 'TrackCandidates_prealignment.h5'))

    # Select tracks with a hit in the time reference (DUT 4) and all position devices to increase analysis speed due to data reduction
    data_selection.select_hits(hit_file=os.path.join(
        output_folder, 'TrackCandidates_prealignment.h5'),
                               track_quality=0b11110111,
                               track_quality_mask=0b11110111)
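    # Aside on the bitmask above (our reading, not from the library docs): with
    # one quality bit per DUT, 0b11110111 requires a good hit in every DUT
    # except DUT 3, the small LFCMOS plane. The same mask can be built
    # programmatically: sum(1 << i for i in range(8)) & ~(1 << 3) == 0b11110111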

    # Do an alignment step with the track candidates, corrects rotations and is therefore much more precise than simple prealignment
    dut_alignment.alignment(
        input_track_candidates_file=os.path.join(
            output_folder, 'TrackCandidates_prealignment_reduced.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        # Order of combinations of planes to align; one should start with high-resolution planes (here: telescope planes)
        align_duts=[
            [0, 1, 2, 5, 6, 7],  # align the telescope planes first
            [4],  # align the time reference after the telescope alignment
            [3]
        ],  # align the DUT last and not with the reference since it is rather small and would make the time reference alignment worse
        # The DUTs to be used in the fit; always use just the high-resolution Mimosa26 planes
        selection_fit_duts=[0, 1, 2, 5, 6, 7],
        # The DUTs to be required to have a hit for the alignment
        selection_hit_duts=[
            [0, 1, 2, 4, 5, 6, 7],  # Take tracks with time reference hit
            [0, 1, 2, 4, 5, 6, 7],  # Take tracks with time reference hit
            [0, 1, 2, 3, 4, 5, 6, 7]
        ],  # Also require hit in the small DUT
        # The required track quality per alignment step and DUT
        selection_track_quality=[
            [1, 1, 1, 0, 1, 1, 1],  # Do not require a good hit in the time reference
            [1, 1, 1, 1, 1, 1, 1],
            [1, 1, 1, 1, 0, 1, 1, 1]
        ],  # Do not require a good hit in the small DUT
        initial_rotation=[
            [0., 0., 0.],
            [0., 0., 0.],
            [0., 0., 0.],
            # Devices 3, 4 are heavily rotated (inverted), this is not implemented now
            # Thus one has to set the correct rotation angles here manually
            [np.pi - 0.05, -0.05, -0.005],
            [np.pi - 0.01, -0.02, -0.0005],
            [0., 0, 0.],
            [0., 0, 0.],
            [0., 0, 0.]
        ],
        initial_translation=[
            [0., 0, 0.],
            [0., 0, 0.],
            [0., 0, 0.],
            # Devices 3, 4 are heavily rotated (inverted), this is not implemented now
            # Thus one has to set the correct positions here manually
            [11540, 18791, 0.],
            [710., 9851., 0.],
            [0., 0, 0.],
            [0., 0, 0.],
            [0., 0, 0.]
        ],
        n_pixels=n_pixels,
        use_n_tracks=200000,  # Do the alignment only on a subset of data, needed for reasonable run time
        pixel_size=pixel_size)

    # Apply new alignment to data
    # Revert alignment from track candidates. Usually one would just apply the alignment to the merged data.
    # Due to the large beam angle track finding fails on aligned data. Thus rely on the found tracks from prealignment.
    dut_alignment.apply_alignment(
        input_hit_file=os.path.join(output_folder,
                                    'TrackCandidates_prealignment_reduced.h5'),
        input_alignment=os.path.join(output_folder, 'Alignment.h5'),
        output_hit_aligned_file=os.path.join(
            output_folder, 'Merged_small.h5'
        ),  # This is the new, not yet aligned but preselected merged data file to apply the (pre-)alignment on
        inverse=True,
        force_prealignment=True)
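    # Net effect of this call and the next one: the preselected, prealigned
    # track candidates are first reverted to raw coordinates (Merged_small.h5),
    # so that the full alignment found above can then be applied to them.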

    # Apply the alignment to the merged cluster table to create tracklets
    dut_alignment.apply_alignment(
        input_hit_file=os.path.join(output_folder, 'Merged_small.h5'),
        input_alignment=os.path.join(output_folder, 'Alignment.h5'),
        output_hit_aligned_file=os.path.join(output_folder,
                                             'TrackCandidates.h5'))

    # Fit track using alignment
    track_analysis.fit_tracks(
        input_track_candidates_file=os.path.join(output_folder,
                                                 'TrackCandidates.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_tracks_file=os.path.join(output_folder, 'Tracks.h5'),
        selection_hit_duts=[0, 1, 2, 4, 5, 6, 7],
        selection_fit_duts=[0, 1, 2, 5, 6, 7],
        selection_track_quality=1
    )  # Take all tracks with good hits, do not care about time reference hit quality

    # Create unconstrained residuals with aligned data
    result_analysis.calculate_residuals(
        input_tracks_file=os.path.join(output_folder, 'Tracks.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_residuals_file=os.path.join(output_folder, 'Residuals.h5'),
        n_pixels=n_pixels,
        pixel_size=pixel_size)

    # Calculate efficiency with aligned data
    result_analysis.calculate_efficiency(
        input_tracks_file=os.path.join(output_folder, 'Tracks.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_pdf=os.path.join(output_folder, 'Efficiency.pdf'),
        bin_size=(10, 10),
        use_duts=[3],
        sensor_size=[(20000, 10000), (20000, 10000), (20000, 10000),
                     (20000, 20000), (20000, 10000), (20000, 10000),
                     (20000, 10000)])

    # Fit tracks using prealignment
    track_analysis.fit_tracks(
        input_track_candidates_file=os.path.join(
            output_folder, 'TrackCandidates_prealignment_reduced.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_tracks_file=os.path.join(output_folder,
                                        'Tracks_prealignment.h5'),
        force_prealignment=True,
        selection_hit_duts=[0, 1, 2, 4, 5, 6, 7],
        selection_fit_duts=[0, 1, 2, 5, 6, 7],
        selection_track_quality=1
    )  # Take all tracks with good hits, do not care about time reference hit quality

    # Create unconstrained residuals with prealigned data
    result_analysis.calculate_residuals(
        input_tracks_file=os.path.join(output_folder,
                                       'Tracks_prealignment.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_residuals_file=os.path.join(output_folder,
                                           'Residuals_prealignment.h5'),
        force_prealignment=True,
        n_pixels=n_pixels,
        pixel_size=pixel_size)

    # Create efficiency plot with prealigned data
    result_analysis.calculate_efficiency(
        input_tracks_file=os.path.join(output_folder,
                                       'Tracks_prealignment.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_pdf=os.path.join(output_folder, 'Efficiency_prealignment.pdf'),
        force_prealignment=True,
        bin_size=(10, 10),
        use_duts=[3],
        sensor_size=[(20000, 10000), (20000, 10000), (20000, 10000),
                     (20000, 20000), (20000, 10000), (20000, 10000),
                     (20000, 10000)])
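
A side note on the file naming used throughout these examples: slicing with data_file[:-3] strips the literal '.h5' suffix before appending e.g. '_noisy_pixels_cluster.h5'. A minimal, extension-agnostic equivalent (the helper name is ours, not part of testbeam_analysis) uses os.path.splitext, as Example #5 below also does:

import os

def derive_output_file(input_file, suffix):
    # Equivalent to input_file[:-3] + suffix for '.h5' inputs,
    # but robust to other extension lengths
    base, _ = os.path.splitext(input_file)
    return base + suffix

# derive_output_file('plane0.h5', '_noisy_pixels_cluster.h5')
# -> 'plane0_noisy_pixels_cluster.h5'
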
Example #3
def run_analysis(n_events):
    # Simulate n_events events with std. settings
    simulate_data = SimulateData(
        random_seed=0)  # Start simulator with random seed 0

    # All simulator std. settings are listed here and can be changed
    # General setup
    simulate_data.n_duts = 6  # Number of DUTs in the simulation
    simulate_data.z_positions = [
        i * 10000 for i in range(simulate_data.n_duts)
    ]  # in um; std: every 10 cm
    simulate_data.offsets = [(-10000 + 111 * 0., -10000 + 111 * 0.)
                             for i in range(simulate_data.n_duts)
                             ]  # in x, y in um
    simulate_data.rotations = [
        (0, 0, 0)
    ] * simulate_data.n_duts  # in rotation around x, y, z axis in Rad
    simulate_data.temperature = 300  # Temperature in Kelvin, needed for charge sharing calculation
    # Beam related settings
    simulate_data.beam_position = (
        0, 0)  # Average beam position in x, y at z = 0 in um
    simulate_data.beam_position_sigma = (2000, 2000)  # in x, y at z = 0 in um
    simulate_data.beam_momentum = 3200  # Beam momentum in MeV
    simulate_data.beam_angle = 0  # Average beam angle in theta at z = 0 in mRad
    simulate_data.beam_angle_sigma = 2  # Deviation from the average beam angle in theta at z = 0 in mRad
    simulate_data.tracks_per_event = 3  # Average number of tracks per event
    simulate_data.tracks_per_event_sigma = 1  # Deviation from the average number of tracks; allows events with no tracks!
    # Device specific settings
    simulate_data.dut_bias = [
        50
    ] * simulate_data.n_duts  # Sensor bias voltage for each device in volt
    simulate_data.dut_thickness = [
        200
    ] * simulate_data.n_duts  # Sensor thickness for each device in um
    simulate_data.dut_threshold = [
        0.
    ] * simulate_data.n_duts  # Detection threshold for each device in electrons, influences efficiency!
    simulate_data.dut_noise = [
        0.
    ] * simulate_data.n_duts  # Noise for each device in electrons
    simulate_data.dut_pixel_size = [
        (50, 18.4)
    ] * simulate_data.n_duts  # Pixel size for each device in x / y in um
    simulate_data.dut_n_pixel = [
        (400, 1100)
    ] * simulate_data.n_duts  # Number of pixel for each device in x / y
    simulate_data.dut_efficiencies = [
        1.
    ] * simulate_data.n_duts  # Efficiency for each device from 0. to 1. for hits above threshold
    simulate_data.dut_material_budget = [
        simulate_data.dut_thickness[i] * 1e-4 / 9.370
        for i in range(simulate_data.n_duts)
    ]  # The effective material budget (sensor + passive components) given in total material distance / total radiation length (https://cdsweb.cern.ch/record/1279627/files/PH-EP-Tech-Note-2010-013.pdf); 0 means no multiple scattering; std. setting is the sensor thickness made of silicon as material budget
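    # Worked example for the default settings above: 200 um of silicon is
    # x = 200 * 1e-4 cm = 0.02 cm and the radiation length of silicon is
    # X0 = 9.370 cm, so the material budget per plane is
    # x / X0 = 0.02 / 9.370 ~= 2.1e-3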
    # Digitization settings
    simulate_data.digitization_charge_sharing = True
    simulate_data.digitization_shuffle_hits = True  # Shuffle hits per event to challenge track finding
    simulate_data.digitization_pixel_discretization = True  # Translate hit position on DUT plane to channel indices (column / row)

    # Create the data
    output_folder = 'simulation'  # Define a folder where all output data and plots are stored
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)
    simulate_data.create_data_and_store(os.path.join(output_folder,
                                                     'simulated_data'),
                                        n_events=n_events)

    # The simulated data files, one file per DUT
    data_files = [
        os.path.join(output_folder, r'simulated_data_DUT%d.h5' % i)
        for i in range(simulate_data.n_duts)
    ]

    # The following shows a complete test beam analysis by calling the separate functions in the correct order

    # Cluster hits of all DUTs
    kwargs = [{
        'input_hits_file': data_files[i],
        'max_x_distance': 1,
        'max_y_distance': 1,
        'max_time_distance': 2,
        'max_hit_charge': 2**16,
        "dut_name": data_files[i]
    } for i in range(len(data_files))]
    pool = Pool()
    for kwarg in kwargs:
        pool.apply_async(hit_analysis.cluster_hits, kwds=kwarg)
    pool.close()
    pool.join()

    # Correlate the row / column of each DUT
    dut_alignment.correlate_cluster(input_cluster_files=[
        data_file[:-3] + '_cluster.h5' for data_file in data_files
    ],
                                    output_correlation_file=os.path.join(
                                        output_folder, 'Correlation.h5'),
                                    n_pixels=simulate_data.dut_n_pixel,
                                    pixel_size=simulate_data.dut_pixel_size)

    # Create alignment data for the DUT positions to the first DUT from the correlation data
    # When needed, set offset and error cut for each DUT as list of tuples
    dut_alignment.prealignment(
        input_correlation_file=os.path.join(output_folder, 'Correlation.h5'),
        output_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        z_positions=simulate_data.z_positions,
        pixel_size=simulate_data.dut_pixel_size,
        no_fit=True,  # Deactivate if you have a large dataset, enhances alignment slightly
        fit_background=bool(simulate_data.tracks_per_event
                            or simulate_data.tracks_per_event_sigma),
        non_interactive=True
    )  # Tries to find cuts automatically; deactivate to do this manually

    # Correct all DUT hits via alignment information and merge the cluster tables to one tracklets table aligned at the event number
    dut_alignment.merge_cluster_data(input_cluster_files=[
        data_file[:-3] + '_cluster.h5' for data_file in data_files
    ],
                                     output_merged_file=os.path.join(
                                         output_folder, 'Merged.h5'),
                                     n_pixels=simulate_data.dut_n_pixel,
                                     pixel_size=simulate_data.dut_pixel_size)

    dut_alignment.apply_alignment(
        input_hit_file=os.path.join(output_folder, 'Merged.h5'),
        input_alignment=os.path.join(output_folder, 'Alignment.h5'),
        output_hit_aligned_file=os.path.join(output_folder,
                                             'Tracklets_prealigned.h5'),
        force_prealignment=True
    )  # If there is already an alignment info in the alignment file this has to be set

    # Find tracks from the tracklets and store them with a quality indicator in the track candidates table
    track_analysis.find_tracks(
        input_tracklets_file=os.path.join(output_folder,
                                          'Tracklets_prealigned.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_track_candidates_file=os.path.join(
            output_folder, 'TrackCandidates_prealigned.h5'),
        min_cluster_distance=False
    )  # If there is already an alignment info in the alignment file this has to be set

    # Fit the track candidates and create new track table
    track_analysis.fit_tracks(
        input_track_candidates_file=os.path.join(
            output_folder, 'TrackCandidates_prealigned.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_tracks_file=os.path.join(output_folder, 'Tracks_prealigned.h5'),
        exclude_dut_hit=True,  # To get unconstrained residuals do not use DUT hit for track fit
        selection_track_quality=0,
        min_track_distance=1000,  # To get close to exact efficiency, heavily avoid merged tracks
        force_prealignment=True)

    result_analysis.calculate_efficiency(
        input_tracks_file=os.path.join(output_folder, 'Tracks_prealigned.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_file=os.path.join(output_folder, 'Efficiency.h5'),
        output_pdf=os.path.join(output_folder, 'Efficiency.pdf'),
        bin_size=[(250, 50)],
        sensor_size=[(250. * 80, 50. * 336)],
        minimum_track_density=2,
        use_duts=None,
        cut_distance=500,
        max_distance=500,
        col_range=None,
        row_range=None,
        pixel_size=simulate_data.dut_pixel_size,
        n_pixels=simulate_data.dut_n_pixel,
        force_prealignment=True,
        show_inefficient_events=True)
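
All the parallel steps in these examples use the same fire-and-forget multiprocessing pattern: one apply_async call per DUT, then close() and join(). One caveat is that apply_async silently swallows worker exceptions unless the AsyncResult objects are kept. A minimal sketch that also surfaces worker errors, assuming the same hit_analysis API as in the examples, could look like this:

from multiprocessing import Pool

def run_in_parallel(func, kwargs_list):
    # Submit one call per kwargs dict, wait for all, re-raise worker errors
    pool = Pool()
    results = [pool.apply_async(func, kwds=kwargs) for kwargs in kwargs_list]
    pool.close()
    pool.join()
    for result in results:
        result.get()  # Raises here if the worker raised

# e.g. run_in_parallel(hit_analysis.cluster_hits, kwargs)
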
Example #4
def run_analysis():
    # Get the absolute path of example data
    tests_data_folder = os.path.join(
        os.path.dirname(
            os.path.abspath(inspect.getfile(inspect.currentframe()))), 'data')

    # The location of the example data files, one file per DUT
    data_files = [(os.path.join(tests_data_folder,
                                'TestBeamData_Mimosa26_DUT%d' % i + '.h5'))
                  for i in range(6)]

    # Pixel dimensions and matrix size of the DUTs
    pixel_size = [(18.4, 18.4)] * 6  # Column, row pixel pitch in um
    n_pixels = [(1152, 576)] * 6  # Number of pixel on column, row

    z_positions = [0., 15000, 30000, 45000, 60000, 75000]  # z position in um
    # Friendly names for plotting
    dut_names = ("Tel_0", "Tel_1", "Tel_2", "Tel_3", "Tel_4", "Tel_5")

    # Create output subfolder where all output data and plots are stored
    output_folder = os.path.join(
        os.path.split(data_files[0])[0], 'output_eutel')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # The following shows a complete test beam analysis by calling the
    # separate functions in the correct order

    # Remove hot pixels, only needed for devices with noisy pixels like the Mimosa 26
    # A pool of workers to remove the noisy pixels in all files in parallel
    kwargs = [{
        'input_hits_file': data_files[i],
        'n_pixel': n_pixels[i],
        'pixel_size': pixel_size[i],
        'threshold': 0.5,
        'dut_name': dut_names[i]
    } for i in range(0, len(data_files))]
    pool = Pool()
    for kwarg in kwargs:
        pool.apply_async(hit_analysis.remove_noisy_pixels, kwds=kwarg)
    pool.close()
    pool.join()

    # Cluster hits of all DUTs
    # A pool of workers to cluster hits in all files in parallel
    kwargs = [{
        'input_hits_file': data_files[i][:-3] + '_noisy_pixels.h5',
        'max_x_distance': 3,
        'max_y_distance': 3,
        'max_time_distance': 2,
        'max_cluster_hits': 1000000,
        'dut_name': dut_names[i]
    } for i in range(0, len(data_files))]
    pool = Pool()
    for kwarg in kwargs:
        pool.apply_async(hit_analysis.cluster_hits, kwds=kwarg)
    pool.close()
    pool.join()

    # Correlate the row / column of each DUT
    input_cluster_files = [
        data_file[:-3] + '_noisy_pixels_cluster.h5' for data_file in data_files
    ]
    dut_alignment.correlate_cluster(input_cluster_files=input_cluster_files,
                                    output_correlation_file=os.path.join(
                                        output_folder, 'Correlation.h5'),
                                    n_pixels=n_pixels,
                                    pixel_size=pixel_size,
                                    dut_names=dut_names)

    # Create prealignment relative to the first DUT from the correlation data
    input_correlation_file = os.path.join(output_folder, 'Correlation.h5')
    dut_alignment.prealignment(
        input_correlation_file=input_correlation_file,
        output_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        z_positions=z_positions,
        pixel_size=pixel_size,
        dut_names=dut_names,
        # This data has several tracks per event and
        # noisy pixel, thus fit existing background
        fit_background=True,
        # Tries to find cuts automatically;
        # deactivate to do this manually
        non_interactive=True)

    # Merge the cluster tables to one merged table aligned at the event number
    input_cluster_files = [
        data_file[:-3] + '_noisy_pixels_cluster.h5' for data_file in data_files
    ]
    dut_alignment.merge_cluster_data(input_cluster_files=input_cluster_files,
                                     output_merged_file=os.path.join(
                                         output_folder, 'Merged.h5'),
                                     n_pixels=n_pixels,
                                     pixel_size=pixel_size)

    # Apply the prealignment to the merged cluster table to create tracklets
    dut_alignment.apply_alignment(
        input_hit_file=os.path.join(output_folder, 'Merged.h5'),
        input_alignment=os.path.join(output_folder, 'Alignment.h5'),
        output_hit_aligned_file=os.path.join(output_folder,
                                             'Tracklets_prealigned.h5'),
        force_prealignment=True)

    # Find tracks from the prealigned tracklets and store them with a quality
    # indicator in the track candidates table
    track_analysis.find_tracks(
        input_tracklets_file=os.path.join(output_folder,
                                          'Tracklets_prealigned.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_track_candidates_file=os.path.join(
            output_folder, 'TrackCandidates_prealignment.h5'))

    # The following two steps are for demonstration only.
    # They show track fitting and residual calculation on
    # prealigned hits. Usually you are not interested in this and will use
    # the aligned hits directly.

    # Step 1.: Fit the track candidates and create new track table (using the
    # prealignment!)
    track_analysis.fit_tracks(
        input_track_candidates_file=os.path.join(
            output_folder, 'TrackCandidates_prealignment.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_tracks_file=os.path.join(output_folder, 'Tracks_prealigned.h5'),
        # To get unconstrained residuals do not use DUT
        # hit for track fit
        exclude_dut_hit=True,
        # This is just for demonstration purpose, usually
        # uses fully aligned hits
        force_prealignment=True,
        selection_track_quality=0)  # We will cut on chi2

    # Step 2.:  Calculate the residuals to check the alignment (using the
    # prealignment!)
    result_analysis.calculate_residuals(
        input_tracks_file=os.path.join(output_folder, 'Tracks_prealigned.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_residuals_file=os.path.join(output_folder,
                                           'Residuals_prealigned.h5'),
        n_pixels=n_pixels,
        pixel_size=pixel_size,
        max_chi2=2000,
        # This is just for demonstration purpose
        # you usually use fully aligned hits
        force_prealignment=True)

    # Do an alignment step with the track candidates, corrects rotations and
    # is therefore much more precise than simple prealignment
    dut_alignment.alignment(input_track_candidates_file=os.path.join(
        output_folder, 'TrackCandidates_prealignment.h5'),
                            input_alignment_file=os.path.join(
                                output_folder, 'Alignment.h5'),
                            n_pixels=n_pixels,
                            pixel_size=pixel_size)

    # Apply the alignment to the merged cluster table to create tracklets
    dut_alignment.apply_alignment(
        input_hit_file=os.path.join(output_folder, 'Merged.h5'),
        input_alignment=os.path.join(output_folder, 'Alignment.h5'),
        output_hit_aligned_file=os.path.join(output_folder, 'Tracklets.h5'))

    # Find tracks from the tracklets and store them with a quality indicator
    # in the track candidates table
    track_analysis.find_tracks(
        input_tracklets_file=os.path.join(output_folder, 'Tracklets.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_track_candidates_file=os.path.join(output_folder,
                                                  'TrackCandidates.h5'))

    # Example 1: use all DUTs in fit and cut on chi2
    track_analysis.fit_tracks(
        input_track_candidates_file=os.path.join(output_folder,
                                                 'TrackCandidates.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_tracks_file=os.path.join(output_folder, 'Tracks_all.h5'),
        # To get unconstrained residuals do not use DUT
        # hit for track fit
        exclude_dut_hit=True,
        # We do not cut on track quality but on chi2 later
        selection_track_quality=0)

    # Create unconstrained residuals
    result_analysis.calculate_residuals(
        input_tracks_file=os.path.join(output_folder, 'Tracks_all.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_residuals_file=os.path.join(output_folder, 'Residuals_all.h5'),
        # The chi2 cut has a large influence on
        # the residuals and number of tracks,
        # since the resolution is dominated by
        # multiple scattering
        max_chi2=500,
        n_pixels=n_pixels,
        pixel_size=pixel_size)
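    # Scale of that scattering (a hedged estimate, not computed by this script):
    # the Highland formula gives
    # theta_0 = 13.6 MeV / (beta * c * p) * sqrt(x / X0) * (1 + 0.038 * ln(x / X0)),
    # so ~50 um of silicon per plane (x / X0 ~ 5e-4) deflects a 1 GeV/c track
    # by roughly 0.2 mrad, i.e. ~20 um over a 10 cm lever arm, larger than
    # the intrinsic hit resolution, hence the strong chi2 dependence.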

    # Example 2: Use only 2 DUTs next to the fit DUT and cut on track quality.
    # Thus the track fit is just a track interpolation with chi2 = 0.
    # This is better here due to heavily scattered tracks, where a straight line
    # assumption for all DUTs is wrong.
    # This leads to symmetric residuals in x and y for all DUTs between 2 DUTs
    # (= DUTs: 1, 2, 3, 4)
    track_analysis.fit_tracks(
        input_track_candidates_file=os.path.join(output_folder,
                                                 'TrackCandidates.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_tracks_file=os.path.join(output_folder, 'Tracks_some.h5'),
        selection_hit_duts=[
            [1, 2],  # Only select DUTs next to the DUT to fit
            [0, 2],
            [1, 3],
            [2, 4],
            [3, 5],
            [3, 4]
        ],
        selection_track_quality=1)  # We cut on track quality

    # Create unconstrained residuals
    result_analysis.calculate_residuals(
        input_tracks_file=os.path.join(output_folder, 'Tracks_some.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_residuals_file=os.path.join(output_folder, 'Residuals_some.h5'),
        n_pixels=n_pixels,
        pixel_size=pixel_size)
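
The neighbour lists in selection_hit_duts of Example 2 above are written out by hand. A small illustrative helper (ours, not part of testbeam_analysis) reproduces them for any telescope length; the edge planes fall back to their two nearest planes:

def adjacent_duts(n_duts):
    # Per-DUT list of the two planes used to interpolate the track
    selection = []
    for i in range(n_duts):
        if i == 0:
            selection.append([1, 2])          # First plane: two planes behind
        elif i == n_duts - 1:
            selection.append([i - 2, i - 1])  # Last plane: two planes in front
        else:
            selection.append([i - 1, i + 1])  # Interior plane: direct neighbours
    return selection

# adjacent_duts(6) == [[1, 2], [0, 2], [1, 3], [2, 4], [3, 5], [3, 4]]
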
Example #5
def run_analysis(data_files):
    # PIX1 run
    pixel_size = [(250., 50.), (50., 250.), (250., 50.), (50., 250.)]  # in um
    n_pixels = [(80, 336), (336, 80), (80, 336), (336, 80)]
    z_positions = [0., 6400, 282200, 288600]  # in um (alternative: [0., 131500, 138300, 420500, 707100])
    dut_names = ("Tel_1", "Tel_2", "Tel_3", "Tel_4")

    # Create output subfolder where all output data and plots are stored
    output_folder = os.path.join(os.path.split(data_files[0])[0], 'output')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # The following shows a complete test beam analysis by calling the separate
    # functions in the correct order

    # Generate noisy pixel mask for all DUTs
    for i, data_file in enumerate(data_files):
        hit_analysis.generate_pixel_mask(input_hits_file=data_file,
                                         n_pixel=n_pixels[i],
                                         pixel_mask_name='NoisyPixelMask',
                                         pixel_size=pixel_size[i],
                                         threshold=7.5,
                                         dut_name=dut_names[i])
 
    # Cluster hits from all DUTs
    for i, data_file in enumerate(data_files):
        hit_analysis.cluster_hits_niko(input_hits_file=data_file,
                                       # input_noisy_pixel_mask_file=os.path.splitext(data_file)[0] + '_noisy_pixel_mask.h5',
                                       min_hit_charge=0,
                                       max_hit_charge=13,
                                       column_cluster_distance=1,
                                       row_cluster_distance=2,
                                       frame_cluster_distance=2,
                                       dut_name=dut_names[i])
 
    # Generate filenames for cluster data
    input_cluster_files = [os.path.splitext(data_file)[0] + '_clustered.h5'
                           for data_file in data_files]
 
    # Correlate the row / column of each DUT
    dut_alignment.correlate_cluster(input_cluster_files=input_cluster_files,
                                    output_correlation_file=os.path.join(output_folder, 'Correlation.h5'),
                                    n_pixels=n_pixels,
                                    pixel_size=pixel_size,
                                    dut_names=dut_names)
 
    # Correct all DUT hits via alignment information and merge the cluster tables to one tracklets table aligned at the event number
    dut_alignment.merge_cluster_data(input_cluster_files=input_cluster_files,
                                     n_pixels=n_pixels,
                                     output_merged_file=os.path.join(output_folder, 'Merged.h5'),
                                     pixel_size=pixel_size)

    # Create prealignment data for the DUT positions to the first DUT from the correlations
    dut_alignment.prealignment(input_correlation_file=os.path.join(output_folder, 'Correlation.h5'),
                               output_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
                               z_positions=z_positions,
                               pixel_size=pixel_size,
                               s_n=0.1,
                               fit_background=False,
                               reduce_background=False,
                               dut_names=dut_names,
                               non_interactive=True)  # Tries to find cuts automatically; deactivate to do this manually

    dut_alignment.apply_alignment(input_hit_file=os.path.join(output_folder, 'Merged.h5'),
                                  input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
                                  output_hit_file=os.path.join(output_folder, 'Tracklets_prealigned.h5'),
                                  force_prealignment=True)  # If there is already an alignment info in the alignment file this has to be set

    # Find tracks from the tracklets and store them with a quality indicator in the track candidates table
    track_analysis.find_tracks(input_tracklets_file=os.path.join(output_folder, 'Tracklets_prealigned.h5'),
                               input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
                               output_track_candidates_file=os.path.join(output_folder, 'TrackCandidates_prealigned.h5'))  # If there is already an alignment info in the alignment file this has to be set

    # Fit the track candidates and create new track table
    track_analysis.fit_tracks(input_track_candidates_file=os.path.join(output_folder, 'TrackCandidates_prealigned.h5'),
                              input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
                              output_tracks_file=os.path.join(output_folder, 'Tracks_prealigned.h5'),
                              fit_duts=[0, 1, 2, 3],
                              selection_track_quality=1,
                              force_prealignment=True)

    # Optional: plot some tracks (or track candidates) of a selected event range
    plot_utils.plot_events(input_tracks_file=os.path.join(output_folder, 'Tracks_prealigned.h5'),
                           output_pdf_file=os.path.join(output_folder, 'Event.pdf'),
                           event_range=(0, 40),
                           dut=1)

    # Calculate the unconstrained residuals to check the alignment
    result_analysis.calculate_residuals(input_tracks_file=os.path.join(output_folder, 'Tracks_prealigned.h5'),
                                        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
                                        output_residuals_file=os.path.join(output_folder, 'Residuals_prealigned.h5'),
                                        n_pixels=n_pixels,
                                        pixel_size=pixel_size,
                                        force_prealignment=True)

    # Calculate the efficiency and mean hit/track hit distance
    # When needed, set included column and row range for each DUT as list of tuples
    result_analysis.calculate_efficiency(input_tracks_file=os.path.join(output_folder, 'Tracks_prealigned.h5'),
                                         input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
                                         output_efficiency_file=os.path.join(output_folder, 'Efficiency.h5'),
                                         bin_size=[(250, 50)],
                                         sensor_size=[(250. * 80, 50. * 336)],
                                         minimum_track_density=2,
                                         use_duts=None,
                                         cut_distance=500,
                                         max_distance=500,
                                         col_range=None,
                                         row_range=None,
                                         pixel_size=pixel_size,
                                         n_pixels=n_pixels,
                                         force_prealignment=True)
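
All steps communicate through HDF5 files, so intermediate results (Merged.h5, Tracks_prealigned.h5, ...) can be sanity-checked outside the analysis chain. A generic PyTables inspection, independent of testbeam_analysis:

import tables as tb

def peek_h5(path, n_rows=5):
    # Print every table/array in the file together with its first rows
    with tb.open_file(path, mode='r') as h5_file:
        for node in h5_file.walk_nodes('/', classname='Leaf'):
            print(node._v_pathname, node.shape)
            print(node[:n_rows])
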
Example #6
def run_analysis():
    # Get the absolute path of example data
    tests_data_folder = os.path.join(
        os.path.dirname(
            os.path.abspath(inspect.getfile(inspect.currentframe()))), 'data')

    # The location of the data files, one file per DUT
    data_files = [
        (os.path.join(tests_data_folder,
                      'TestBeamData_FEI4_DUT%d' % i + '.h5'))
        for i in [0, 1, 4, 5]
    ]  # The first device is the reference for the coordinate system

    # Dimensions
    pixel_size = [(250, 50)] * 4  # in um
    n_pixels = [(80, 336)] * 4
    z_positions = [0., 19500, 108800, 128300]  # in um
    dut_names = ("Tel_0", "Tel_1", "Tel_2", "Tel_3")

    # Create output subfolder where all output data and plots are stored
    output_folder = os.path.join(
        os.path.split(data_files[0])[0], 'output_fei4')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # The following shows a complete test beam analysis by calling the separate functions in the correct order

    # Cluster hits of all DUTs
    kwargs = [
        {  # Input parameters of the cluster function
            'input_hits_file': data_files[i],
            'max_x_distance': 2,
            'max_y_distance': 1,
            'max_time_distance': 2,
            'max_cluster_hits': 1000,
            'dut_name': dut_names[i]
        } for i in range(0, len(data_files))
    ]
    pool = Pool()
    for kwarg in kwargs:
        pool.apply_async(
            hit_analysis.cluster_hits, kwds=kwarg
        )  # Non-blocking call of the cluster function, runs in a separate process
    pool.close()
    pool.join()

    # Correlate the row / column of each DUT
    dut_alignment.correlate_cluster(input_cluster_files=[
        data_file[:-3] + '_cluster.h5' for data_file in data_files
    ],
                                    output_correlation_file=os.path.join(
                                        output_folder, 'Correlation.h5'),
                                    n_pixels=n_pixels,
                                    pixel_size=pixel_size,
                                    dut_names=dut_names)

    # Correct all DUT hits via alignment information and merge the cluster tables to one tracklets table aligned at the event number
    dut_alignment.merge_cluster_data(input_cluster_files=[
        data_file[:-3] + '_cluster.h5' for data_file in data_files
    ],
                                     n_pixels=n_pixels,
                                     output_merged_file=os.path.join(
                                         output_folder, 'Merged.h5'),
                                     pixel_size=pixel_size)

    # Create prealignment data for the DUT positions to the first DUT from the correlations
    dut_alignment.prealignment(
        input_correlation_file=os.path.join(output_folder, 'Correlation.h5'),
        output_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        z_positions=z_positions,
        pixel_size=pixel_size,
        s_n=0.1,
        fit_background=False,
        reduce_background=False,
        dut_names=dut_names,
        non_interactive=True
    )  # Tries to find cuts automatically; deactivate to do this manually

    dut_alignment.apply_alignment(
        input_hit_file=os.path.join(output_folder, 'Merged.h5'),
        input_alignment=os.path.join(output_folder, 'Alignment.h5'),
        output_hit_aligned_file=os.path.join(output_folder,
                                             'Tracklets_prealigned.h5'),
        force_prealignment=True
    )  # If there is already an alignment info in the alignment file this has to be set

    # Find tracks from the tracklets and store them with a quality indicator in the track candidates table
    track_analysis.find_tracks(
        input_tracklets_file=os.path.join(output_folder,
                                          'Tracklets_prealigned.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_track_candidates_file=os.path.join(
            output_folder, 'TrackCandidates_prealigned.h5')
    )  # If there is already an alignment info in the alignment file this has to be set

    # Fit the track candidates and create new track table
    track_analysis.fit_tracks(
        input_track_candidates_file=os.path.join(
            output_folder, 'TrackCandidates_prealigned.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_tracks_file=os.path.join(output_folder, 'Tracks_prealigned.h5'),
        fit_duts=[0, 1, 2, 3],
        selection_track_quality=1,
        force_prealignment=True)

    # Optional: plot some tracks (or track candidates) of a selected event range
    plot_utils.plot_events(input_tracks_file=os.path.join(
        output_folder, 'Tracks_prealigned.h5'),
                           output_pdf=os.path.join(output_folder, 'Event.pdf'),
                           event_range=(0, 40),
                           dut=1)

    # Calculate the unconstrained residuals to check the alignment
    result_analysis.calculate_residuals(
        input_tracks_file=os.path.join(output_folder, 'Tracks_prealigned.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_residuals_file=os.path.join(output_folder,
                                           'Residuals_prealigned.h5'),
        n_pixels=n_pixels,
        pixel_size=pixel_size,
        force_prealignment=True)

    # Calculate the efficiency and mean hit/track hit distance
    # When needed, set included column and row range for each DUT as list of tuples
    result_analysis.calculate_efficiency(
        input_tracks_file=os.path.join(output_folder, 'Tracks_prealigned.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_file=os.path.join(output_folder, 'Efficiency.h5'),
        output_pdf=os.path.join(output_folder, 'Efficiency.pdf'),
        bin_size=[(250, 50)],
        sensor_size=[(250. * 80, 50. * 336)],
        minimum_track_density=2,
        use_duts=None,
        cut_distance=500,
        max_distance=500,
        col_range=None,
        row_range=None,
        pixel_size=pixel_size,
        n_pixels=n_pixels,
        force_prealignment=True)
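
None of the run_analysis variants above ships an entry point. Since several of them spawn a multiprocessing.Pool, they must be invoked from a guarded main block (mandatory on Windows, where child processes re-import the module):

if __name__ == '__main__':
    run_analysis()                   # Examples #4 and #6 take no arguments
    # run_analysis(n_events=100000)  # Example #3 (simulation)
    # run_analysis(data_files)       # Example #5 expects the DUT file list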