Example #1
    def test_efficiency_calculation(self):
        efficiencies = result_analysis.calculate_efficiency(
            input_tracks_file=analysis_utils.get_data(
                'fixtures/result_analysis/Tracks_result.h5'),
            input_alignment_file=analysis_utils.get_data(
                'fixtures/result_analysis/Alignment_result.h5'),
            output_efficiency_file=os.path.join(self.output_folder,
                                                'Efficiency.h5'),
            bin_size=[(250, 50)] * 4,
            sensor_size=[(250 * 80., 336 * 50.)] * 4,
            pixel_size=[(250, 50)] * 4,
            n_pixels=[(80, 336)] * 4,
            minimum_track_density=2,
            use_duts=None,
            cut_distance=500,
            max_distance=500,
            #col_range=[(1250, 17500)]*4,
            #row_range=[(1000, 16000)]*4,
            force_prealignment=True)

        self.assertAlmostEqual(efficiencies[0],
                               100.000,
                               msg='DUT 0 efficiencies do not match',
                               places=3)
        self.assertAlmostEqual(efficiencies[1],
                               98.7013,
                               msg='DUT 1 efficiencies do not match',
                               places=3)
        self.assertAlmostEqual(efficiencies[2],
                               97.4684,
                               msg='DUT 2 efficiencies do not match',
                               places=3)
        self.assertAlmostEqual(efficiencies[3],
                               100.000,
                               msg='DUT 3 efficiencies do not match',
                               places=3)
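
# A minimal sketch of the test-case scaffolding this method would sit in; the
# import paths and the fixture handling are assumptions and may differ from the
# package's actual test suite.
#
#     import os
#     import unittest
#     import tempfile
#
#     from testbeam_analysis import result_analysis
#     from testbeam_analysis.tools import analysis_utils
#
#     class TestResultAnalysis(unittest.TestCase):
#
#         def setUp(self):
#             # Temporary folder that collects the files written by the analysis
#             self.output_folder = tempfile.mkdtemp()
#
#         # test_efficiency_calculation() from above goes here
#
#     if __name__ == '__main__':
#         unittest.main()
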
Example #2
def run_analysis():
    # The location of the example data files, one file per DUT
    data_files = [
        r'H:\Testbeam_05052016_LFCMOS\Telescope_data\kartel-converted-synchronized_plane0.h5',
        r'H:\Testbeam_05052016_LFCMOS\Telescope_data\kartel-converted-synchronized_plane1.h5',
        r'H:\Testbeam_05052016_LFCMOS\Telescope_data\kartel-converted-synchronized_plane2.h5',
        r'H:\Testbeam_05052016_LFCMOS\Telescope_data\fe_dut-converted-synchronized_plane0.h5',
        r'H:\Testbeam_05052016_LFCMOS\Telescope_data\fe_dut-converted-synchronized_plane1.h5',
        r'H:\Testbeam_05052016_LFCMOS\Telescope_data\kartel-converted-synchronized_plane3.h5',
        r'H:\Testbeam_05052016_LFCMOS\Telescope_data\kartel-converted-synchronized_plane4.h5',
        r'H:\Testbeam_05052016_LFCMOS\Telescope_data\kartel-converted-synchronized_plane5.h5'
    ]  # The first device is the reference for the coordinate system

    # Pixel dimensions and matrix sizes of the DUTs
    pixel_size = [(18.4, 18.4), (18.4, 18.4), (18.4, 18.4), (250, 50),
                  (250, 50), (18.4, 18.4), (18.4, 18.4),
                  (18.4, 18.4)]  # (Column, row) pixel pitch in um
    n_pixels = [(1152, 576), (1152, 576), (1152, 576), (80, 336), (80, 336),
                (1152, 576), (1152, 576),
                (1152, 576)]  # (Column, row) dimensions of the pixel matrix
    z_positions = [
        0., 20000, 40000, 40000 + 101000, 40000 + 101000 + 23000, 247000,
        267000, 287000
    ]  # in um
    dut_names = ("Tel 0", "Tel 1", "Tel 2", "LFCMOS3", "FEI4 Reference",
                 "Tel 3", "Tel 4", "Tel 5")  # Friendly names for plotting

    # Folder where all output data and plots are stored
    output_folder = r'H:\Testbeam_05052016_LFCMOS\output'

    # The following shows a complete test beam analysis by calling the separate functions in the correct order

    # Generate noisy pixel mask for all DUTs
    threshold = [2, 2, 2, 10, 10, 2, 2, 2]
    for i, data_file in enumerate(data_files):
        hit_analysis.generate_pixel_mask(input_hits_file=data_file,
                                         n_pixel=n_pixels[i],
                                         pixel_mask_name='NoisyPixelMask',
                                         pixel_size=pixel_size[i],
                                         threshold=threshold[i],
                                         dut_name=dut_names[i])

    # Cluster hits from all DUTs
    column_cluster_distance = [3, 3, 3, 2, 2, 3, 3, 3]
    row_cluster_distance = [3, 3, 3, 3, 3, 3, 3, 3]
    frame_cluster_distance = [0, 0, 0, 0, 0, 0, 0, 0]
    for i, data_file in enumerate(data_files):
        hit_analysis.cluster_hits(
            input_hits_file=data_file,
            input_noisy_pixel_mask_file=os.path.splitext(data_files[i])[0] +
            '_noisy_pixel_mask.h5',
            min_hit_charge=0,
            max_hit_charge=13,
            column_cluster_distance=column_cluster_distance[i],
            row_cluster_distance=row_cluster_distance[i],
            frame_cluster_distance=frame_cluster_distance[i],
            dut_name=dut_names[i])

    # Generate filenames for cluster data
    input_cluster_files = [
        os.path.splitext(data_file)[0] + '_clustered.h5'
        for data_file in data_files
    ]

    # Correlate the row / column of each DUT
    dut_alignment.correlate_cluster(input_cluster_files=input_cluster_files,
                                    output_correlation_file=os.path.join(
                                        output_folder, 'Correlation.h5'),
                                    n_pixels=n_pixels,
                                    pixel_size=pixel_size,
                                    dut_names=dut_names)

    # Create prealignment relative to the first DUT from the correlation data
    dut_alignment.prealignment(
        input_correlation_file=os.path.join(output_folder, 'Correlation.h5'),
        output_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        z_positions=z_positions,
        pixel_size=pixel_size,
        dut_names=dut_names,
        fit_background=True,
        non_interactive=False
    )  # Tries to find cuts automatically; deactivate to do this manually

    # Merge the cluster tables to one merged table aligned at the event number
    dut_alignment.merge_cluster_data(input_cluster_files=input_cluster_files,
                                     output_merged_file=os.path.join(
                                         output_folder, 'Merged.h5'),
                                     n_pixels=n_pixels,
                                     pixel_size=pixel_size)

    # Apply the prealignment to the merged cluster table to create tracklets
    dut_alignment.apply_alignment(
        input_hit_file=os.path.join(output_folder, 'Merged.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_hit_file=os.path.join(output_folder, 'Tracklets_prealigned.h5'),
        force_prealignment=True)

    # Find tracks from the prealigned tracklets and store them with a quality indicator in the track candidates table
    track_analysis.find_tracks(
        input_tracklets_file=os.path.join(output_folder,
                                          'Tracklets_prealigned.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_track_candidates_file=os.path.join(
            output_folder, 'TrackCandidates_prealignment.h5'))

    # Select tracks with a hit in the time reference (DUT 4) and all position devices to increase the analysis speed by reducing the data
    data_selection.select_hits(hit_file=os.path.join(
        output_folder, 'TrackCandidates_prealignment.h5'),
                               track_quality=0b11110111,
                               track_quality_mask=0b11110111)
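    # Note on the bit masks above: in this track_quality word each bit in the
    # lowest byte presumably corresponds to one DUT (bit 0 = DUT 0, bit 1 = DUT 1, ...),
    # so 0b11110111 requires a quality hit in every plane except DUT 3, the small
    # LFCMOS device, while keeping the time reference (DUT 4) mandatory.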

    # Do an alignment step with the track candidates; this corrects rotations and is therefore much more precise than the simple prealignment
    dut_alignment.alignment(
        input_track_candidates_file=os.path.join(
            output_folder, 'TrackCandidates_prealignment_reduced.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        # Order of combinations of planes to align; one should start with the high-resolution planes (here: telescope planes)
        align_duts=[
            [0, 1, 2, 5, 6, 7],  # align the telescope planes first
            [4],  # align the time reference after the telescope alignment
            [3]
        ],  # align the DUT last and not together with the reference, since it is rather small and would degrade the time reference alignment
        # The DUTs to be used in the fit; only the high-resolution Mimosa26 planes are used
        select_fit_duts=[0, 1, 2, 5, 6, 7],
        # The DUTs to be required to have a hit for the alignment
        select_hit_duts=[
            [0, 1, 2, 4, 5, 6, 7],  # Take tracks with time reference hit
            [0, 1, 2, 4, 5, 6, 7],  # Take tracks with time reference hit
            [0, 1, 2, 3, 4, 5, 6, 7]
        ],  # Also require hit in the small DUT
        # The required track quality per alignment step and DUT
        selection_track_quality=[
            [1, 1, 1, 0, 1, 1, 1],  # Do not require a good hit in the time reference
            [1, 1, 1, 1, 1, 1, 1],
            [1, 1, 1, 1, 0, 1, 1, 1]
        ],  # Do not require a good hit in the small DUT
        initial_rotation=[
            [0., 0., 0.],
            [0., 0., 0.],
            [0., 0., 0.],
            # Devices 3, 4 are heavily rotated (inverted); this is not handled automatically,
            # so the correct rotation angles have to be set here manually
            [np.pi - 0.05, -0.05, -0.005],
            [np.pi - 0.01, -0.02, -0.0005],
            [0., 0, 0.],
            [0., 0, 0.],
            [0., 0, 0.]
        ],
        initial_translation=[
            [0., 0, 0.],
            [0., 0, 0.],
            [0., 0, 0.],
            # Devices 3, 4 are heavily rotated (inverted); this is not handled automatically,
            # so the correct positions have to be set here manually
            [11540, 18791, 0.],
            [710., 9851., 0.],
            [0., 0, 0.],
            [0., 0, 0.],
            [0., 0, 0.]
        ],
        n_pixels=n_pixels,
        # Do the alignment only on a subset of the data, needed for a reasonable run time
        use_n_tracks=200000,
        pixel_size=pixel_size)

    # Apply new alignment to data
    # Revert the alignment of the track candidates. Usually one would just apply the alignment to the merged data,
    # but due to the large beam angle track finding fails on aligned data, so we rely on the tracks found with the prealignment.
    dut_alignment.apply_alignment(
        input_hit_file=os.path.join(output_folder,
                                    'TrackCandidates_prealignment_reduced.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_hit_file=os.path.join(
            output_folder, 'Merged_small.h5'
        ),  # This is the new, not yet aligned but preselected, merged data file to apply the (pre-)alignment to
        inverse=True,
        force_prealignment=True)

    # Apply the alignment to the merged cluster table to create tracklets
    dut_alignment.apply_alignment(
        input_hit_file=os.path.join(output_folder, 'Merged_small.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_hit_file=os.path.join(output_folder, 'TrackCandidates.h5'))

    # Fit track using alignment
    track_analysis.fit_tracks(
        input_track_candidates_file=os.path.join(output_folder,
                                                 'TrackCandidates.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_tracks_file=os.path.join(output_folder, 'Tracks.h5'),
        select_hit_duts=[0, 1, 2, 4, 5, 6, 7],
        select_fit_duts=[0, 1, 2, 5, 6, 7],
        selection_track_quality=1
    )  # Take all tracks with good hits, do not care about time reference hit quality

    # Create unconstrained residuals with aligned data
    result_analysis.calculate_residuals(
        input_tracks_file=os.path.join(output_folder, 'Tracks.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_residuals_file=os.path.join(output_folder, 'Residuals.h5'),
        n_pixels=n_pixels,
        pixel_size=pixel_size)

    # Calculate efficiency with aligned data
    result_analysis.calculate_efficiency(
        input_tracks_file=os.path.join(output_folder, 'Tracks.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_efficiency_file=os.path.join(output_folder, 'Efficiency.h5'),
        bin_size=(10, 10),
        use_duts=[3],
        sensor_size=[(20000, 10000), (20000, 10000), (20000, 10000),
                     (20000, 20000), (20000, 10000), (20000, 10000),
                     (20000, 10000)])

    # Fit tracks using the prealignment
    track_analysis.fit_tracks(
        input_track_candidates_file=os.path.join(
            output_folder, 'TrackCandidates_prealignment_reduced.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_tracks_file=os.path.join(output_folder,
                                        'Tracks_prealignment.h5'),
        force_prealignment=True,
        select_hit_duts=[0, 1, 2, 4, 5, 6, 7],
        select_fit_duts=[0, 1, 2, 5, 6, 7],
        selection_track_quality=1
    )  # Take all tracks with good hits, do not care about time reference hit quality

    # Create unconstrained residuals with prealigned data
    result_analysis.calculate_residuals(
        input_tracks_file=os.path.join(output_folder,
                                       'Tracks_prealignment.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_residuals_file=os.path.join(output_folder,
                                           'Residuals_prealignment.h5'),
        force_prealignment=True,
        n_pixels=n_pixels,
        pixel_size=pixel_size)

    # Create efficiency plot with prealigned data
    result_analysis.calculate_efficiency(
        input_tracks_file=os.path.join(output_folder,
                                       'Tracks_prealignment.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_efficiency_file=os.path.join(output_folder,
                                            'Efficiency_prealignment.h5'),
        force_prealignment=True,
        bin_size=(10, 10),
        use_duts=[3],
        sensor_size=[(20000, 10000), (20000, 10000), (20000, 10000),
                     (20000, 20000), (20000, 10000), (20000, 10000),
                     (20000, 10000)])
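
# Entry-point sketch for the example above. The import paths are assumptions
# based on the testbeam_analysis package layout and would normally sit at the
# top of the script; adjust them if your installation differs.
import os
import numpy as np

from testbeam_analysis import hit_analysis
from testbeam_analysis import dut_alignment
from testbeam_analysis import track_analysis
from testbeam_analysis import result_analysis
from testbeam_analysis.tools import data_selection

if __name__ == '__main__':
    run_analysis()
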
Example #3
def run_analysis(hit_files):
    # Create output subfolder where all output data and plots are stored
    output_folder = os.path.join(
        os.path.split(hit_files[0])[0], 'output_fei4_telescope')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    mask_files = [(os.path.splitext(hit_file)[0] + '_mask.h5')
                  for hit_file in hit_files]
    cluster_files = [(os.path.splitext(hit_file)[0] + '_clustered.h5')
                     for hit_file in hit_files]

    z_positions = [0.0, 19500.0, 108800.0, 128300.0]  # in um
    initial_configuration = os.path.join(output_folder, 'telescope.yaml')
    telescope = Telescope()
    telescope.add_dut(dut_type="FEI4",
                      dut_id=0,
                      translation_x=0,
                      translation_y=0,
                      translation_z=z_positions[0],
                      rotation_alpha=0,
                      rotation_beta=0,
                      rotation_gamma=0,
                      name="Telescope 1")
    telescope.add_dut(dut_type="FEI4",
                      dut_id=1,
                      translation_x=0,
                      translation_y=0,
                      translation_z=z_positions[1],
                      rotation_alpha=0,
                      rotation_beta=0,
                      rotation_gamma=0,
                      name="Telescope 2")
    telescope.add_dut(dut_type="FEI4",
                      dut_id=2,
                      translation_x=0,
                      translation_y=0,
                      translation_z=z_positions[2],
                      rotation_alpha=0,
                      rotation_beta=0,
                      rotation_gamma=0,
                      name="Telescope 3")
    telescope.add_dut(dut_type="FEI4",
                      dut_id=3,
                      translation_x=0,
                      translation_y=0,
                      translation_z=z_positions[3],
                      rotation_alpha=0,
                      rotation_beta=0,
                      rotation_gamma=0,
                      name="Telescope 4")
    telescope.save_configuration(initial_configuration)
    prealigned_configuration = os.path.join(output_folder,
                                            'telescope_prealigned.yaml')
    aligned_configuration = os.path.join(output_folder,
                                         'telescope_aligned.yaml')

    check_files = hit_analysis.check(
        telescope_configuration=initial_configuration,
        input_hit_files=hit_files)

    # Generate noisy pixel mask for all DUTs
    thresholds = [100, 100, 100, 100]
    pixel_mask_names = ["NoisyPixelMask"] * len(thresholds)
    mask_files = hit_analysis.mask(
        telescope_configuration=initial_configuration,
        input_hit_files=hit_files,
        pixel_mask_names=pixel_mask_names,
        thresholds=thresholds)

    # Cluster hits from all DUTs
    use_positions = [False, False, False, False]
    min_hit_charges = [0, 0, 0, 0]
    max_hit_charges = [13, 13, 13, 13]
    column_cluster_distances = [1, 1, 1, 1]
    row_cluster_distances = [3, 3, 3, 3]
    frame_cluster_distances = [4, 4, 4, 4]
    cluster_files = hit_analysis.cluster(
        telescope_configuration=initial_configuration,
        select_duts=None,
        input_hit_files=hit_files,
        input_mask_files=[
            None if val else mask_files[i]
            for i, val in enumerate(use_positions)
        ],
        use_positions=use_positions,
        min_hit_charges=min_hit_charges,
        max_hit_charges=max_hit_charges,
        column_cluster_distances=column_cluster_distances,
        row_cluster_distances=row_cluster_distances,
        frame_cluster_distances=frame_cluster_distances)

    # Correlate each DUT with the first DUT
    hit_analysis.correlate(telescope_configuration=initial_configuration,
                           input_files=cluster_files,
                           output_correlation_file=os.path.join(
                               output_folder, 'Correlation.h5'),
                           resolution=(250.0, 50.0),
                           select_reference_duts=0)

    # Create pre-alignment, take first DUT as reference
    prealigned_configuration = dut_alignment.prealign(
        telescope_configuration=initial_configuration,
        input_correlation_file=os.path.join(output_folder, 'Correlation.h5'),
        reduce_background=True,
        select_reference_dut=0)

    # Merge all cluster tables into a single table
    hit_analysis.merge_cluster_data(
        telescope_configuration=initial_configuration,
        input_cluster_files=cluster_files,
        output_merged_file=os.path.join(output_folder, 'Merged.h5'))

    # Create alignment, take first and last DUT as reference (telescope DUTs)
    aligned_configuration = dut_alignment.align(
        telescope_configuration=prealigned_configuration,
        input_merged_file=os.path.join(output_folder, 'Merged.h5'),
        select_duts=[[0, 1, 2, 3]],  # align all planes at once
        select_telescope_duts=[0, 3],  # outermost planes; z positions of telescope DUTs are fixed, if not stated otherwise (see select_alignment_parameters)
        select_fit_duts=[0, 1, 2, 3],  # use all DUTs for track fit
        select_hit_duts=[[0, 1, 2, 3]],  # require hits in all DUTs
        max_iterations=[7],  # number of alignment iterations; the higher the number, the more precise
        max_events=100000,  # limit the number of events to speed up the alignment
        quality_distances=[(250.0, 50.0), (250.0, 50.0), (250.0, 50.0),
                           (250.0, 50.0)],
        isolation_distances=(1000.0, 1000.0),
        use_limits=True,
        plot=True)

    # Find tracks from the tracklets and create a track candidates table
    track_analysis.find_tracks(telescope_configuration=aligned_configuration,
                               input_merged_file=os.path.join(
                                   output_folder, 'Merged.h5'),
                               output_track_candidates_file=os.path.join(
                                   output_folder,
                                   'TrackCandidates_aligned.h5'),
                               align_to_beam=True)

    # Fit the track candidates, assign quality flags, and create a track table
    track_analysis.fit_tracks(
        telescope_configuration=aligned_configuration,
        input_track_candidates_file=os.path.join(output_folder,
                                                 'TrackCandidates_aligned.h5'),
        output_tracks_file=os.path.join(output_folder, 'Tracks_aligned.h5'),
        select_duts=[0, 1, 2, 3],
        select_fit_duts=(0, 1, 2, 3),
        select_hit_duts=(0, 1, 2, 3),
        exclude_dut_hit=True,
        quality_distances=[(250.0, 50.0), (250.0, 50.0), (250.0, 50.0),
                           (250.0, 50.0)],
        isolation_distances=(1000.0, 1000.0),
        use_limits=False,
        plot=True)

    # Do additional track selection cuts on the tracks table
    data_selection.select_tracks(
        telescope_configuration=aligned_configuration,
        input_tracks_file=os.path.join(output_folder, 'Tracks_aligned.h5'),
        output_tracks_file=os.path.join(output_folder,
                                        'Tracks_aligned_selected.h5'),
        select_duts=[0, 1, 2, 3],
        select_hit_duts=[[1, 2, 3], [0, 2, 3], [0, 1, 3], [0, 1, 2]],
        select_no_hit_duts=None,
        select_quality_duts=[[1, 2, 3], [0, 2, 3], [0, 1, 3], [0, 1, 2]],
        query='(track_chi2 < 10)')

    # Calculate the unconstrained residuals from final tracks to check the alignment
    result_analysis.calculate_residuals(
        telescope_configuration=aligned_configuration,
        input_tracks_file=os.path.join(output_folder,
                                       'Tracks_aligned_selected.h5'),
        output_residuals_file=os.path.join(output_folder,
                                           'Residuals_aligned.h5'),
        select_duts=[0, 1, 2, 3],
        nbins_per_pixel=20,
        use_limits=True)

    # Plot the track angles of the final tracks
    result_analysis.histogram_track_angle(
        telescope_configuration=aligned_configuration,
        input_tracks_file=os.path.join(output_folder,
                                       'Tracks_aligned_selected.h5'),
        output_track_angle_file=None,
        n_bins=200,
        select_duts=[0, 1, 2, 3],
        plot=True)

    # Plot the 2D track density of the final tracks
    plot_utils.plot_track_density(
        telescope_configuration=aligned_configuration,
        input_tracks_file=os.path.join(output_folder,
                                       'Tracks_aligned_selected.h5'),
        select_duts=[0, 1, 2, 3])

    # Plotting of the 2D charge distribution of the final tracks
    plot_utils.plot_charge_distribution(
        telescope_configuration=aligned_configuration,
        input_tracks_file=os.path.join(output_folder,
                                       'Tracks_aligned_selected.h5'),
        select_duts=[0, 1, 2, 3])

    # Plot some final tracks (or track candidates) from a selected event range
    plot_utils.plot_events(
        telescope_configuration=aligned_configuration,
        input_tracks_file=os.path.join(output_folder,
                                       'Tracks_aligned_selected.h5'),
        output_pdf_file=os.path.join(output_folder, 'Events.pdf'),
        select_duts=[1],
        event_range=(0, 40))

    # Create final efficiency plots from final tracks
    result_analysis.calculate_efficiency(
        telescope_configuration=aligned_configuration,
        input_tracks_file=os.path.join(output_folder,
                                       'Tracks_aligned_selected.h5'),
        output_efficiency_file=os.path.join(output_folder, 'Efficiency.h5'),
        select_duts=[0, 1, 2, 3],
        resolutions=(250, 50),
        extend_areas=(2000, 2000),
        plot_ranges=None,
        efficiency_regions=None,
        minimum_track_density=1,
        cut_distances=(1000.0, 1000.0))
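
# Entry-point sketch for the example above. The import paths follow the
# beam_telescope_analysis package layout as far as known and are assumptions;
# the hit file names are purely hypothetical placeholders.
import os

from beam_telescope_analysis import hit_analysis, dut_alignment, track_analysis, result_analysis
from beam_telescope_analysis.telescope.telescope import Telescope
from beam_telescope_analysis.tools import plot_utils, data_selection

if __name__ == '__main__':
    # Hypothetical raw hit files, one per FE-I4 telescope plane
    hit_files = [r'data/TestBeamData_FEI4_plane%d.h5' % i for i in range(4)]
    run_analysis(hit_files)
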
Example #4
def run_analysis(n_events):
    # Start simulator with random seed 0
    sim = SimulateData(random_seed=0)

    # All standard simulator settings are listed here and can be changed.
    # Dimensions are in um, angles in mRad, temperatures in Kelvin,
    # voltages in Volt

    # General setup
    sim.n_duts = 6  # Number of DUTs in the simulation
    sim.z_positions = [i * 10000 for i in range(sim.n_duts)]
    sim.offsets = [(-10000 + 111 * 0., -10000 + 111 * 0.)
                   for i in range(sim.n_duts)]
    sim.rotations = [(0, 0, 0)] * sim.n_duts  # Rotation around the x, y, z axes
    sim.temperature = 300  # needed for charge sharing calculation

    # Beam related settings
    sim.beam_position = (0, 0)  # Average beam position in x, y at z = 0
    sim.beam_position_sigma = (2000, 2000)  # in x, y at z = 0
    sim.beam_momentum = 3200  # MeV
    sim.beam_angle = 0  # Average beam angle in theta at z = 0
    sim.beam_angle_sigma = 2  # Deviation of average beam angle in theta
    sim.tracks_per_event = 3  # Average number of tracks per event
    # Deviation from the average number of tracks,
    # which also makes events without any track possible!
    sim.tracks_per_event_sigma = 1

    # Device specific settings
    sim.dut_bias = [50] * sim.n_duts  # Sensor bias voltage
    sim.dut_thickness = [200] * sim.n_duts  # Sensor thickness
    # Detection threshold for each device in electrons, influences efficiency!
    sim.dut_threshold = [0.] * sim.n_duts
    sim.dut_noise = [0.] * sim.n_duts  # Noise for each device in electrons
    sim.dut_pixel_size = [(50, 18.4)] * sim.n_duts  # Pixel size in x / y
    sim.dut_n_pixel = [(400, 1100)] * sim.n_duts  # Number of pixels in x / y
    # Efficiency for each device from 0. to 1. for hits above threshold
    sim.dut_efficiencies = [1.] * sim.n_duts
    # The effective material budget (sensor + passive components) given in
    # total material distance / total radiation length
    # (https://cdsweb.cern.ch/record/1279627/files/PH-EP-Tech-Note-2010-013.pdf)
    # 0 means no multiple scattering; std. setting is the sensor thickness made
    # of silicon as material budget
    sim.dut_material_budget = [
        sim.dut_thickness[i] * 1e-4 / 9.370 for i in range(sim.n_duts)
    ]
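    # Worked example for the expression above, using the values from this script:
    # a 200 um silicon sensor corresponds to 200 * 1e-4 cm = 0.02 cm of material,
    # and with the radiation length of silicon (9.370 cm) the material budget is
    # 0.02 / 9.370 ~= 2.1e-3 radiation lengths per DUT.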
    # Digitization settings
    sim.digitization_charge_sharing = True
    # Shuffle hits per event to challenge track finding
    sim.digitization_shuffle_hits = True
    # Translate hit position on DUT plane to channel indices (column / row)
    sim.digitization_pixel_discretization = True

    # Create the data
    output_folder = 'simulation'  # Define a folder for output data
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)
    sim.create_data_and_store(os.path.join(output_folder, 'simulated_data'),
                              n_events=n_events)

    # The simulated data files, one file per DUT
    data_files = [
        os.path.join(output_folder, r'simulated_data_DUT%d.h5' % i)
        for i in range(sim.n_duts)
    ]

    # The following shows a complete test beam analysis by calling the separate
    # functions in the correct order

    # Cluster hits from all DUTs
    for i, data_file in enumerate(data_files):
        hit_analysis.cluster_hits(input_hits_file=data_file,
                                  min_hit_charge=1,
                                  max_hit_charge=2**16,
                                  column_cluster_distance=1,
                                  row_cluster_distance=1,
                                  frame_cluster_distance=2,
                                  dut_name=data_files[i])

    # Generate filenames for cluster data
    input_cluster_files = [
        os.path.splitext(data_file)[0] + '_clustered.h5'
        for data_file in data_files
    ]

    # Correlate the row / column of each DUT
    dut_alignment.correlate_cluster(input_cluster_files=input_cluster_files,
                                    output_correlation_file=os.path.join(
                                        output_folder, 'Correlation.h5'),
                                    n_pixels=sim.dut_n_pixel,
                                    pixel_size=sim.dut_pixel_size)

    # Create alignment data for the DUT positions relative to the first DUT
    # from the correlation data. When needed, set the offset and error cut for
    # each DUT as a list of tuples
    dut_alignment.prealignment(
        input_correlation_file=os.path.join(output_folder, 'Correlation.h5'),
        output_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        z_positions=sim.z_positions,
        pixel_size=sim.dut_pixel_size,
        # Set no_fit=False if you have a large dataset; the fit enhances the alignment slightly
        no_fit=True,
        fit_background=bool(sim.tracks_per_event or sim.tracks_per_event_sigma),
        # Tries to find cuts automatically; deactivate to do this manually
        non_interactive=True)

    # Correct all DUT hits via alignment information and merge the cluster
    # tables to one tracklets table aligned at the event number
    dut_alignment.merge_cluster_data(input_cluster_files=input_cluster_files,
                                     output_merged_file=os.path.join(
                                         output_folder, 'Merged.h5'),
                                     n_pixels=sim.dut_n_pixel,
                                     pixel_size=sim.dut_pixel_size)

    dut_alignment.apply_alignment(
        input_hit_file=os.path.join(output_folder, 'Merged.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_hit_file=os.path.join(output_folder, 'Tracklets_prealigned.h5'),
        # If there is already an alignment info in the alignment file this has
        # to be set
        force_prealignment=True)

    # Find tracks from the tracklets and store them with a quality indicator
    # in the track candidates table
    track_analysis.find_tracks(
        input_tracklets_file=os.path.join(output_folder,
                                          'Tracklets_prealigned.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_track_candidates_file=os.path.join(
            output_folder, 'TrackCandidates_prealigned.h5'))

    # Fit the track candidates and create new track table
    track_analysis.fit_tracks(
        input_track_candidates_file=os.path.join(
            output_folder, 'TrackCandidates_prealigned.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_tracks_file=os.path.join(output_folder, 'Tracks_prealigned.h5'),
        # To get unconstrained residuals, do not use the DUT hit for the track fit
        exclude_dut_hit=True,
        selection_track_quality=0,
        # To get close to the exact efficiency, strongly suppress merged tracks
        min_track_distance=1000,
        force_prealignment=True)

    result_analysis.calculate_efficiency(
        input_tracks_file=os.path.join(output_folder, 'Tracks_prealigned.h5'),
        input_alignment_file=os.path.join(output_folder, 'Alignment.h5'),
        output_efficiency_file=os.path.join(output_folder, 'Efficiency.h5'),
        bin_size=[(250, 50)],
        sensor_size=[(250. * 80, 50. * 336)],
        minimum_track_density=2,
        use_duts=None,
        cut_distance=500,
        max_distance=500,
        col_range=None,
        row_range=None,
        pixel_size=sim.dut_pixel_size,
        n_pixels=sim.dut_n_pixel,
        force_prealignment=True,
        show_inefficient_events=True)
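
# Entry-point sketch for the simulation example above. The import paths are
# assumptions based on the testbeam_analysis package layout and would normally
# sit at the top of the script.
import os

from testbeam_analysis.tools.simulate_data import SimulateData
from testbeam_analysis import hit_analysis
from testbeam_analysis import dut_alignment
from testbeam_analysis import track_analysis
from testbeam_analysis import result_analysis

if __name__ == '__main__':
    # 100000 simulated events give reasonable statistics at a moderate run time
    run_analysis(n_events=100000)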