def select_tracks(telescope_configuration,
                  input_tracks_file,
                  select_duts,
                  output_tracks_file=None,
                  condition=None,
                  max_events=None,
                  select_hit_duts=None,
                  select_no_hit_duts=None,
                  select_quality_duts=None,
                  select_no_quality_duts=None,
                  chunk_size=1000000):
    ''' Select tracks that match the given conditions.

    Parameters
    ----------
    telescope_configuration : string
        Filename of the telescope configuration file.
    input_tracks_file : string
        Filename of the input tracks file.
    select_duts : list
        DUTs that will be processed.
    output_tracks_file : string
        Filename of the output tracks file. If None, it is derived from the input tracks file.
    condition : string or list
        Condition string (or one string per selected DUT) used to select rows
        from the tracks table, e.g., "track_chi2 <= 5".
    max_events : uint
        Maximum number of randomly selected events.
    select_hit_duts, select_no_hit_duts : list
        Per selected DUT, the DUTs that are required to have (or not have) the hit flag set.
    select_quality_duts, select_no_quality_duts : list
        Per selected DUT, the DUTs that are required to have (or not have) the quality flag set.
    chunk_size : uint
        Chunk size of the data when reading from file.
    '''
    telescope = Telescope(telescope_configuration)
    logging.info('=== Selecting tracks of %d DUTs ===' % len(select_duts))

    if not output_tracks_file:
        output_tracks_file = os.path.splitext(
            input_tracks_file)[0] + '_selected.h5'

    # Check select_duts
    # Check for value errors
    if not isinstance(select_duts, Iterable):
        raise ValueError("select_duts is no iterable")
    elif not select_duts:  # empty iterable
        raise ValueError("select_duts has no items")
    # Check if only non-iterable in iterable
    if not all(map(lambda val: isinstance(val, (int, )), select_duts)):
        raise ValueError("not all items in select_duts are integer")

    # Create select_hit_duts
    if select_hit_duts is None:  # If None, use no selection
        select_hit_duts = [[] for _ in select_duts]
    # Check iterable and length
    if not isinstance(select_hit_duts, Iterable):
        raise ValueError("select_hit_duts is no iterable")
    elif not select_hit_duts:  # empty iterable
        raise ValueError("select_hit_duts has no items")
    # Check if only non-iterable in iterable
    if all(map(lambda val: not isinstance(val, Iterable), select_hit_duts)):
        select_hit_duts = [select_hit_duts[:] for _ in select_duts]
    # Check if only iterable in iterable
    if not all(map(lambda val: isinstance(val, Iterable), select_hit_duts)):
        raise ValueError("not all items in select_hit_duts are iterable")
    # Finally check length of all arrays
    if len(select_hit_duts) != len(select_duts):  # length mismatch
        raise ValueError("select_hit_duts has the wrong length")

    # Create select_no_hit_duts
    if select_no_hit_duts is None:  # If None, use no selection
        select_no_hit_duts = [[] for _ in select_duts]
    # Check iterable and length
    if not isinstance(select_no_hit_duts, Iterable):
        raise ValueError("select_no_hit_duts is no iterable")
    elif not select_no_hit_duts:  # empty iterable
        raise ValueError("select_no_hit_duts has no items")
    # Check if only non-iterable in iterable
    if all(map(lambda val: not isinstance(val, Iterable), select_no_hit_duts)):
        select_no_hit_duts = [select_no_hit_duts[:] for _ in select_duts]
    # Check if only iterable in iterable
    if not all(map(lambda val: isinstance(val, Iterable), select_no_hit_duts)):
        raise ValueError("not all items in select_no_hit_duts are iterable")
    # Finally check length of all arrays
    if len(select_no_hit_duts) != len(select_duts):  # length mismatch
        raise ValueError("select_no_hit_duts has the wrong length")

    # Create select_quality_duts
    if select_quality_duts is None:  # If None, use no selection
        select_quality_duts = [[] for _ in select_duts]
    # Check iterable and length
    if not isinstance(select_quality_duts, Iterable):
        raise ValueError("select_quality_duts is no iterable")
    elif not select_quality_duts:  # empty iterable
        raise ValueError("select_quality_duts has no items")
    # Check if only non-iterable in iterable
    if all(map(lambda val: not isinstance(val, Iterable),
               select_quality_duts)):
        select_quality_duts = [select_quality_duts[:] for _ in select_duts]
    # Check if only iterable in iterable
    if not all(map(lambda val: isinstance(val, Iterable),
                   select_quality_duts)):
        raise ValueError("not all items in select_quality_duts are iterable")
    # Finally check length of all arrays
    if len(select_quality_duts) != len(select_duts):  # length mismatch
        raise ValueError("select_quality_duts has the wrong length")

    # Create select_no_quality_duts
    if select_no_quality_duts is None:  # If None, use no selection
        select_no_quality_duts = [[] for _ in select_duts]
    # Check iterable and length
    if not isinstance(select_no_quality_duts, Iterable):
        raise ValueError("select_no_quality_duts is no iterable")
    elif not select_no_quality_duts:  # empty iterable
        raise ValueError("select_no_quality_duts has no items")
    # Check if only non-iterable in iterable
    if all(
            map(lambda val: not isinstance(val, Iterable),
                select_no_quality_duts)):
        select_no_quality_duts = [
            select_no_quality_duts[:] for _ in select_duts
        ]
    # Check if only iterable in iterable
    if not all(
            map(lambda val: isinstance(val, Iterable),
                select_no_quality_duts)):
        raise ValueError(
            "not all items in select_no_quality_duts are iterable")
    # Finally check length of all arrays
    if len(select_no_quality_duts) != len(select_duts):  # length mismatch
        raise ValueError("select_no_quality_duts has the wrong length")

    # Create condition
    if condition is None:  # If None, use empty strings for all DUTs
        condition = ['' for _ in select_duts]
    # If a single string is given, use it for all selected DUTs
    if isinstance(condition, str):
        condition = [condition] * len(select_duts)
    # Check if only strings in iterable
    if not all(map(lambda val: isinstance(val, str), condition)):
        raise ValueError("not all items in condition are strings")
    # Finally check length of all arrays
    if len(condition) != len(select_duts):  # length mismatch
        raise ValueError("condition has the wrong length")

    with tb.open_file(input_tracks_file, mode='r') as in_file_h5:
        with tb.open_file(output_tracks_file, mode="w") as out_file_h5:
            for index, actual_dut_index in enumerate(select_duts):
                node = in_file_h5.get_node(in_file_h5.root,
                                           'Tracks_DUT%d' % actual_dut_index)
                logging.info('== Selecting tracks for %s ==',
                             telescope[actual_dut_index].name)

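                # Build bit masks from the per-DUT selection lists: bit i of a
                # flag word corresponds to DUT i. A track is kept when its
                # masked hit/quality flag bits equal the required pattern,
                # i.e. (flag & mask) == flags (see the selection below).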
                hit_flags = 0
                hit_mask = 0
                for dut in select_hit_duts[index]:
                    hit_flags |= (1 << dut)
                    hit_mask |= (1 << dut)
                for dut in select_no_hit_duts[index]:
                    hit_mask |= (1 << dut)
                quality_flags = 0
                quality_mask = 0
                for dut in select_quality_duts[index]:
                    quality_flags |= (1 << dut)
                    quality_mask |= (1 << dut)
                for dut in select_no_quality_duts[index]:
                    quality_mask |= (1 << dut)

                tracks_table_out = out_file_h5.create_table(
                    where=out_file_h5.root,
                    name=node.name,
                    description=node.dtype,
                    title=node.title,
                    filters=tb.Filters(complib='blosc',
                                       complevel=5,
                                       fletcher32=False))

                total_n_tracks = node.shape[0]
                total_n_tracks_stored = 0
                total_n_events_stored = 0
                progress_bar = tqdm(total=total_n_tracks, ncols=80)

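                # Read the tracks table in chunks that are aligned to event
                # boundaries, so that events are never split across chunks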
                for tracks, index_chunk in analysis_utils.data_aligned_at_events(
                        node, chunk_size=chunk_size):
                    n_tracks_chunk = tracks.shape[0]

                    if hit_mask != 0 or quality_mask != 0:
                        select = np.ones(n_tracks_chunk, dtype=bool)
                        if hit_mask != 0:
                            select &= ((tracks['hit_flag']
                                        & hit_mask) == hit_flags)
                        if quality_mask != 0:
                            select &= ((tracks['quality_flag']
                                        & quality_mask) == quality_flags)
                        tracks = tracks[select]
                    if condition[index]:
                        tracks = _select_rows_with_condition(
                            tracks, condition[index])

                    unique_events = np.unique(tracks["event_number"])
                    n_events_chunk = unique_events.shape[0]

                    # print "n_events_chunk", n_events_chunk
                    # print "n_tracks_chunk", n_tracks_chunk
                    if max_events:
                        if total_n_tracks == index_chunk:  # last chunk, adding all remaining events
                            select_n_events = max_events - total_n_events_stored
                        elif total_n_events_stored == 0:  # first chunk
                            select_n_events = int(
                                round(max_events *
                                      (n_tracks_chunk / total_n_tracks)))
                        else:
                            # calculate correction of number of selected events
                            correction = (total_n_tracks - index_chunk)/total_n_tracks * 1 / (((total_n_tracks-last_index_chunk)/total_n_tracks)/((max_events-total_n_events_stored_last)/max_events)) \
                                         + (index_chunk)/total_n_tracks * 1 / (((last_index_chunk)/total_n_tracks)/((total_n_events_stored_last)/max_events))
                            #                         select_n_events = np.ceil(n_events_chunk * correction)
                            #                         # calculate correction of number of selected events
                            #                         correction = 1/(((total_n_tracks-last_index_chunk)/total_n_tracks_last)/((max_events-total_n_events_stored_last)/max_events))
                            select_n_events = int(
                                round(max_events *
                                      (n_tracks_chunk / total_n_tracks) *
                                      correction))
                            # print "correction", correction
                        # do not store more events than in current chunk
                        select_n_events = min(n_events_chunk, select_n_events)
                        # do not store more events than given by max_events
                        select_n_events = min(
                            select_n_events,
                            max_events - total_n_events_stored)
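                        # Fixed random seed so that the event selection is
                        # reproducible between runs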
                        np.random.seed(seed=0)
                        selected_events = np.random.choice(
                            unique_events, size=select_n_events, replace=False)
                        store_n_events = selected_events.shape[0]
                        total_n_events_stored += store_n_events
                        # print "store_n_events", store_n_events
                        selected_tracks = np.in1d(tracks["event_number"],
                                                  selected_events)
                        store_n_tracks = np.count_nonzero(selected_tracks)
                        # TODO: total_n_tracks_stored not used...
                        total_n_tracks_stored += store_n_tracks
                        tracks = tracks[selected_tracks]

                    tracks_table_out.append(tracks)
                    tracks_table_out.flush()
                    total_n_events_stored_last = total_n_events_stored
                    total_n_tracks_last = total_n_tracks
                    last_index_chunk = index_chunk
                    progress_bar.update(index_chunk)
                progress_bar.close()
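

# A minimal usage sketch of select_tracks (hypothetical file names; the
# telescope configuration and the tracks file are assumptions, not files
# produced in this snippet):
#
# select_tracks(
#     telescope_configuration='telescope.yaml',
#     input_tracks_file='Tracks.h5',
#     select_duts=[0, 1],
#     select_hit_duts=[[1], [0]],
#     condition='(track_chi2 < 10)',
#     max_events=100000)
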
def run_analysis(hit_files):
    # Create output subfolder where all output data and plots are stored
    output_folder = os.path.join(
        os.path.split(hit_files[0])[0], 'output_eutelescope')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    mask_files = [(os.path.splitext(hit_file)[0] + '_mask.h5')
                  for hit_file in hit_files]
    cluster_files = [(os.path.splitext(hit_file)[0] + '_clustered.h5')
                     for hit_file in hit_files]

    z_positions = [0.0, 150000.0, 300000.0, 450000.0, 600000.0,
                   750000.0]  # in um
    material_budget = [
        100.0 / 125390.0, 100.0 / 125390.0, 100.0 / 125390.0, 100.0 / 125390.0,
        100.0 / 125390.0, 100.0 / 125390.0
    ]
    initial_configuration = os.path.join(output_folder, 'telescope.yaml')
    telescope = Telescope()
    telescope.add_dut(dut_type="Mimosa26",
                      dut_id=0,
                      translation_x=0,
                      translation_y=0,
                      translation_z=z_positions[0],
                      rotation_alpha=0,
                      rotation_beta=0,
                      rotation_gamma=0,
                      material_budget=material_budget[0],
                      name="Telescope 1")
    telescope.add_dut(dut_type="Mimosa26",
                      dut_id=1,
                      translation_x=0,
                      translation_y=0,
                      translation_z=z_positions[1],
                      rotation_alpha=0,
                      rotation_beta=0,
                      rotation_gamma=0,
                      material_budget=material_budget[1],
                      name="Telescope 2")
    telescope.add_dut(dut_type="Mimosa26",
                      dut_id=2,
                      translation_x=0,
                      translation_y=0,
                      translation_z=z_positions[2],
                      rotation_alpha=0,
                      rotation_beta=0,
                      rotation_gamma=0,
                      material_budget=material_budget[2],
                      name="Telescope 3")
    telescope.add_dut(dut_type="Mimosa26",
                      dut_id=3,
                      translation_x=0,
                      translation_y=0,
                      translation_z=z_positions[3],
                      rotation_alpha=0,
                      rotation_beta=0,
                      rotation_gamma=0,
                      material_budget=material_budget[3],
                      name="Telescope 4")
    telescope.add_dut(dut_type="Mimosa26",
                      dut_id=4,
                      translation_x=0,
                      translation_y=0,
                      translation_z=z_positions[4],
                      rotation_alpha=0,
                      rotation_beta=0,
                      rotation_gamma=0,
                      material_budget=material_budget[4],
                      name="Telescope 5")
    telescope.add_dut(dut_type="Mimosa26",
                      dut_id=5,
                      translation_x=0,
                      translation_y=0,
                      translation_z=z_positions[5],
                      rotation_alpha=0,
                      rotation_beta=0,
                      rotation_gamma=0,
                      material_budget=material_budget[5],
                      name="Telescope 6")
    telescope.save_configuration(initial_configuration)
    prealigned_configuration = os.path.join(output_folder,
                                            'telescope_prealigned.yaml')
    aligned_configuration = os.path.join(output_folder,
                                         'telescope_aligned.yaml')

    check_files = hit_analysis.check(
        telescope_configuration=initial_configuration,
        input_hit_files=hit_files)

    # Generate noisy pixel mask for all DUTs
    thresholds = [2, 2, 2, 2, 2, 2]
    # last plane has noisy cluster, use larger median filter to mask cluster
    pixel_mask_names = [
        "NoisyPixelMask", "NoisyPixelMask", "NoisyPixelMask", "NoisyPixelMask",
        "NoisyPixelMask", "DisabledPixelMask"
    ]
    mask_files = hit_analysis.mask(
        telescope_configuration=initial_configuration,
        input_hit_files=hit_files,
        pixel_mask_names=pixel_mask_names,
        thresholds=thresholds)

    # Cluster hits from all DUTs
    use_positions = [False, False, False, False, False, False]
    min_hit_charges = [0, 0, 0, 0, 0, 0]
    max_hit_charges = [1, 1, 1, 1, 1, 1]
    column_cluster_distances = [3, 3, 3, 3, 3, 3]
    row_cluster_distances = [3, 3, 3, 3, 3, 3]
    frame_cluster_distances = [0, 0, 0, 0, 0, 0]
    cluster_files = hit_analysis.cluster(
        telescope_configuration=initial_configuration,
        select_duts=None,
        input_hit_files=hit_files,
        input_mask_files=[
            None if val else mask_files[i]
            for i, val in enumerate(use_positions)
        ],
        use_positions=use_positions,
        min_hit_charges=min_hit_charges,
        max_hit_charges=max_hit_charges,
        column_cluster_distances=column_cluster_distances,
        row_cluster_distances=row_cluster_distances,
        frame_cluster_distances=frame_cluster_distances)

    # Correlate each DUT with the first DUT
    hit_analysis.correlate(telescope_configuration=initial_configuration,
                           input_files=cluster_files,
                           output_correlation_file=os.path.join(
                               output_folder, 'Correlation.h5'),
                           resolution=(50.0, 50.0),
                           select_reference_duts=0)

    # Create pre-alignment, take first DUT as reference
    prealigned_configuration = dut_alignment.prealign(
        telescope_configuration=initial_configuration,
        input_correlation_file=os.path.join(output_folder, 'Correlation.h5'),
        reduce_background=True,
        select_reference_dut=0)

    # Merge all cluster tables into a single table
    hit_analysis.merge_cluster_data(
        telescope_configuration=initial_configuration,
        input_cluster_files=cluster_files,
        output_merged_file=os.path.join(output_folder, 'Merged.h5'))

    # Example 1:
    # The following 4 steps are for demonstration purposes only.
    # They show track finding, fitting and selection, and residual calculation on pre-aligned hits.
    # Usually you would not do this and you would use fully aligned hits instead.

    # Step 1:
    # Find tracks from the tracklets and create a track candidates table
    track_analysis.find_tracks(
        telescope_configuration=prealigned_configuration,
        input_merged_file=os.path.join(output_folder, 'Merged.h5'),
        output_track_candidates_file=os.path.join(
            output_folder, 'TrackCandidates_prealigned.h5'),
        align_to_beam=True)

    # Step 2:
    # Fit the track candidates, assign quality flags, and create a track table
    track_analysis.fit_tracks(telescope_configuration=prealigned_configuration,
                              input_track_candidates_file=os.path.join(
                                  output_folder,
                                  'TrackCandidates_prealigned.h5'),
                              output_tracks_file=os.path.join(
                                  output_folder, 'Tracks_prealigned.h5'),
                              select_duts=[0, 1, 2, 3, 4, 5],
                              select_fit_duts=[0, 1, 2, 3, 4, 5],
                              select_hit_duts=[0, 1, 2, 3, 4, 5],
                              exclude_dut_hit=True,
                              isolation_distances=(1000.0, 1000.0),
                              use_limits=False,
                              plot=True)

    # Step 3:
    # Do additional track selection cuts on the tracks table
    data_selection.select_tracks(
        telescope_configuration=prealigned_configuration,
        input_tracks_file=os.path.join(output_folder, 'Tracks_prealigned.h5'),
        output_tracks_file=os.path.join(output_folder,
                                        'Tracks_prealigned_selected.h5'),
        select_duts=[0, 1, 2, 3, 4, 5],
        select_hit_duts=[0, 1, 2, 3, 4, 5],
        select_no_hit_duts=None,
        select_quality_duts=[[1, 2, 3, 4, 5], [0, 2, 3, 4, 5], [0, 1, 3, 4, 5],
                             [0, 1, 2, 4, 5], [0, 1, 2, 3, 5], [0, 1, 2, 3,
                                                                4]],
        query='(track_chi2 < 500)')

    # Step 4:
    # Calculate the unconstrained residuals from pre-aligned tracks to check the pre-alignment
    result_analysis.calculate_residuals(
        telescope_configuration=prealigned_configuration,
        input_tracks_file=os.path.join(output_folder,
                                       'Tracks_prealigned_selected.h5'),
        output_residuals_file=os.path.join(output_folder,
                                           'Residuals_prealigned.h5'),
        select_duts=[0, 1, 2, 3, 4, 5],
        nbins_per_pixel=20,
        use_limits=True)

    # Create alignment, take first and last DUT as reference ("select_telescope_duts" parameter)
    # The positions (translation and rotation) of the telescope DUTs are not changed
    aligned_configuration = dut_alignment.align(
        telescope_configuration=prealigned_configuration,
        input_merged_file=os.path.join(output_folder, 'Merged.h5'),
        select_duts=[[0, 1, 2, 3, 4, 5]],  # align the telescope planes first
        select_telescope_duts=[0, 1, 2, 3, 4, 5],  # telescope planes
        select_fit_duts=[[0, 1, 2, 3, 4, 5]],
        select_hit_duts=[[0, 1, 2, 3, 4, 5]],
        max_iterations=[5],
        max_events=(100000),
        track_chi2=15.0,
        quality_distances=[(18.4 * 2, 18.4 * 2), (18.4 * 2, 18.4 * 2),
                           (18.4 * 2, 18.4 * 2), (18.4 * 2, 18.4 * 2),
                           (18.4 * 2, 18.4 * 2), (18.4 * 2, 18.4 * 2)],
        isolation_distances=(1000.0, 1000.0),
        use_limits=True,
        plot=True)

    # Find tracks from the tracklets and create a track candidates table
    track_analysis.find_tracks(telescope_configuration=aligned_configuration,
                               input_merged_file=os.path.join(
                                   output_folder, 'Merged.h5'),
                               output_track_candidates_file=os.path.join(
                                   output_folder,
                                   'TrackCandidates_aligned.h5'),
                               align_to_beam=True)

    # Fit the track candidates, assign quality flags, and create a track table
    track_analysis.fit_tracks(
        telescope_configuration=aligned_configuration,
        input_track_candidates_file=os.path.join(output_folder,
                                                 'TrackCandidates_aligned.h5'),
        output_tracks_file=os.path.join(output_folder, 'Tracks_aligned.h5'),
        select_duts=[0, 1, 2, 3, 4, 5],
        select_fit_duts=[0, 1, 2, 3, 4, 5],
        select_hit_duts=[0, 1, 2, 3, 4, 5],
        exclude_dut_hit=True,
        quality_distances=[(18.4 * 2, 18.4 * 2), (18.4 * 2, 18.4 * 2),
                           (18.4 * 2, 18.4 * 2), (18.4 * 2, 18.4 * 2),
                           (18.4 * 2, 18.4 * 2), (18.4 * 2, 18.4 * 2)],
        isolation_distances=(1000.0, 1000.0),
        use_limits=False,
        plot=True)

    # Calculate the unconstrained residuals from all tracks
    result_analysis.calculate_residuals(
        telescope_configuration=aligned_configuration,
        input_tracks_file=os.path.join(output_folder, 'Tracks_aligned.h5'),
        output_residuals_file=os.path.join(output_folder,
                                           'Residuals_aligned.h5'),
        select_duts=[0, 1, 2, 3, 4, 5],
        nbins_per_pixel=20,
        use_limits=True)

    # Do additional track selection cuts on the tracks table
    data_selection.select_tracks(
        telescope_configuration=aligned_configuration,
        input_tracks_file=os.path.join(output_folder, 'Tracks_aligned.h5'),
        output_tracks_file=os.path.join(output_folder,
                                        'Tracks_aligned_selected.h5'),
        select_duts=[0, 1, 2, 3, 4, 5],
        select_hit_duts=[0, 1, 2, 3, 4, 5],
        select_no_hit_duts=None,
        select_quality_duts=[[1, 2, 3, 4, 5], [0, 2, 3, 4, 5], [0, 1, 3, 4, 5],
                             [0, 1, 2, 4, 5], [0, 1, 2, 3, 5], [0, 1, 2, 3,
                                                                4]],
        query='(track_chi2 < 15.0)')

    # Calculate the unconstrained residuals from final tracks (with chi^2 cut and quality selection)
    result_analysis.calculate_residuals(
        telescope_configuration=aligned_configuration,
        input_tracks_file=os.path.join(output_folder,
                                       'Tracks_aligned_selected.h5'),
        output_residuals_file=os.path.join(output_folder,
                                           'Residuals_aligned_selected.h5'),
        select_duts=[0, 1, 2, 3, 4, 5],
        nbins_per_pixel=20,
        use_limits=True)

    # Example 2:
    # Use only the 2 DUTs next to the DUT to fit and cut on the track quality.
    # The track fit is then just a track interpolation with chi2 = 0.
    # This is better here due to heavily scattered tracks, where a straight-line
    # assumption for all DUTs is wrong.
    # This leads to symmetric residuals in x and y for all DUTs that lie
    # between 2 other DUTs (= DUTs 1, 2, 3, 4)
    track_analysis.fit_tracks(
        telescope_configuration=aligned_configuration,
        input_track_candidates_file=os.path.join(output_folder,
                                                 'TrackCandidates_aligned.h5'),
        output_tracks_file=os.path.join(output_folder, 'Tracks_pair.h5'),
        select_duts=[0, 1, 2, 3, 4, 5],
        select_fit_duts=[
            [1, 2],  # Only select DUTs next to the DUT to fit
            [0, 2],
            [1, 3],
            [2, 4],
            [3, 5],
            [3, 4]
        ],
        exclude_dut_hit=True,
        quality_distances=[(18.4 * 2, 18.4 * 2), (18.4 * 2, 18.4 * 2),
                           (18.4 * 2, 18.4 * 2), (18.4 * 2, 18.4 * 2),
                           (18.4 * 2, 18.4 * 2), (18.4 * 2, 18.4 * 2)],
        isolation_distances=(1000.0, 1000.0),
        use_limits=False,
        plot=True)

    # Do additional track selection cuts on the tracks table
    data_selection.select_tracks(
        telescope_configuration=aligned_configuration,
        input_tracks_file=os.path.join(output_folder, 'Tracks_pair.h5'),
        output_tracks_file=os.path.join(output_folder,
                                        'Tracks_pair_selected.h5'),
        select_duts=[0, 1, 2, 3, 4, 5],
        select_hit_duts=[0, 1, 2, 3, 4, 5],
        select_no_hit_duts=None,
        select_quality_duts=[[1, 2, 3, 4, 5], [0, 2, 3, 4, 5], [0, 1, 3, 4, 5],
                             [0, 1, 2, 4, 5], [0, 1, 2, 3, 5], [0, 1, 2, 3,
                                                                4]],
        query='(track_chi2 < 5.0)')

    # Calculate the unconstrained residuals from the final tracks (with chi^2 cut and quality selection)
    result_analysis.calculate_residuals(
        telescope_configuration=aligned_configuration,
        input_tracks_file=os.path.join(output_folder,
                                       'Tracks_pair_selected.h5'),
        output_residuals_file=os.path.join(output_folder,
                                           'Residuals_pair_selected.h5'),
        select_duts=[0, 1, 2, 3, 4, 5],
        nbins_per_pixel=20,
        use_limits=True)

    tests_data_folder = os.path.join(
        os.path.dirname(
            os.path.abspath(inspect.getfile(inspect.currentframe()))), 'data')

    # Create output subfolder where all output data and plots are stored
    output_folder = os.path.join(tests_data_folder, 'output_kalman_filter')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    z_positions = [
        0.0, 29900.0, 60300.0, 82100.0, 118700.0, 160700.0, 197800.0
    ]
    material_budget = [
        100.0 / 125390.0, 100.0 / 125390.0, 100.0 / 125390.0, 100.0 / 125390.0,
        100.0 / 125390.0, 100.0 / 125390.0, 250.0 / 93700
    ]
    telescope = Telescope()
    telescope.add_dut(dut_type="Mimosa26",
                      dut_id=0,
                      translation_x=0,
                      translation_y=0,
                      translation_z=z_positions[0],
                      rotation_alpha=0,
                      rotation_beta=0,
                      rotation_gamma=0,
                      material_budget=material_budget[0],
                      name="Telescope 1")
    telescope.add_dut(dut_type="Mimosa26",
                      dut_id=1,
                      translation_x=0,
                      translation_y=0,
                      translation_z=z_positions[1],
def run_analysis(hit_files):
    # Create output subfolder where all output data and plots are stored
    output_folder = os.path.join(
        os.path.split(hit_files[0])[0], 'output_fei4_telescope')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    mask_files = [(os.path.splitext(hit_file)[0] + '_mask.h5')
                  for hit_file in hit_files]
    cluster_files = [(os.path.splitext(hit_file)[0] + '_clustered.h5')
                     for hit_file in hit_files]

    z_positions = [0.0, 19500.0, 108800.0, 128300.0]  # in um
    initial_configuration = os.path.join(output_folder, 'telescope.yaml')
    telescope = Telescope()
    telescope.add_dut(dut_type="FEI4",
                      dut_id=0,
                      translation_x=0,
                      translation_y=0,
                      translation_z=z_positions[0],
                      rotation_alpha=0,
                      rotation_beta=0,
                      rotation_gamma=0,
                      name="Telescope 1")
    telescope.add_dut(dut_type="FEI4",
                      dut_id=1,
                      translation_x=0,
                      translation_y=0,
                      translation_z=z_positions[1],
                      rotation_alpha=0,
                      rotation_beta=0,
                      rotation_gamma=0,
                      name="Telescope 2")
    telescope.add_dut(dut_type="FEI4",
                      dut_id=2,
                      translation_x=0,
                      translation_y=0,
                      translation_z=z_positions[2],
                      rotation_alpha=0,
                      rotation_beta=0,
                      rotation_gamma=0,
                      name="Telescope 3")
    telescope.add_dut(dut_type="FEI4",
                      dut_id=3,
                      translation_x=0,
                      translation_y=0,
                      translation_z=z_positions[3],
                      rotation_alpha=0,
                      rotation_beta=0,
                      rotation_gamma=0,
                      name="Telescope 4")
    telescope.save_configuration(initial_configuration)
    prealigned_configuration = os.path.join(output_folder,
                                            'telescope_prealigned.yaml')
    aligned_configuration = os.path.join(output_folder,
                                         'telescope_aligned.yaml')

    check_files = hit_analysis.check(
        telescope_configuration=initial_configuration,
        input_hit_files=hit_files)

    # Generate noisy pixel mask for all DUTs
    thresholds = [100, 100, 100, 100]
    pixel_mask_names = ["NoisyPixelMask"] * len(thresholds)
    mask_files = hit_analysis.mask(
        telescope_configuration=initial_configuration,
        input_hit_files=hit_files,
        pixel_mask_names=pixel_mask_names,
        thresholds=thresholds)

    # Cluster hits from all DUTs
    use_positions = [False, False, False, False]
    min_hit_charges = [0, 0, 0, 0]
    max_hit_charges = [13, 13, 13, 13]
    column_cluster_distances = [1, 1, 1, 1]
    row_cluster_distances = [3, 3, 3, 3]
    frame_cluster_distances = [4, 4, 4, 4]
    cluster_files = hit_analysis.cluster(
        telescope_configuration=initial_configuration,
        select_duts=None,
        input_hit_files=hit_files,
        input_mask_files=[
            None if val else mask_files[i]
            for i, val in enumerate(use_positions)
        ],
        use_positions=use_positions,
        min_hit_charges=min_hit_charges,
        max_hit_charges=max_hit_charges,
        column_cluster_distances=column_cluster_distances,
        row_cluster_distances=row_cluster_distances,
        frame_cluster_distances=frame_cluster_distances)

    # Correlate each DUT with the first DUT
    hit_analysis.correlate(telescope_configuration=initial_configuration,
                           input_files=cluster_files,
                           output_correlation_file=os.path.join(
                               output_folder, 'Correlation.h5'),
                           resolution=(250.0, 50.0),
                           select_reference_duts=0)

    # Create pre-alignment, take first DUT as reference
    prealigned_configuration = dut_alignment.prealign(
        telescope_configuration=initial_configuration,
        input_correlation_file=os.path.join(output_folder, 'Correlation.h5'),
        reduce_background=True,
        select_reference_dut=0)

    # Merge all cluster tables into a single table
    hit_analysis.merge_cluster_data(
        telescope_configuration=initial_configuration,
        input_cluster_files=cluster_files,
        output_merged_file=os.path.join(output_folder, 'Merged.h5'))

    # Create alignment, take first and last DUT as reference (telescope DUTs)
    aligned_configuration = dut_alignment.align(
        telescope_configuration=prealigned_configuration,
        input_merged_file=os.path.join(output_folder, 'Merged.h5'),
        select_duts=[[0, 1, 2, 3]],  # align all planes at once
        # Use the outermost planes as reference; z-axis positions are fixed
        # for telescope DUTs, if not stated otherwise (see select_alignment_parameters)
        select_telescope_duts=[0, 3],
        select_fit_duts=[0, 1, 2, 3],  # use all DUTs for track fit
        select_hit_duts=[[0, 1, 2, 3]],  # require hits in all DUTs
        # number of alignment iterations; the higher the number, the more precise
        max_iterations=[7],
        max_events=100000,  # limit number of events to speed up alignment
        quality_distances=[(250.0, 50.0), (250.0, 50.0), (250.0, 50.0),
                           (250.0, 50.0)],
        isolation_distances=(1000.0, 1000.0),
        use_limits=True,
        plot=True)

    # Find tracks from the tracklets and create a track candidates table
    track_analysis.find_tracks(telescope_configuration=aligned_configuration,
                               input_merged_file=os.path.join(
                                   output_folder, 'Merged.h5'),
                               output_track_candidates_file=os.path.join(
                                   output_folder,
                                   'TrackCandidates_aligned.h5'),
                               align_to_beam=True)

    # Fit the track candidates, assign quality flags, and create a track table
    track_analysis.fit_tracks(
        telescope_configuration=aligned_configuration,
        input_track_candidates_file=os.path.join(output_folder,
                                                 'TrackCandidates_aligned.h5'),
        output_tracks_file=os.path.join(output_folder, 'Tracks_aligned.h5'),
        select_duts=[0, 1, 2, 3],
        select_fit_duts=(0, 1, 2, 3),
        select_hit_duts=(0, 1, 2, 3),
        exclude_dut_hit=True,
        quality_distances=[(250.0, 50.0), (250.0, 50.0), (250.0, 50.0),
                           (250.0, 50.0)],
        isolation_distances=(1000.0, 1000.0),
        use_limits=False,
        plot=True)

    # Do additional track selection cuts on the tracks table
    data_selection.select_tracks(
        telescope_configuration=aligned_configuration,
        input_tracks_file=os.path.join(output_folder, 'Tracks_aligned.h5'),
        output_tracks_file=os.path.join(output_folder,
                                        'Tracks_aligned_selected.h5'),
        select_duts=[0, 1, 2, 3],
        select_hit_duts=[[1, 2, 3], [0, 2, 3], [0, 1, 3], [0, 1, 2]],
        select_no_hit_duts=None,
        select_quality_duts=[[1, 2, 3], [0, 2, 3], [0, 1, 3], [0, 1, 2]],
        query='(track_chi2 < 10)')

    # Calculate the unconstrained residuals from final tracks to check the alignment
    result_analysis.calculate_residuals(
        telescope_configuration=aligned_configuration,
        input_tracks_file=os.path.join(output_folder,
                                       'Tracks_aligned_selected.h5'),
        output_residuals_file=os.path.join(output_folder,
                                           'Residuals_aligned.h5'),
        select_duts=[0, 1, 2, 3],
        nbins_per_pixel=20,
        use_limits=True)

    # Plot the track angles of the final tracks
    result_analysis.histogram_track_angle(
        telescope_configuration=aligned_configuration,
        input_tracks_file=os.path.join(output_folder,
                                       'Tracks_aligned_selected.h5'),
        output_track_angle_file=None,
        n_bins=200,
        select_duts=[0, 1, 2, 3],
        plot=True)

    # Plot the 2D track density of the final tracks
    plot_utils.plot_track_density(
        telescope_configuration=aligned_configuration,
        input_tracks_file=os.path.join(output_folder,
                                       'Tracks_aligned_selected.h5'),
        select_duts=[0, 1, 2, 3])

    # Plotting of the 2D charge distribution of the final tracks
    plot_utils.plot_charge_distribution(
        telescope_configuration=aligned_configuration,
        input_tracks_file=os.path.join(output_folder,
                                       'Tracks_aligned_selected.h5'),
        select_duts=[0, 1, 2, 3])

    # Plot some final tracks (or track candidates) from a selected event range
    plot_utils.plot_events(
        telescope_configuration=aligned_configuration,
        input_tracks_file=os.path.join(output_folder,
                                       'Tracks_aligned_selected.h5'),
        output_pdf_file=os.path.join(output_folder, 'Events.pdf'),
        select_duts=[1],
        event_range=(0, 40))

    # Create final efficiency plots from final tracks
    result_analysis.calculate_efficiency(
        telescope_configuration=aligned_configuration,
        input_tracks_file=os.path.join(output_folder,
                                       'Tracks_aligned_selected.h5'),
        output_efficiency_file=os.path.join(output_folder, 'Efficiency.h5'),
        select_duts=[0, 1, 2, 3],
        resolutions=(250, 50),
        extend_areas=(2000, 2000),
        plot_ranges=None,
        efficiency_regions=None,
        minimum_track_density=1,
        cut_distances=(1000.0, 1000.0))
Example #5
def run_analysis(n_events):
    # Start simulator with random seed 0
    sim = SimulateData(random_seed=0)

    # All simulator std. settings are listed here and can be changed
    # Dimensions are in um, angles in mRad, temperatures in Kelvin
    # voltages in Volt

    # General setup
    sim.n_duts = 6  # Number of DUTs in the simulation
    sim.z_positions = [i * 10000 for i in range(sim.n_duts)]
    sim.offsets = [(-10000 + 111 * 0., -10000 + 111 * 0.)
                   for i in range(sim.n_duts)]
    sim.rotations = [(0, 0, 0)] * sim.n_duts  # rotation around the x, y, z axes
    sim.temperature = 300  # needed for charge sharing calculation

    # Beam related settings
    sim.beam_position = (0, 0)  # Average beam position in x, y at z = 0
    sim.beam_position_sigma = (2000, 2000)  # in x, y at z = 0
    sim.beam_momentum = 3200  # MeV
    sim.beam_angle = 0  # Average beam angle in theta at z = 0
    sim.beam_angle_sigma = 2  # Deviation of average beam angle in theta
    sim.tracks_per_event = 3  # Average number of tracks per event
    # Deviation from the average number of tracks
    # This also allows events with no track at all!
    sim.tracks_per_event_sigma = 1

    # Device specific settings
    sim.dut_bias = [80] * sim.n_duts  # Sensor bias voltage
    sim.dut_thickness = [200] * sim.n_duts  # Sensor thickness
    # Detection threshold for each device in electrons, influences efficiency!
    sim.dut_threshold = [0.] * sim.n_duts
    sim.dut_noise = [0.] * sim.n_duts  # Noise for each device in electrons
    sim.dut_pixel_size = [(250.0, 50.0)] * sim.n_duts  # Pixel size in x / y
    sim.dut_n_pixel = [(80, 336)] * sim.n_duts  # Number of pixel in x / y
    # Efficiency for each device from 0. to 1. for hits above threshold
    sim.dut_efficiencies = [1.] * sim.n_duts
    # The effective material budget (sensor + passive components), given as
    # total material thickness / total radiation length
    # (https://cdsweb.cern.ch/record/1279627/files/PH-EP-Tech-Note-2010-013.pdf)
    # 0 means no multiple scattering; the default is the material budget of a
    # silicon sensor of the given thickness
    sim.dut_material_budget = [
        sim.dut_thickness[i] * 1e-4 / 9.370 for i in range(sim.n_duts)
    ]
    # Digitization settings
    sim.digitization_charge_sharing = True
    # Shuffle hits per event to challenge track finding
    sim.digitization_shuffle_hits = True
    # Translate hit position on DUT plane to channel indices (column / row)
    sim.digitization_pixel_discretization = True

    # Create the data
    output_folder = 'simulation'  # Define a folder for output data
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)
    sim.create_data_and_store(os.path.join(output_folder, 'simulated_data'),
                              n_events=n_events)

    # The simulated data files, one file per DUT
    data_files = [
        os.path.join(output_folder, r'simulated_data_DUT%d.h5' % i)
        for i in range(sim.n_duts)
    ]

    initial_configuration = os.path.join(output_folder, 'telescope.yaml')
    telescope = Telescope()
    telescope.add_dut(dut_type="FEI4",
                      dut_id=0,
                      translation_x=0,
                      translation_y=0,
                      translation_z=sim.z_positions[0],
                      rotation_alpha=0,
                      rotation_beta=0,
                      rotation_gamma=0,
                      name="Telescope 1")
    telescope.add_dut(dut_type="FEI4",
                      dut_id=1,
                      translation_x=0,
                      translation_y=0,
                      translation_z=sim.z_positions[1],
                      rotation_alpha=0,
                      rotation_beta=0,
                      rotation_gamma=0,
                      name="Telescope 2")
    telescope.add_dut(dut_type="FEI4",
                      dut_id=2,
                      translation_x=0,
                      translation_y=0,
                      translation_z=sim.z_positions[2],
                      rotation_alpha=0,
                      rotation_beta=0,
                      rotation_gamma=0,
                      name="Telescope 3")
    telescope.add_dut(dut_type="FEI4",
                      dut_id=3,
                      translation_x=0,
                      translation_y=0,
                      translation_z=sim.z_positions[3],
                      rotation_alpha=0,
                      rotation_beta=0,
                      rotation_gamma=0,
                      name="Telescope 4")
    telescope.add_dut(dut_type="FEI4",
                      dut_id=4,
                      translation_x=0,
                      translation_y=0,
                      translation_z=sim.z_positions[4],
                      rotation_alpha=0,
                      rotation_beta=0,
                      rotation_gamma=0,
                      name="Telescope 5")
    telescope.add_dut(dut_type="FEI4",
                      dut_id=5,
                      translation_x=0,
                      translation_y=0,
                      translation_z=sim.z_positions[5],
                      rotation_alpha=0,
                      rotation_beta=0,
                      rotation_gamma=0,
                      name="Telescope 6")
    telescope.save_configuration(initial_configuration)
    prealigned_configuration = os.path.join(output_folder,
                                            'telescope_prealigned.yaml')
    aligned_configuration = os.path.join(output_folder,
                                         'telescope_aligned.yaml')

    # The following shows a complete test beam analysis by calling the
    # separate functions in the correct order

    # Cluster hits from all DUTs
    cluster_files = hit_analysis.cluster(
        telescope_configuration=initial_configuration,
        input_hit_files=data_files,
        select_duts=None,
        input_mask_files=[None] * sim.n_duts,
        use_positions=[False] * sim.n_duts,
        min_hit_charges=[1] * sim.n_duts,
        max_hit_charges=[2**16] * sim.n_duts,
        column_cluster_distances=[1] * sim.n_duts,
        row_cluster_distances=[1] * sim.n_duts,
        frame_cluster_distances=[2] * sim.n_duts,
    )

    # Generate filenames for cluster data
    # cluster_files = [os.path.splitext(data_file)[0] + '_clustered.h5'
    #                        for data_file in data_files]

    # Correlate the row / column of each DUT
    hit_analysis.correlate(telescope_configuration=initial_configuration,
                           input_files=cluster_files,
                           output_correlation_file=os.path.join(
                               output_folder, 'Correlation.h5'),
                           resolution=(250.0, 50.0),
                           select_reference_duts=0)

    # Create pre-alignment data for the DUT positions relative to the first DUT
    # from the correlation data. If needed, set the offset and error cut for
    # each DUT as a list of tuples
    prealigned_configuration = dut_alignment.prealign(
        telescope_configuration=initial_configuration,
        input_correlation_file=os.path.join(output_folder, 'Correlation.h5'),
        reduce_background=True,
        select_reference_dut=0)

    # Merge all cluster tables into a single table
    hit_analysis.merge_cluster_data(
        telescope_configuration=initial_configuration,
        input_cluster_files=cluster_files,
        output_merged_file=os.path.join(output_folder, 'Merged.h5'))

    # Create alignment, take first and last DUT as reference (telescope DUTs)
    aligned_configuration = dut_alignment.align(
        telescope_configuration=prealigned_configuration,
        input_merged_file=os.path.join(output_folder, 'Merged.h5'),
        select_duts=[[0, 1, 2, 3, 4, 5]],  # align all planes at once
        # add outermost planes, z-axis positions are fixed for telescope DUTs, if not stated otherwise (see select_alignment_parameters)
        select_telescope_duts=[0, 5],
        select_fit_duts=[0, 1, 2, 3, 4, 5],  # use all DUTs for track fit
        select_hit_duts=[[0, 1, 2, 3, 4, 5]],  # require hits in all DUTs
        # number of alignment iterations, the higher the number the more precise
        max_iterations=[3],
        max_events=(100000),  # limit number of events to speed up alignment
        quality_distances=[(250.0, 50.0), (250.0, 50.0), (250.0, 50.0),
                           (250.0, 50.0), (250.0, 50.0), (250.0, 50.0)],
        isolation_distances=(1000.0, 1000.0),
        use_limits=True,
        plot=True)

    # Find tracks from the tracklets and store them together with a quality
    # indicator in the track candidates table
    track_analysis.find_tracks(telescope_configuration=aligned_configuration,
                               input_merged_file=os.path.join(
                                   output_folder, 'Merged.h5'),
                               output_track_candidates_file=os.path.join(
                                   output_folder,
                                   'TrackCandidates_aligned.h5'),
                               align_to_beam=True)

    # Fit the track candidates and create new track table
    track_analysis.fit_tracks(
        telescope_configuration=aligned_configuration,
        input_track_candidates_file=os.path.join(output_folder,
                                                 'TrackCandidates_aligned.h5'),
        output_tracks_file=os.path.join(output_folder, 'Tracks_aligned.h5'),
        select_duts=[0, 1, 2, 3, 4, 5],
        select_fit_duts=(0, 1, 2, 3, 4, 5),
        select_hit_duts=(0, 1, 2, 3, 4, 5),
        exclude_dut_hit=True,
        quality_distances=[(250.0, 50.0), (250.0, 50.0), (250.0, 50.0),
                           (250.0, 50.0), (250.0, 50.0), (250.0, 50.0)],
        isolation_distances=(1000.0, 1000.0),
        use_limits=False,
        plot=True)

    result_analysis.calculate_residuals(
        telescope_configuration=aligned_configuration,
        input_tracks_file=os.path.join(output_folder, 'Tracks_aligned.h5'),
        output_residuals_file=os.path.join(output_folder,
                                           'Residuals_aligned.h5'),
        select_duts=[0, 1, 2, 3, 4, 5],
        nbins_per_pixel=20,
        use_limits=True)
Example #6
def select_tracks(telescope_configuration,
                  input_tracks_file,
                  select_duts,
                  output_tracks_file=None,
                  query=None,
                  max_events=None,
                  select_hit_duts=None,
                  select_no_hit_duts=None,
                  select_quality_duts=None,
                  select_isolated_track_duts=None,
                  select_isolated_hit_duts=None,
                  chunk_size=1000000):
    ''' Select tracks that match the given conditions and query strings.

    Parameters
    ----------
    telescope_configuration : string
        Filename of the telescope configuration file.
    input_tracks_file : string
        Filename of the input tracks file.
    select_duts : list
        Selecting DUTs that will be processed.
    output_tracks_file : string
        Filename of the output tracks file.
    query : string or list
        List of query strings, one for each selected DUT.
        A query is a string that is processed and used to select data from the table, e.g.,
        "track_chi2 <= 5", where "track_chi2" is a column in the table.
        The output table then only contains data with "track_chi2" smaller than or equal to 5.
    max_events : uint
        Maximum number of randomly selected events.
    select_hit_duts : list
        List of DUTs for each selected DUT. The DUTs are required to have the hit flag set.
    select_no_hit_duts : list
        List of DUTs for each selected DUT. The DUTs are required to have the hit flag not set.
    select_quality_duts : list
        List of DUTs for each selected DUT. The DUTs are required to have the quality flag set.
        The quality flag is only evaluated for DUTs where the hit flag is set.
    select_isolated_track_duts : list
        List of DUTs for each selected DUT. The DUTs are required to have the isolated track flag set.
        The isolated track flag is only evaluated for DUTs where the hit flag is set.
    select_isolated_hit_duts : list
        List of DUTs for each selected DUT. The DUTs are required to have the isolated hit flag set.
        The isolated hit flag is only evaluated for DUTs where the hit flag is set.
    chunk_size : uint
        Chunk size of the data when reading from file.
    '''
    telescope = Telescope(telescope_configuration)
    logging.info('=== Selecting tracks of %d DUTs ===' % len(select_duts))

    if not output_tracks_file:
        output_tracks_file = os.path.splitext(
            input_tracks_file)[0] + '_selected.h5'

    # Check select_duts
    # Check for value errors
    if not isinstance(select_duts, Iterable):
        raise ValueError("Parameter select_duts is not an iterable.")
    elif not select_duts:  # empty iterable
        raise ValueError("Parameter select_duts has no items.")
    # Check if only non-iterable in iterable
    if not all(map(lambda val: isinstance(val, (int, )), select_duts)):
        raise ValueError("Not all items in parameter select_duts are integer.")

    # Create select_hit_duts
    if select_hit_duts is None:  # If None, use no selection
        select_hit_duts = [[] for _ in select_duts]
    # Check iterable and length
    if not isinstance(select_hit_duts, Iterable):
        raise ValueError("Parameter select_hit_duts is not an iterable.")
    elif not select_hit_duts:  # empty iterable
        raise ValueError("Parameter select_hit_duts has no items.")
    # Check if only non-iterable in iterable
    if all(
            map(lambda val: not isinstance(val, Iterable) and val is not None,
                select_hit_duts)):
        select_hit_duts = [select_hit_duts[:] for _ in select_duts]
    # Check if only iterable in iterable
    if not all(
            map(lambda val: isinstance(val, Iterable) or val is None,
                select_hit_duts)):
        raise ValueError(
            "Not all items in parameter select_hit_duts are iterable or None.")
    # Finally check length of all arrays
    if len(select_hit_duts) != len(select_duts):  # length mismatch
        raise ValueError("Parameter select_hit_duts has the wrong length.")

    # Create select_no_hit_duts
    if select_no_hit_duts is None:  # If None, use no selection
        select_no_hit_duts = [[] for _ in select_duts]
    # Check iterable and length
    if not isinstance(select_no_hit_duts, Iterable):
        raise ValueError("Parameter select_no_hit_duts is not an iterable.")
    elif not select_no_hit_duts:  # empty iterable
        raise ValueError("Parameter select_no_hit_duts has no items.")
    # Check if only non-iterable in iterable
    if all(
            map(lambda val: not isinstance(val, Iterable) and val is not None,
                select_no_hit_duts)):
        select_no_hit_duts = [select_no_hit_duts[:] for _ in select_duts]
    # Check if only iterable in iterable
    if not all(
            map(lambda val: isinstance(val, Iterable) or val is None,
                select_no_hit_duts)):
        raise ValueError(
            "Not all items in parameter select_no_hit_duts are iterable or None."
        )
    # Finally check length of all arrays
    if len(select_no_hit_duts) != len(select_duts):  # length mismatch
        raise ValueError("Parameter select_no_hit_duts has the wrong length.")
    for index, item in enumerate(select_no_hit_duts):
        if item is not None and select_hit_duts[index] is not None:
            if set(item) & set(
                    select_hit_duts[index]):  # overlapping DUTs are not allowed
                raise ValueError(
                    "DUT%d cannot have select_hit_duts and select_no_hit_duts set for the same DUTs."
                    % (select_duts[index], ))

    # Create select_quality_duts
    if select_quality_duts is None:  # If None, use no selection
        select_quality_duts = [[] for _ in select_duts]
    # Check iterable and length
    if not isinstance(select_quality_duts, Iterable):
        raise ValueError("Parameter select_quality_duts is not an iterable.")
    elif not select_quality_duts:  # empty iterable
        raise ValueError("Parameter select_quality_duts has no items.")
    # Check if only non-iterable in iterable
    if all(
            map(lambda val: not isinstance(val, Iterable) and val is not None,
                select_quality_duts)):
        select_quality_duts = [select_quality_duts[:] for _ in select_duts]
    # Check if only iterable in iterable
    if not all(
            map(lambda val: isinstance(val, Iterable) or val is None,
                select_quality_duts)):
        raise ValueError(
            "Not all items in parameter select_quality_duts are iterable or None."
        )
    # Finally check length of all arrays
    if len(select_quality_duts) != len(select_duts):  # length mismatch
        raise ValueError("Parameter select_quality_duts has the wrong length.")

    # Create select_isolated_track_duts
    if select_isolated_track_duts is None:  # If None, use no selection
        select_isolated_track_duts = [[] for _ in select_duts]
    # Check iterable and length
    if not isinstance(select_isolated_track_duts, Iterable):
        raise ValueError(
            "Parameter select_isolated_track_duts is not an iterable.")
    elif not select_isolated_track_duts:  # empty iterable
        raise ValueError("Parameter select_isolated_track_duts has no items.")
    # Check if only non-iterable in iterable
    if all(
            map(lambda val: not isinstance(val, Iterable) and val is not None,
                select_isolated_track_duts)):
        select_isolated_track_duts = [
            select_isolated_track_duts[:] for _ in select_duts
        ]
    # Check if only iterable in iterable
    if not all(
            map(lambda val: isinstance(val, Iterable) or val is None,
                select_isolated_track_duts)):
        raise ValueError(
            "Not all items in parameter select_isolated_track_duts are iterable or None."
        )
    # Finally check length of all arrays
    if len(select_isolated_track_duts) != len(select_duts):
        raise ValueError(
            "Parameter select_isolated_track_duts has the wrong length.")

    # Create select_isolated_hit_duts
    if select_isolated_hit_duts is None:  # If None, use no selection
        select_isolated_hit_duts = [[] for _ in select_duts]
    # Check iterable and length
    if not isinstance(select_isolated_hit_duts, Iterable):
        raise ValueError(
            "Parameter select_isolated_hit_duts is not an iterable.")
    elif not select_isolated_hit_duts:  # empty iterable
        raise ValueError("Parameter select_isolated_hit_duts has no items.")
    # Check if only non-iterable in iterable
    if all(
            map(lambda val: not isinstance(val, Iterable) and val is not None,
                select_isolated_hit_duts)):
        select_isolated_hit_duts = [
            select_isolated_hit_duts[:] for _ in select_duts
        ]
    # Check if only iterable in iterable
    if not all(
            map(lambda val: isinstance(val, Iterable) or val is None,
                select_isolated_hit_duts)):
        raise ValueError(
            "Not all items in parameter select_isolated_hit_duts are iterable or None."
        )
    # Finally check length of all arrays
    if len(select_isolated_hit_duts) != len(select_duts):
        raise ValueError(
            "Parameter select_isolated_hit_duts has the wrong length.")

    # Create query
    if query is None:  # If None, use empty strings for all DUTs
        query = ['' for _ in select_duts]
    # If a single string is given, use the same query for all DUTs
    if isinstance(query, str):
        query = [query] * len(select_duts)
    # Check if only strings in iterable
    if not all(map(lambda val: isinstance(val, str), query)):
        raise ValueError("Not all items in parameter query are strings.")
    # Finally check length of all arrays
    if len(query) != len(select_duts):
        raise ValueError("Parameter query has the wrong length.")

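    # Loop over the selected DUTs: read each Tracks_DUT table in chunks,
    # apply the hit/quality/isolation bit masks and the optional per-DUT
    # query string (e.g. query='track_chi2 < 25.0'; the column name here is
    # only illustrative), and write the surviving tracks to the output file.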
    with tb.open_file(input_tracks_file, mode='r') as in_file_h5:
        with tb.open_file(output_tracks_file, mode="w") as out_file_h5:
            for index, actual_dut_index in enumerate(select_duts):
                node = in_file_h5.get_node(in_file_h5.root,
                                           'Tracks_DUT%d' % actual_dut_index)
                logging.info('== Selecting tracks for %s ==',
                             telescope[actual_dut_index].name)
                if query[index]:
                    logging.info('Query string: %s', query[index])
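                # Build bit masks from the per-DUT selections; bit i of each
                # mask corresponds to the DUT with index i, matching the bit
                # layout of the flag columns in the tracks table.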
                hit_mask = 0
                if select_hit_duts[index]:
                    for dut in select_hit_duts[index]:
                        hit_mask |= (1 << dut)
                no_hit_mask = 0
                if select_no_hit_duts[index]:
                    for dut in select_no_hit_duts[index]:
                        no_hit_mask |= (1 << dut)
                quality_mask = 0
                if select_quality_duts[index]:
                    for dut in select_quality_duts[index]:
                        quality_mask |= (1 << dut)
                isolated_track_mask = 0
                if select_isolated_track_duts[index]:
                    for dut in select_isolated_track_duts[index]:
                        isolated_track_mask |= (1 << dut)
                isolated_hit_mask = 0
                if select_isolated_hit_duts[index]:
                    for dut in select_isolated_hit_duts[index]:
                        isolated_hit_mask |= (1 << dut)

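                # Create the output tracks table with the same dtype as the
                # input node, using blosc compression.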
                tracks_table_out = out_file_h5.create_table(
                    where=out_file_h5.root,
                    name=node.name,
                    description=node.dtype,
                    title=node.title,
                    filters=tb.Filters(complib='blosc',
                                       complevel=5,
                                       fletcher32=False))

                total_n_tracks = node.shape[0]
                total_n_tracks_stored = 0
                total_n_events_stored = 0
                if max_events:
                    pbar = tqdm(total=max_events, ncols=80)
                else:
                    pbar = tqdm(total=total_n_tracks, ncols=80)

                total_n_events_stored_last = None
                # total_n_tracks_last = None
                last_index_chunk = None
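                # Read the tracks table in chunks; data_aligned_at_events
                # yields chunk boundaries aligned to event numbers, so events
                # are not split between chunks.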
                for tracks, index_chunk in analysis_utils.data_aligned_at_events(
                        node, chunk_size=chunk_size):
                    n_tracks_chunk = tracks.shape[0]
                    if (hit_mask != 0 or no_hit_mask != 0 or quality_mask != 0
                            or isolated_track_mask != 0
                            or isolated_hit_mask != 0):
                        select = np.ones(n_tracks_chunk, dtype=bool)
                        if hit_mask != 0:
                            select &= ((tracks['hit_flag']
                                        & hit_mask) == hit_mask)
                        if no_hit_mask != 0:
                            select &= ((~tracks['hit_flag']
                                        & no_hit_mask) == no_hit_mask)
                        if quality_mask != 0:
                            # Require the quality flag only for DUTs that
                            # actually have a hit
                            quality_mask_mod = quality_mask & tracks['hit_flag']
                            select &= ((tracks['quality_flag']
                                        & quality_mask_mod) == quality_mask_mod)
                        if isolated_track_mask != 0:
                            select &= (
                                (tracks['isolated_track_flag']
                                 & isolated_track_mask) == isolated_track_mask)
                        if isolated_hit_mask != 0:
                            # Require the isolated-hit flag only for DUTs that
                            # actually have a hit
                            isolated_hit_mask_mod = isolated_hit_mask & tracks[
                                'hit_flag']
                            select &= ((tracks['isolated_hit_flag']
                                        & isolated_hit_mask_mod
                                        ) == isolated_hit_mask_mod)
                        tracks = tracks[select]
                    if query[index]:
                        tracks = table_where(arr=tracks,
                                             query_str=query[index])

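                    # With max_events set, down-sample the events: estimate how
                    # many events to keep from this chunk (proportional to its
                    # share of all tracks, corrected using the previously
                    # processed chunks) and pick them at random.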
                    if max_events:
                        unique_events = np.unique(tracks["event_number"])
                        n_events_chunk = unique_events.shape[0]
                        if total_n_tracks == index_chunk:  # last chunk, adding all remaining events
                            select_n_events = max_events - total_n_events_stored
                        elif total_n_events_stored == 0:  # first chunk
                            select_n_events = int(
                                round(max_events *
                                      (n_tracks_chunk / total_n_tracks)))
                        else:
                            # Calculate a correction to the number of selected
                            # events from the selection rate observed in the
                            # chunks processed so far
                            correction = (
                                (total_n_tracks - index_chunk) / total_n_tracks
                                * 1 / (((total_n_tracks - last_index_chunk) / total_n_tracks)
                                       / ((max_events - total_n_events_stored_last) / max_events))
                                + index_chunk / total_n_tracks
                                * 1 / ((last_index_chunk / total_n_tracks)
                                       / (total_n_events_stored_last / max_events)))
                            # select_n_events = np.ceil(n_events_chunk * correction)
                            # calculate correction of number of selected events
                            # correction = 1/(((total_n_tracks-last_index_chunk)/total_n_tracks_last)/((max_events-total_n_events_stored_last)/max_events))
                            select_n_events = int(
                                round(max_events *
                                      (n_tracks_chunk / total_n_tracks) *
                                      correction))
                        # do not store more events than in current chunk
                        select_n_events = min(n_events_chunk, select_n_events)
                        # do not store more events than given by max_events
                        select_n_events = min(
                            select_n_events,
                            max_events - total_n_events_stored)
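                        # Fixed seed to make the random event selection
                        # reproducible between runs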
                        np.random.seed(seed=0)
                        selected_events = np.random.choice(
                            unique_events, size=select_n_events, replace=False)
                        store_n_events = selected_events.shape[0]
                        total_n_events_stored += store_n_events
                        # print "store_n_events", store_n_events
                        selected_tracks = np.isin(tracks["event_number"],
                                                  selected_events)
                        store_n_tracks = np.count_nonzero(selected_tracks)
                        # TODO: total_n_tracks_stored not used...
                        total_n_tracks_stored += store_n_tracks
                        tracks = tracks[selected_tracks]
                        pbar.update(n_events_chunk)
                    else:
                        pbar.update(n_tracks_chunk)

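                    # Append the selected tracks of this chunk and flush so the
                    # data is written to disk incrementally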
                    tracks_table_out.append(tracks)
                    tracks_table_out.flush()
                    total_n_events_stored_last = total_n_events_stored
                    # total_n_tracks_last = total_n_tracks
                    last_index_chunk = index_chunk
                pbar.close()

    return output_tracks_file