# Example #1
 def _run_quartile_reduction(self, scatter_workspace, transmission_workspace, direct_workspace, data_type,
                             scatter_monitor_workspace, component, state, centre1, centre2, r_min, r_max):
     """
     Run the SANSBeamCentreFinderCore child algorithm once for a given beam-centre guess.

     The core algorithm reduces the data into four quadrants (left/right/top/bottom)
     around the supplied centre position; the quadrant workspaces are then stripped
     of leading/trailing NaNs before being returned.

     :param scatter_workspace: the scatter workspace to reduce.
     :param transmission_workspace: the transmission workspace.
     :param direct_workspace: the direct-beam workspace.
     :param data_type: the data type (presumably sample vs can -- confirm against caller).
     :param scatter_monitor_workspace: the monitor workspace belonging to the scatter data.
     :param component: the detector component to reduce (e.g. LAB/HAB bank -- confirm).
     :param state: the SANS state object passed through to the child algorithm.
     :param centre1: first coordinate of the beam-centre guess.
     :param centre2: second coordinate of the beam-centre guess.
     :param r_min: inner radius limit for the reduction.
     :param r_max: outer radius limit for the reduction.
     :return: a map of MaskingQuadrant (Left/Right/Top/Bottom) to the corresponding
              NaN-stripped output workspace.
     """
     algorithm_name = "SANSBeamCentreFinderCore"
     alg_options = {"ScatterWorkspace": scatter_workspace,
                    "ScatterMonitorWorkspace": scatter_monitor_workspace,
                    "TransmissionWorkspace": transmission_workspace,
                    "DirectWorkspace": direct_workspace,
                    "Component": component,
                    "SANSState": state,
                    "DataType": data_type,
                    "Centre1": centre1,
                    "Centre2": centre2,
                    "OutputWorkspaceLeft": EMPTY_NAME,
                    "OutputWorkspaceRight": EMPTY_NAME,
                    "OutputWorkspaceTop": EMPTY_NAME,
                    "OutputWorkspaceBottom": EMPTY_NAME,
                    "RMax": r_max,
                    "RMin": r_min}
     alg = create_child_algorithm(self, algorithm_name, **alg_options)
     alg.execute()
     out_left = strip_end_nans(alg.getProperty("OutputWorkspaceLeft").value, self)
     out_right = strip_end_nans(alg.getProperty("OutputWorkspaceRight").value, self)
     out_top = strip_end_nans(alg.getProperty("OutputWorkspaceTop").value, self)
     out_bottom = strip_end_nans(alg.getProperty("OutputWorkspaceBottom").value, self)
     return {MaskingQuadrant.Left: out_left, MaskingQuadrant.Right: out_right, MaskingQuadrant.Top: out_top,
             MaskingQuadrant.Bottom: out_bottom}
# Example #2
def get_final_output_workspaces(output_bundles, parent_alg):
    """
    This function provides the final steps for the data reduction.

    The final steps are:
    1. Can Subtraction (if required)
    2. Data clean up (if required)
    :param output_bundles: A set of outputBundles
    :param parent_alg: a handle to the parent algorithm.
    :return: a map of ReductionMode vs final output workspaces.
    """

    reduction_mode_vs_output_bundles = get_reduction_mode_vs_output_bundles(
        output_bundles)

    # For each reduction mode, we need to perform a can subtraction (and potential cleaning of the workspace).
    # NOTE: the loop variable is named 'mode_bundles' (not 'output_bundles') so that
    # the function parameter is not shadowed inside the loop.
    final_output_workspaces = {}
    for reduction_mode, mode_bundles in reduction_mode_vs_output_bundles.items():
        # Find the sample and the can in the data collection
        output_sample_workspace = next(
            (bundle.output_workspace
             for bundle in mode_bundles if is_sample(bundle)),
            None)
        output_can_workspace = next(
            (bundle.output_workspace
             for bundle in mode_bundles if is_can(bundle)),
            None)
        # Perform the can subtraction (only when a can reduction is present)
        if output_can_workspace is not None:
            final_output_workspace = perform_can_subtraction(
                output_sample_workspace, output_can_workspace, parent_alg)
        else:
            final_output_workspace = output_sample_workspace

        # Tidy up the workspace by removing start/end-NANs and start/end-INFs
        final_output_workspace = strip_end_nans(final_output_workspace,
                                                parent_alg)
        final_output_workspaces[reduction_mode] = final_output_workspace

    # Finally add sample log information
    # TODO: Add log information

    return final_output_workspaces
# Example #3
def get_final_output_workspaces(completed_event_slices, parent_alg):
    """
    This function provides the final steps for the data reduction.

    The final steps are:
    1. Can Subtraction (if required)
    2. Data clean up (if required)
    :param completed_event_slices: A list of completed output bundles
    :param parent_alg: a handle to the parent algorithm.
    :return: a map of ReductionMode vs final output workspaces.
    """
    banked_outputs = get_reduction_mode_vs_output_bundles(completed_event_slices)
    # Pair up the wavelength ranges for each bank in-place
    for bank_key in banked_outputs:
        banked_outputs[bank_key] = pair_up_wav_ranges(banked_outputs[bank_key])

    # For each reduction mode, perform a can subtraction (and potential cleaning of the workspace)
    final_output_workspaces = {}
    for bank, paired_reduction_list in banked_outputs.items():
        reduced_for_bank = []
        for pair in paired_reduction_list:
            # Locate the (optional) can and the (mandatory) sample in this pairing
            can = next((item for item in pair
                        if item.output_bundle.data_type is DataType.CAN), None)
            sample = next(item for item in pair
                          if item.output_bundle.data_type is DataType.SAMPLE)
            # Subtract the can from the sample when a can reduction exists
            if can:
                workspace = perform_can_subtraction(
                    sample.output_bundle.output_workspace,
                    can.output_bundle.output_workspace, parent_alg)
            else:
                workspace = sample.output_bundle.output_workspace

            # Tidy up the workspace by removing start/end-NANs and start/end-INFs
            reduced_for_bank.append(strip_end_nans(workspace, parent_alg))
        final_output_workspaces[bank] = reduced_for_bank

    return final_output_workspaces
# Example #4
    def _do_test(self, data_x, data_y):
        """
        Build a workspace from the given x/y data, strip end NaNs/INFs, and
        verify that exactly the five interior y-values survive.

        :param data_x: the x-data handed to CreateWorkspace.
        :param data_y: the y-data handed to CreateWorkspace (expected to carry
                       NaN/INF padding at the ends -- confirm against callers).
        """
        # Arrange: create an in-memory workspace via an unmanaged child algorithm
        alg_ws = AlgorithmManager.createUnmanaged("CreateWorkspace")
        alg_ws.setChild(True)
        alg_ws.initialize()
        alg_ws.setProperty("OutputWorkspace", "test")

        alg_ws.setProperty("DataX", data_x)
        alg_ws.setProperty("DataY", data_y)
        alg_ws.execute()
        workspace = alg_ws.getProperty("OutputWorkspace").value

        # Act
        cropped_workspace = strip_end_nans(workspace)
        # Assert -- assertEqual reports expected vs actual on failure, unlike
        # assertTrue(a == b) which only reports "False is not true"; this also
        # matches the style of the sibling implementation of this test helper.
        data_y = cropped_workspace.dataY(0)
        self.assertEqual(len(data_y), 5)
        self.assertEqual(data_y[0], 36.)
        self.assertEqual(data_y[1], 44.)
        self.assertEqual(data_y[2], 52.)
        self.assertEqual(data_y[3], 63.)
        self.assertEqual(data_y[4], 75.)
    def _do_test(self, data_x, data_y):
        """
        Build a workspace from the given x/y data, strip end NaNs/INFs, and
        verify that exactly the five interior y-values survive.
        """
        # Arrange: create an in-memory workspace via an unmanaged child algorithm
        create_alg = AlgorithmManager.createUnmanaged("CreateWorkspace")
        create_alg.setChild(True)
        create_alg.initialize()
        create_alg.setProperty("OutputWorkspace", "test")

        create_alg.setProperty("DataX", data_x)
        create_alg.setProperty("DataY", data_y)
        create_alg.execute()
        input_workspace = create_alg.getProperty("OutputWorkspace").value

        # Act
        cropped_workspace = strip_end_nans(input_workspace)

        # Assert: only the five interior y-values should remain after stripping
        remaining_y = cropped_workspace.dataY(0)
        self.assertEqual(len(remaining_y), 5)
        for index, expected in enumerate([36., 44., 52., 63., 75.]):
            self.assertEqual(remaining_y[index], expected)
def get_final_output_workspaces(output_bundles, parent_alg):
    """
    This function provides the final steps for the data reduction.

    The final steps are:
    1. Can Subtraction (if required)
    2. Data clean up (if required)
    :param output_bundles: A set of outputBundles
    :param parent_alg: a handle to the parent algorithm.
    :return: a map of ReductionMode vs final output workspaces.
    """

    reduction_mode_vs_output_bundles = get_reduction_mode_vs_output_bundles(output_bundles)

    # For each reduction mode, we need to perform a can subtraction (and potential cleaning of the workspace).
    # NOTE: the loop variable is named 'mode_bundles' (not 'output_bundles') so that
    # the function parameter is not shadowed inside the loop.
    final_output_workspaces = {}
    for reduction_mode, mode_bundles in reduction_mode_vs_output_bundles.items():
        # Find the sample and the can in the data collection
        output_sample_workspace = next((bundle.output_workspace for bundle in mode_bundles
                                        if is_sample(bundle)), None)
        output_can_workspace = next((bundle.output_workspace for bundle in mode_bundles
                                     if is_can(bundle)), None)
        # Perform the can subtraction (only when a can reduction is present)
        if output_can_workspace is not None:
            final_output_workspace = perform_can_subtraction(output_sample_workspace, output_can_workspace, parent_alg)
        else:
            final_output_workspace = output_sample_workspace

        # Tidy up the workspace by removing start/end-NANs and start/end-INFs
        final_output_workspace = strip_end_nans(final_output_workspace, parent_alg)
        final_output_workspaces[reduction_mode] = final_output_workspace

    # Finally add sample log information
    # TODO: Add log information

    return final_output_workspaces
# Example #7
def _pack_outputs(reductions, parent_alg) -> List[Workspace2D]:
    """
    Perform can subtraction and end-NaN stripping on paired reductions,
    returning the finished workspaces in order.

    :param reductions: a list of paired reductions (sample with optional can).
    :param parent_alg: a handle to the parent algorithm.
    :return: the list of final, cleaned-up workspaces.
    """
    if not reductions:
        return []

    packed_workspaces = []
    for paired in reductions:
        # Locate the (optional) can and the (mandatory) sample in this pairing
        can = next((item for item in paired
                    if item.output_bundle.data_type is DataType.CAN), None)
        sample = next(item for item in paired
                      if item.output_bundle.data_type is DataType.SAMPLE)
        # Subtract the can from the sample when a can reduction exists
        if can:
            workspace = perform_can_subtraction(
                sample.output_bundle.output_workspace,
                can.output_bundle.output_workspace, parent_alg)
        else:
            workspace = sample.output_bundle.output_workspace

        # Tidy up the workspace by removing start/end-NANs and start/end-INFs
        packed_workspaces.append(strip_end_nans(workspace, parent_alg))
    return packed_workspaces
# Example #8
    def _pyexec(self):
        """
        Drive a full SANS reduction over event slices: run the core reduction
        per bundle, assemble non-merged (LAB/HAB) outputs, optionally merge,
        and publish all output workspaces on the algorithm.
        """
        # Get state
        state = self._get_state()

        # Get reduction mode
        overall_reduction_mode = self._get_reduction_mode(state)

        # --------------------------------------------------------------------------------------------------------------
        # Perform the initial reduction. Version 1 does not have an initial reduction.
        # --------------------------------------------------------------------------------------------------------------
        reduction_setting_bundles = self.do_initial_reduction(
            state, overall_reduction_mode)

        # --------------------------------------------------------------------------------------------------------------
        # Setup main reduction
        # --------------------------------------------------------------------------------------------------------------

        # Run core reductions
        use_optimizations = self.getProperty("UseOptimizations").value
        save_can = self.getProperty("SaveCan").value

        # Create the reduction core algorithm
        reduction_alg = create_child_algorithm(self, self._reduction_name(),
                                               **{})

        # Set up progress: one unit of progress per bundle in every event list
        progress = self._get_progress(
            sum([len(event_list) for event_list in reduction_setting_bundles]),
            overall_reduction_mode)

        # --------------------------------------------------------------------------------------------------------------
        # Reduction - here we slice the workspaces and perform the steps which must be carried out after slicing
        # --------------------------------------------------------------------------------------------------------------
        output_bundles, output_parts_bundles, \
            output_transmission_bundles = self.do_reduction(reduction_alg, reduction_setting_bundles, use_optimizations,
                                                            progress)

        reduction_mode_vs_output_workspaces = defaultdict(list)
        reduction_mode_vs_workspace_names = defaultdict(list)

        # --------------------------------------------------------------------------------------------------------------
        # Deal with non-merged
        # Note that we have non-merged workspaces even in the case of a merged reduction, ie LAB and HAB results
        # --------------------------------------------------------------------------------------------------------------
        progress.report("Final clean up...")
        for event_slice_bundle in output_bundles:
            output_workspaces_non_merged = get_final_output_workspaces(
                event_slice_bundle, self)
            for reduction_mode, workspace in output_workspaces_non_merged.items(
            ):
                reduction_mode_vs_output_workspaces[reduction_mode].append(
                    workspace)

            reduction_mode_vs_workspace_names = self._get_workspace_names(
                reduction_mode_vs_workspace_names, event_slice_bundle)

        # --------------------------------------------------------------------------------------------------------------
        # Deal with merging
        # --------------------------------------------------------------------------------------------------------------
        # Merge if required with stitching etc.
        scale_factors = []
        shift_factors = []
        if overall_reduction_mode is ReductionMode.MERGED:
            progress.report("Merging reductions ...")
            for i, event_slice_part_bundle in enumerate(output_parts_bundles):
                merge_bundle = get_merge_bundle_for_merge_request(
                    event_slice_part_bundle, self)
                scale_factors.append(merge_bundle.scale)
                shift_factors.append(merge_bundle.shift)
                reduction_mode_vs_output_workspaces[
                    ReductionMode.MERGED].append(merge_bundle.merged_workspace)
                merged_name = self._get_merged_workspace_name(
                    event_slice_part_bundle)
                reduction_mode_vs_workspace_names[ReductionMode.MERGED].append(
                    merged_name)

                scaled_HAB = strip_end_nans(merge_bundle.scaled_hab_workspace,
                                            self)
                reduction_mode_vs_output_workspaces[ReductionMode.HAB].append(
                    scaled_HAB)
                # Get HAB workspace name
                # NOTE(review): this rebinds the outer 'state' variable to the
                # per-slice state; the set_transmission_workspaces_on_output call
                # at the end of the method then sees the LAST slice's state --
                # confirm this is intentional.
                state = event_slice_part_bundle[0].state
                hab_name = self._get_output_workspace_name(
                    state, reduction_mode=ReductionMode.HAB)
                reduction_mode_vs_workspace_names[ReductionMode.HAB].append(
                    hab_name)

            self.set_shift_and_scale_output(scale_factors, shift_factors)

        # --------------------------------------------------------------------------------------------------------------
        # Set the output workspaces
        # --------------------------------------------------------------------------------------------------------------
        self.set_output_workspaces(reduction_mode_vs_output_workspaces,
                                   reduction_mode_vs_workspace_names)

        # --------------------------------------------------------------------------------------------------------------
        # Set the reduced can workspaces on the output if optimizations are
        # enabled. This will allow SANSBatchReduction to add them to the ADS.
        # --------------------------------------------------------------------------------------------------------------
        if use_optimizations:
            if not save_can:
                self.set_reduced_can_workspace_on_output(output_bundles)
            self.set_reduced_can_count_and_norm_on_output(output_parts_bundles)

        if save_can:
            self.set_can_and_sam_on_output(output_bundles)

        self.set_transmission_workspaces_on_output(
            output_transmission_bundles,
            state.adjustment.calculate_transmission.fit)
    def PyExec(self):
        """
        Drive a full SANS reduction: run the core reduction per setting bundle,
        assemble non-merged (LAB/HAB) outputs, optionally merge with stitching,
        and publish all output workspaces on the algorithm.
        """
        # Get state
        state = self._get_state()

        # Get reduction mode
        overall_reduction_mode = self._get_reduction_mode(state)

        # Decide which core reduction information to run, i.e. HAB, LAB, ALL, MERGED. In the case of ALL and MERGED,
        # the required simple reduction modes need to be run. Normally this is HAB and LAB, future implementations
        # might have more detectors though (or different types)
        reduction_setting_bundles = self._get_reduction_setting_bundles(
            state, overall_reduction_mode)

        # Run core reductions
        use_optimizations = self.getProperty("UseOptimizations").value
        save_can = self.getProperty("SaveCan").value

        # Create the reduction core algorithm
        reduction_name = "SANSReductionCore"
        reduction_options = {}
        reduction_alg = create_child_algorithm(self, reduction_name,
                                               **reduction_options)

        # Set up progress: one unit of progress per setting bundle
        progress = self._get_progress(len(reduction_setting_bundles),
                                      overall_reduction_mode)

        # --------------------------------------------------------------------------------------------------------------
        # Reduction
        # --------------------------------------------------------------------------------------------------------------
        output_bundles = []
        output_parts_bundles = []
        output_transmission_bundles = []
        for reduction_setting_bundle in reduction_setting_bundles:
            progress.report("Running a single reduction ...")
            # We want to make use of optimizations here. If a can workspace has already been reduced with the same can
            # settings and is stored in the ADS, then we should use it (provided the user has optimizations enabled).
            if use_optimizations and reduction_setting_bundle.data_type is DataType.Can:
                output_bundle, output_parts_bundle, output_transmission_bundle = run_optimized_for_can(
                    reduction_alg, reduction_setting_bundle)
            else:
                output_bundle, output_parts_bundle, output_transmission_bundle = run_core_reduction(
                    reduction_alg, reduction_setting_bundle)
            output_bundles.append(output_bundle)
            output_parts_bundles.append(output_parts_bundle)
            output_transmission_bundles.append(output_transmission_bundle)

        reduction_mode_vs_output_workspaces = {}

        # --------------------------------------------------------------------------------------------------------------
        # Deal with non-merged
        # Note that we have non-merged workspaces even in the case of a merged reduction, ie LAB and HAB results
        # --------------------------------------------------------------------------------------------------------------
        progress.report("Final clean up...")
        output_workspaces_non_merged = get_final_output_workspaces(
            output_bundles, self)
        reduction_mode_vs_output_workspaces.update(
            output_workspaces_non_merged)

        # --------------------------------------------------------------------------------------------------------------
        # Deal with merging
        # --------------------------------------------------------------------------------------------------------------
        # Merge if required with stitching etc.
        if overall_reduction_mode is ReductionMode.Merged:
            progress.report("Merging reductions ...")
            merge_bundle = get_merge_bundle_for_merge_request(
                output_parts_bundles, self)
            self.set_shift_and_scale_output(merge_bundle)
            reduction_mode_vs_output_workspaces.update(
                {ReductionMode.Merged: merge_bundle.merged_workspace})
            scaled_HAB = strip_end_nans(merge_bundle.scaled_hab_workspace,
                                        self)
            # NOTE(review): 'ISISReductionMode.HAB' is used here while
            # 'ReductionMode.Merged' is used above -- confirm the two enums are
            # intentionally mixed (or are aliases) rather than a leftover from a
            # rename.
            reduction_mode_vs_output_workspaces.update(
                {ISISReductionMode.HAB: scaled_HAB})

        # --------------------------------------------------------------------------------------------------------------
        # Set the output workspaces
        # --------------------------------------------------------------------------------------------------------------
        # Set sample logs
        # Todo: Set sample log -> Userfile and unfitted transmission workspace. Should probably set on
        # higher level (SANSBatch)
        # Set the output workspaces
        self.set_output_workspaces(reduction_mode_vs_output_workspaces)

        # --------------------------------------------------------------------------------------------------------------
        # Set the reduced can workspaces on the output if optimizations are
        # enabled. This will allow SANSBatchReduction to add them to the ADS.
        # --------------------------------------------------------------------------------------------------------------
        if use_optimizations:
            self.set_reduced_can_workspace_on_output(output_bundles,
                                                     output_parts_bundles)

        if save_can:
            self.set_can_and_sam_on_output(output_bundles)

        self.set_transmission_workspaces_on_output(
            output_transmission_bundles,
            state.adjustment.calculate_transmission.fit)
# Example #10
    def _pyexec(self):
        """
        Drive a full SANS reduction over event slices: reduce each bundle,
        flatten the completed slices, build the non-merged outputs, optionally
        merge, and publish all output workspaces on the algorithm.
        """
        # Get state
        state = self._get_state()

        # Get reduction mode
        overall_reduction_mode = self._get_reduction_mode(state)

        # --------------------------------------------------------------------------------------------------------------
        # Perform the initial reduction. Version 1 does not have an initial reduction.
        # --------------------------------------------------------------------------------------------------------------
        reduction_setting_bundles = self.do_initial_reduction(
            state, overall_reduction_mode)

        # --------------------------------------------------------------------------------------------------------------
        # Setup main reduction
        # --------------------------------------------------------------------------------------------------------------

        # Run core reductions
        use_optimizations = self.getProperty("UseOptimizations").value
        save_can = self.getProperty("SaveCan").value

        # Create the reduction core algorithm
        reduction_alg = create_child_algorithm(self, self._reduction_name(),
                                               **{})

        # Set up progress: one unit of progress per bundle in every event list
        progress = self._get_progress(
            sum([len(event_list) for event_list in reduction_setting_bundles]),
            overall_reduction_mode)

        # --------------------------------------------------------------------------------------------------------------
        # Reduction - here we slice the workspaces and perform the steps which must be carried out after slicing
        # --------------------------------------------------------------------------------------------------------------
        completed_event_slices: CompletedSlices = []
        for event_slice in reduction_setting_bundles:
            # The single reductions represent CAN / sample reductions
            for bundle in event_slice:
                reduced_slices = self.do_reduction(reduction_alg, bundle,
                                                   use_optimizations, progress)
                # Merge the list of lists into a single flat list to keep our lives easier
                completed_event_slices.extend(reduced_slices)

        # --------------------------------------------------------------------------------------------------------------
        # Deal with non-merged
        # Note that we have non-merged workspaces even in the case of a merged reduction, ie LAB and HAB results
        # --------------------------------------------------------------------------------------------------------------
        progress.report("Final clean up...")

        workflow_alg_outputs = get_final_output_workspaces(
            completed_event_slices, self)

        # --------------------------------------------------------------------------------------------------------------
        # Deal with merging
        # --------------------------------------------------------------------------------------------------------------
        # Merge if required with stitching etc.
        scale_factors = []
        shift_factors = []

        if overall_reduction_mode is ReductionMode.MERGED:
            progress.report("Merging reductions ...")
            merge_bundle = get_merge_bundle_for_merge_request(
                completed_event_slices, self)
            for merged in merge_bundle:
                scale_factors.append(merged.scale)
                shift_factors.append(merged.shift)
                workflow_alg_outputs.merged_output.append(
                    merged.merged_workspace)
                # Pack scaled HAB as a diagnostic tool
                scaled_HAB = strip_end_nans(merged.scaled_hab_workspace, self)
                workflow_alg_outputs.scaled_hab_output.append(scaled_HAB)

            self.set_shift_and_scale_output(scale_factors, shift_factors)

        # --------------------------------------------------------------------------------------------------------------
        # Set the output workspaces
        # --------------------------------------------------------------------------------------------------------------
        self.set_output_workspaces(workflow_alg_outputs)

        # --------------------------------------------------------------------------------------------------------------
        # Set the reduced can workspaces on the output if optimizations are
        # enabled. This will allow SANSBatchReduction to add them to the ADS.
        # --------------------------------------------------------------------------------------------------------------
        if use_optimizations:
            if save_can:
                self.set_reduced_can_workspace_on_output(
                    completed_event_slices)
            self.set_reduced_can_count_and_norm_on_output(
                completed_event_slices)

        if save_can:
            self.set_can_and_sam_on_output(completed_event_slices)

        self.set_transmission_workspaces_on_output(
            completed_event_slices,
            state.adjustment.calculate_transmission.fit)
    def PyExec(self):
        """
        Drive a full SANS reduction: run the core reduction per setting bundle,
        assemble non-merged (LAB/HAB) outputs, optionally merge with stitching,
        and publish all output workspaces on the algorithm.
        """
        # Get state
        state = self._get_state()

        # Get reduction mode
        overall_reduction_mode = self._get_reduction_mode(state)

        # Decide which core reduction information to run, i.e. HAB, LAB, ALL, MERGED. In the case of ALL and MERGED,
        # the required simple reduction modes need to be run. Normally this is HAB and LAB, future implementations
        # might have more detectors though (or different types)
        reduction_setting_bundles = self._get_reduction_setting_bundles(state, overall_reduction_mode)

        # Run core reductions
        use_optimizations = self.getProperty("UseOptimizations").value
        save_can = self.getProperty("SaveCan").value

        # Create the reduction core algorithm
        reduction_name = "SANSReductionCore"
        reduction_options = {}
        reduction_alg = create_child_algorithm(self, reduction_name, **reduction_options)

        # Set up progress: one unit of progress per setting bundle
        progress = self._get_progress(len(reduction_setting_bundles), overall_reduction_mode)

        # --------------------------------------------------------------------------------------------------------------
        # Reduction
        # --------------------------------------------------------------------------------------------------------------
        output_bundles = []
        output_parts_bundles = []
        output_transmission_bundles = []
        for reduction_setting_bundle in reduction_setting_bundles:
            progress.report("Running a single reduction ...")
            # We want to make use of optimizations here. If a can workspace has already been reduced with the same can
            # settings and is stored in the ADS, then we should use it (provided the user has optimizations enabled).
            if use_optimizations and reduction_setting_bundle.data_type is DataType.Can:
                output_bundle, output_parts_bundle, output_transmission_bundle = run_optimized_for_can(reduction_alg,
                                                                                                       reduction_setting_bundle)
            else:
                output_bundle, output_parts_bundle, output_transmission_bundle = run_core_reduction(reduction_alg,
                                                                                                    reduction_setting_bundle)
            output_bundles.append(output_bundle)
            output_parts_bundles.append(output_parts_bundle)
            output_transmission_bundles.append(output_transmission_bundle)

        reduction_mode_vs_output_workspaces = {}

        # --------------------------------------------------------------------------------------------------------------
        # Deal with non-merged
        # Note that we have non-merged workspaces even in the case of a merged reduction, ie LAB and HAB results
        # --------------------------------------------------------------------------------------------------------------
        progress.report("Final clean up...")
        output_workspaces_non_merged = get_final_output_workspaces(output_bundles, self)
        reduction_mode_vs_output_workspaces.update(output_workspaces_non_merged)

        # --------------------------------------------------------------------------------------------------------------
        # Deal with merging
        # --------------------------------------------------------------------------------------------------------------
        # Merge if required with stitching etc.
        if overall_reduction_mode is ReductionMode.Merged:
            progress.report("Merging reductions ...")
            merge_bundle = get_merge_bundle_for_merge_request(output_parts_bundles, self)
            self.set_shift_and_scale_output(merge_bundle)
            reduction_mode_vs_output_workspaces.update({ReductionMode.Merged: merge_bundle.merged_workspace})
            scaled_HAB = strip_end_nans(merge_bundle.scaled_hab_workspace, self)
            # NOTE(review): 'ISISReductionMode.HAB' is used here while
            # 'ReductionMode.Merged' is used above -- confirm the two enums are
            # intentionally mixed (or are aliases) rather than a leftover from a
            # rename.
            reduction_mode_vs_output_workspaces.update({ISISReductionMode.HAB: scaled_HAB})

        # --------------------------------------------------------------------------------------------------------------
        # Set the output workspaces
        # --------------------------------------------------------------------------------------------------------------
        # Set sample logs
        # Todo: Set sample log -> Userfile and unfitted transmission workspace. Should probably set on
        # higher level (SANSBatch)
        # Set the output workspaces
        self.set_output_workspaces(reduction_mode_vs_output_workspaces)

        # --------------------------------------------------------------------------------------------------------------
        # Set the reduced can workspaces on the output if optimizations are
        # enabled. This will allow SANSBatchReduction to add them to the ADS.
        # --------------------------------------------------------------------------------------------------------------
        if use_optimizations:
            self.set_reduced_can_workspace_on_output(output_bundles, output_parts_bundles)

        if save_can:
            self.set_can_and_sam_on_output(output_bundles)

        self.set_transmission_workspaces_on_output(output_transmission_bundles,
                                                   state.adjustment.calculate_transmission.fit)