def _run_reduction_core(self, state, workspace, monitor, transmission=None, direct=None,
                            detector_type=DetectorType.LAB, component=DataType.SAMPLE):
        reduction_core_alg = AlgorithmManager.createUnmanaged("SANSReductionCore")
        reduction_core_alg.setChild(True)
        reduction_core_alg.initialize()

        state_dict = Serializer.to_json(state)
        reduction_core_alg.setProperty("SANSState", state_dict)
        reduction_core_alg.setProperty("ScatterWorkspace", workspace)
        reduction_core_alg.setProperty("ScatterMonitorWorkspace", monitor)

        if transmission:
            reduction_core_alg.setProperty("TransmissionWorkspace", transmission)

        if direct:
            reduction_core_alg.setProperty("DirectWorkspace", direct)

        reduction_core_alg.setProperty("Component", detector_type.value)
        reduction_core_alg.setProperty("DataType", component.value)

        reduction_core_alg.setProperty("OutputWorkspace", EMPTY_NAME)

        reduction_core_alg.setProperty("CalculatedTransmissionWorkspace", EMPTY_NAME)
        reduction_core_alg.setProperty("UnfittedTransmissionWorkspace", EMPTY_NAME)

        # Act
        reduction_core_alg.execute()
        self.assertTrue(reduction_core_alg.isExecuted())
        return reduction_core_alg
Example #2
    def test_that_angle_masking_is_applied(self):
        # Arrange
        file_information_factory = SANSFileInformationFactory()
        file_information = file_information_factory.create_sans_file_information(
            "SANS2D00028827")
        data_builder = get_data_builder(SANSFacility.ISIS, file_information)
        data_builder.set_sample_scatter("SANS2D00028827")
        data_info = data_builder.build()

        mask_builder = get_mask_builder(data_info)

        # Expected spectra
        phi_mirror = False
        phi_min = 0.
        phi_max = 90.
        # This should mask everything except for the upper right quadrant
        # | 120              |-------------------|
        # |                 |---------------------|
        # | 60               |-------------------|
        # |                 |----------------------|
        # |
        # |
        # |-------------------|------------------|
        # 512                256                 0

        expected_spectra = []
        # The strange double pattern arises from the offset of the SANS2D tube geometry (see InstrumentView)
        for y in range(60, 120):
            if y % 2 == 0:
                expected_spectra.extend(
                    ((y * 512) + 9 + x for x in range(0, 255)))
            else:
                expected_spectra.extend(
                    ((y * 512) + 9 + x for x in range(0, 257)))
        expected_spectra.extend((x for x in range(92169, 122889)))  # HAB

        mask_builder.set_use_mask_phi_mirror(phi_mirror)
        mask_builder.set_phi_min(phi_min)
        mask_builder.set_phi_max(phi_max)

        mask_info = mask_builder.build()

        test_director = TestDirector()
        test_director.set_states(data_state=data_info, mask_state=mask_info)
        state = test_director.construct()

        returned_data = SANSLoad(SANSState=Serializer.to_json(state),
                                 SampleScatterWorkspace="mask_sans_ws",
                                 SampleScatterMonitorWorkspace="dummy")

        workspace = returned_data[0]
        DeleteWorkspace(returned_data[1])

        # Act
        workspace = mask_workspace(state=state,
                                   component_as_string="LAB",
                                   workspace=workspace)

        # Assert
        self._do_assert_non_masked(workspace, expected_spectra)
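A quick sanity check of the bookkeeping above, independent of Mantid: the phi mask leaves one quadrant of the low-angle bank plus the whole high-angle bank unmasked, and the counts below follow directly from the ranges used in the test.

# Pure-Python check of how many spectra the test expects to remain unmasked.
lab_quadrant = sum(255 if y % 2 == 0 else 257 for y in range(60, 120))
hab_bank = 122889 - 92169
print(lab_quadrant, hab_bank)  # 15360 LAB spectra plus 30720 HAB spectra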
Example #3
def set_properties_for_beam_centre_algorithm(beam_centre_alg,
                                             reduction_package,
                                             workspace_to_name,
                                             workspace_to_monitor):
    """
    Sets up everything necessary on the beam centre algorithm.

    :param beam_centre_alg: a handle to the beam centre algorithm
    :param reduction_package: a reduction package object
    :param workspace_to_name: a map from workspace type to the algorithm's workspace property name
    :param workspace_to_monitor: a map from workspace type to the algorithm's monitor property name
    """
    # Go through the elements of the reduction package and set them on the beam centre algorithm
    # Set the SANSState
    state = reduction_package.state
    state_json = Serializer.to_json(state)
    beam_centre_alg.setProperty("SANSState", state_json)

    # Set the input workspaces
    workspaces = reduction_package.workspaces
    for workspace_type, workspace in list(workspaces.items()):
        if workspace is not None:
            beam_centre_alg.setProperty(workspace_to_name[workspace_type],
                                        workspace)

    # Set the monitors
    monitors = reduction_package.monitors
    for workspace_type, monitor in list(monitors.items()):
        if monitor is not None:
            beam_centre_alg.setProperty(workspace_to_monitor[workspace_type],
                                        monitor)
    def _load_workspace(self, state):
        load_alg = AlgorithmManager.createUnmanaged("SANSLoad")
        load_alg.setChild(True)
        load_alg.initialize()

        state_dict = Serializer.to_json(state)
        load_alg.setProperty("SANSState", state_dict)
        load_alg.setProperty("PublishToCache", False)
        load_alg.setProperty("UseCached", False)
        load_alg.setProperty("SampleScatterWorkspace", EMPTY_NAME)
        load_alg.setProperty("SampleScatterMonitorWorkspace", EMPTY_NAME)
        if state.data.sample_transmission:
            load_alg.setProperty("SampleTransmissionWorkspace", EMPTY_NAME)
        if state.data.sample_direct:
            load_alg.setProperty("SampleDirectWorkspace", EMPTY_NAME)

        # Act
        load_alg.execute()
        self.assertTrue(load_alg.isExecuted())
        sample_scatter = load_alg.getProperty("SampleScatterWorkspace").value
        sample_scatter_monitor_workspace = load_alg.getProperty(
            "SampleScatterMonitorWorkspace").value
        if state.data.sample_transmission:
            transmission_workspace = load_alg.getProperty(
                "SampleTransmissionWorkspace").value
        else:
            transmission_workspace = None
        if state.data.sample_direct:
            direct_workspace = load_alg.getProperty(
                "SampleDirectWorkspace").value
        else:
            direct_workspace = None
        return sample_scatter, sample_scatter_monitor_workspace, transmission_workspace, direct_workspace
Example #5
    @staticmethod
    def _run_load(state,
                  publish_to_cache,
                  use_cached,
                  move_workspace=False,
                  beam_coordinates=None,
                  component=None,
                  output_workspace_names=None):
        load_alg = AlgorithmManager.createUnmanaged("SANSLoad")
        load_alg.setChild(True)
        load_alg.setRethrows(True)
        load_alg.initialize()

        state_dict = Serializer.to_json(state)
        load_alg.setProperty("SANSState", state_dict)
        load_alg.setProperty("PublishToCache", publish_to_cache)
        load_alg.setProperty("UseCached", use_cached)
        if move_workspace:
            load_alg.setProperty("Component", component)
            load_alg.setProperty("BeamCoordinates", beam_coordinates)

        if output_workspace_names:
            for name, value in output_workspace_names.items():
                load_alg.setProperty(name, value)

        # Act
        load_alg.execute()
        # self.assertTrue(load_alg.isExecuted())
        return load_alg
    def _run_single_reduction(self,
                              state,
                              sample_scatter,
                              sample_monitor,
                              sample_transmission=None,
                              sample_direct=None,
                              can_scatter=None,
                              can_monitor=None,
                              can_transmission=None,
                              can_direct=None,
                              output_settings=None,
                              event_slice_optimisation=False,
                              save_can=False,
                              use_optimizations=False):
        single_reduction_name = "SANSSingleReduction"
        ver = 1 if not event_slice_optimisation else 2
        state_dict = Serializer.to_json(state)

        single_reduction_options = {
            "SANSState": state_dict,
            "SampleScatterWorkspace": sample_scatter,
            "SampleScatterMonitorWorkspace": sample_monitor,
            "UseOptimizations": use_optimizations,
            "SaveCan": save_can
        }
        if sample_transmission:
            single_reduction_options.update(
                {"SampleTransmissionWorkspace": sample_transmission})

        if sample_direct:
            single_reduction_options.update(
                {"SampleDirectWorkspace": sample_direct})

        if can_scatter:
            single_reduction_options.update(
                {"CanScatterWorkspace": can_scatter})

        if can_monitor:
            single_reduction_options.update(
                {"CanScatterMonitorWorkspace": can_monitor})

        if can_transmission:
            single_reduction_options.update(
                {"CanTransmissionWorkspace": can_transmission})

        if can_direct:
            single_reduction_options.update({"CanDirectWorkspace": can_direct})

        if output_settings:
            single_reduction_options.update(output_settings)

        single_reduction_alg = create_unmanaged_algorithm(
            single_reduction_name, version=ver, **single_reduction_options)

        # Act
        single_reduction_alg.execute()
        self.assertTrue(single_reduction_alg.isExecuted())
        return single_reduction_alg
    def _load_workspace(self, state, workspace_name):
        self._prepare_to_load_scatter_sample_only(state)
        self._handle_multi_period_data(state)

        serialized_state = Serializer.to_json(state)
        workspace = self._perform_load(serialized_state)
        self._perform_move(state, workspace)
        self._store_in_ads_as_hidden(workspace_name, workspace)
        return workspace
Example #8
    def display_state_diagnostic_tree(self, state):
        # Convert to dict before passing the state to the view
        dict_vals = None

        if state:
            state = Serializer.to_json(state)
            dict_vals = json.loads(state)  # We intentionally do not use serializer to get a dict type back

        self._view.set_tree(dict_vals)
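A minimal, Mantid-free sketch of why json.loads is used here: the serializer produces a JSON string, and loading that string back with the standard library yields a plain dict which a tree view can walk, whereas Serializer.from_json would rebuild a state object. The FakeState class and the to_json helper are stand-ins invented for this sketch.

import json

class FakeState:
    # Stand-in for a SANS state object; the attribute names are made up.
    def __init__(self):
        self.wavelength_low = 1.5
        self.wavelength_high = 12.5

def to_json(obj):
    # Mimics the relevant behaviour of Serializer.to_json: returns a JSON string.
    return json.dumps(vars(obj))

state = FakeState()
serialized = to_json(state)         # a str
dict_vals = json.loads(serialized)  # a plain dict, ready for a tree widget
assert isinstance(dict_vals, dict)
print(dict_vals)  # {'wavelength_low': 1.5, 'wavelength_high': 12.5}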
def get_state_hash_for_can_reduction(state, reduction_mode, partial_type=None):
    """
    Creates a hash for a (modified) state object.

    Note that we need to modify the state object to exclude elements which are not relevant for the can reduction.
    This is primarily the setting of the sample workspaces. This is the only place where we directly alter the value
    of a state object in the entire reduction workflow. Note that we are not changing the original state
    object itself, only a deep copy of it.
    :param state: a SANSState object.
    :param reduction_mode: the reduction mode, here it can be LAB or HAB.
    :param partial_type: if it is a partial type, then it needs to be specified here.
    :return: the hash of the state
    """
    def remove_sample_related_information(full_state):
        state_to_hash = deepcopy(full_state)

        # Data
        state_to_hash.data.sample_scatter = EMPTY_NAME
        state_to_hash.data.sample_scatter_period = ALL_PERIODS
        state_to_hash.data.sample_transmission = EMPTY_NAME
        state_to_hash.data.sample_transmission_period = ALL_PERIODS
        state_to_hash.data.sample_direct = EMPTY_NAME
        state_to_hash.data.sample_direct_period = ALL_PERIODS
        state_to_hash.data.sample_scatter_run_number = 1

        # Save
        state_to_hash.save.user_specified_output_name = ""

        return state_to_hash

    new_state = remove_sample_related_information(state)
    new_state_serialized = Serializer.to_json(new_state)
    new_state_serialized = json.dumps(new_state_serialized,
                                      sort_keys=True,
                                      indent=4)

    # Add a tag for the reduction mode
    state_string = str(new_state_serialized)
    if reduction_mode is ReductionMode.LAB:
        state_string += "LAB"
    elif reduction_mode is ReductionMode.HAB:
        state_string += "HAB"
    else:
        raise RuntimeError(
            "Only LAB and HAB reduction modes are allowed at this point."
            " {} was provided".format(reduction_mode))

    # If we are dealing with a partial output workspace, then mark it as such
    if partial_type is OutputParts.COUNT:
        state_string += "counts"
    elif partial_type is OutputParts.NORM:
        state_string += "norm"
    elif partial_type is TransmissionType.CALCULATED:
        state_string += "calculated_transmission"
    elif partial_type is TransmissionType.UNFITTED:
        state_string += "unfitted_transmission"
    return str(get_hash_value(state_string))
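To see why serialising with sort_keys before hashing yields a reproducible, sample-independent key, here is a self-contained sketch of the same idea; the md5-based hash_value helper and the dict-shaped states are illustrative stand-ins, not the real get_hash_value or SANSState.

import hashlib
import json
from copy import deepcopy

def hash_value(text):
    # Stand-in for get_hash_value: any stable digest of the string will do.
    return hashlib.md5(text.encode("utf-8")).hexdigest()

def can_hash(state, reduction_mode):
    state_to_hash = deepcopy(state)
    # Blank out the sample-specific entries, as remove_sample_related_information does.
    state_to_hash["data"]["sample_scatter"] = ""
    state_to_hash["data"]["sample_scatter_run_number"] = 1
    serialized = json.dumps(state_to_hash, sort_keys=True, indent=4)
    return hash_value(serialized + reduction_mode)

state_a = {"data": {"sample_scatter": "sample_run_A", "sample_scatter_run_number": 101,
                    "can_scatter": "shared_can_run"}}
state_b = {"data": {"sample_scatter": "sample_run_B", "sample_scatter_run_number": 202,
                    "can_scatter": "shared_can_run"}}

# Identical can settings give an identical hash, so a cached can reduction can be re-used.
assert can_hash(state_a, "LAB") == can_hash(state_b, "LAB")
# The reduction-mode tag keeps LAB and HAB cans apart.
assert can_hash(state_a, "LAB") != can_hash(state_a, "HAB")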
Example #10
def run_initial_event_slice_reduction(reduction_alg, reduction_setting_bundle):
    """
    This function runs the initial core reduction for event slice data. This is essentially half
    a reduction (either sample or can), and is run before event slicing has been performed.

    :param reduction_alg: a handle to the initial event slice reduction algorithm.
    :param reduction_setting_bundle: a ReductionSettingBundle tuple
    :return: an EventSliceSettingBundle tuple
    """
    # Get component to reduce
    component = get_component_to_reduce(reduction_setting_bundle)
    # Set the properties on the reduction algorithms
    serialized_state = Serializer.to_json(reduction_setting_bundle.state)
    reduction_alg.setProperty("SANSState", serialized_state)
    reduction_alg.setProperty("Component", component)
    reduction_alg.setProperty("ScatterWorkspace",
                              reduction_setting_bundle.scatter_workspace)
    reduction_alg.setProperty(
        "ScatterMonitorWorkspace",
        reduction_setting_bundle.scatter_monitor_workspace)
    reduction_alg.setProperty("DataType",
                              reduction_setting_bundle.data_type.value)

    reduction_alg.setProperty("OutputWorkspace", EMPTY_NAME)
    reduction_alg.setProperty("OutputMonitorWorkspace", EMPTY_NAME)

    # Run the reduction core
    reduction_alg.execute()

    # Get the results
    output_workspace = reduction_alg.getProperty("OutputWorkspace").value
    mask_workspace = reduction_alg.getProperty("DummyMaskWorkspace").value
    output_monitor_workspace = reduction_alg.getProperty(
        "OutputMonitorWorkspace").value

    return EventSliceSettingBundle(
        state=reduction_setting_bundle.state,
        data_type=reduction_setting_bundle.data_type,
        reduction_mode=reduction_setting_bundle.reduction_mode,
        output_parts=reduction_setting_bundle.output_parts,
        scatter_workspace=output_workspace,
        dummy_mask_workspace=mask_workspace,
        scatter_monitor_workspace=output_monitor_workspace,
        direct_workspace=reduction_setting_bundle.direct_workspace,
        transmission_workspace=reduction_setting_bundle.transmission_workspace)
    def _run_beam_centre_core(self,
                              state,
                              workspace,
                              monitor,
                              transmission=None,
                              direct=None,
                              detector_type=DetectorType.LAB,
                              component=DataType.SAMPLE,
                              centre_1=0.1,
                              centre_2=-0.1,
                              r_min=0.06,
                              r_max=0.26):
        beam_centre_core_alg = AlgorithmManager.createUnmanaged(
            "SANSBeamCentreFinderCore")
        beam_centre_core_alg.setChild(True)
        beam_centre_core_alg.initialize()

        state_dict = Serializer.to_json(state)
        beam_centre_core_alg.setProperty("SANSState", state_dict)
        beam_centre_core_alg.setProperty("ScatterWorkspace", workspace)
        beam_centre_core_alg.setProperty("ScatterMonitorWorkspace", monitor)

        if transmission:
            beam_centre_core_alg.setProperty("TransmissionWorkspace",
                                             transmission)

        if direct:
            beam_centre_core_alg.setProperty("DirectWorkspace", direct)

        beam_centre_core_alg.setProperty("Component", detector_type.value)
        beam_centre_core_alg.setProperty("DataType", component.value)
        beam_centre_core_alg.setProperty("Centre1", centre_1)
        beam_centre_core_alg.setProperty("Centre2", centre_2)
        beam_centre_core_alg.setProperty("RMax", r_max)
        beam_centre_core_alg.setProperty("RMin", r_min)

        beam_centre_core_alg.setProperty("OutputWorkspaceLeft", EMPTY_NAME)
        beam_centre_core_alg.setProperty("OutputWorkspaceRight", EMPTY_NAME)
        beam_centre_core_alg.setProperty("OutputWorkspaceTop", EMPTY_NAME)
        beam_centre_core_alg.setProperty("OutputWorkspaceBottom", EMPTY_NAME)

        # Act
        beam_centre_core_alg.execute()
        self.assertTrue(beam_centre_core_alg.isExecuted())
        return beam_centre_core_alg
    def test_that_enum_list_can_be_serialized(self):
        original_obj = ExampleWrapper()
        original_obj.bar = [FakeEnumClass.BAR, FakeEnumClass.BAR]

        # Serializing test
        serialized = Serializer.to_json(original_obj)
        self.assertTrue("bar" in serialized)
        self.assertTrue("_foo" in serialized)
        self.assertTrue(isinstance(serialized, str))

        # Deserializing Test
        fake = JsonSerializerTest.FakeAlgorithm()
        fake.initialize()
        fake.setProperty("Args", serialized)
        property_manager = fake.getProperty("Args").value

        new_obj = Serializer.from_json(property_manager)
        self.assertEqual(original_obj.bar, new_obj.bar)
        self.assertEqual(original_obj._foo, new_obj._foo)
Example #13
def run_core_reduction(reduction_alg, reduction_setting_bundle):
    """
    This function runs a core reduction. This is essentially half a reduction (either sample or can).

    :param reduction_alg: a handle to the reduction algorithm.
    :param reduction_setting_bundle: a ReductionSettingBundle tuple
    :return: a list of reduced slices
    """

    # Get component to reduce
    component = get_component_to_reduce(reduction_setting_bundle)
    # Set the properties on the reduction algorithms
    serialized_state = Serializer.to_json(reduction_setting_bundle.state)
    reduction_alg.setProperty("SANSState", serialized_state)
    reduction_alg.setProperty("Component", component)
    reduction_alg.setProperty("ScatterWorkspace",
                              reduction_setting_bundle.scatter_workspace)
    reduction_alg.setProperty(
        "ScatterMonitorWorkspace",
        reduction_setting_bundle.scatter_monitor_workspace)
    reduction_alg.setProperty("DataType",
                              reduction_setting_bundle.data_type.value)

    if reduction_setting_bundle.transmission_workspace is not None:
        reduction_alg.setProperty(
            "TransmissionWorkspace",
            reduction_setting_bundle.transmission_workspace)

    if reduction_setting_bundle.direct_workspace is not None:
        reduction_alg.setProperty("DirectWorkspace",
                                  reduction_setting_bundle.direct_workspace)

    reduction_alg.setProperty("OutputWorkspaces", EMPTY_NAME)
    reduction_alg.setProperty("SumOfCounts", EMPTY_NAME)
    reduction_alg.setProperty("SumOfNormFactors", EMPTY_NAME)

    # Run the reduction core
    reduction_alg.execute()

    # Get the results
    reduced_slices = _pack_bundles(reduction_alg, reduction_setting_bundle)
    return reduced_slices
Example #14
def run_core_event_slice_reduction(reduction_alg, reduction_setting_bundle):
    """
    This function runs a core reduction for event slice data. This reduction slices by event time and converts to q.
    All other operations, such as moving and converting to histogram, have been performed before the event slicing.

    :param reduction_alg: a handle to the reduction algorithm.
    :param reduction_setting_bundle: a ReductionSettingBundle tuple
    :return: a list of reduced slices
    """

    # Get component to reduce
    component = get_component_to_reduce(reduction_setting_bundle)
    # Set the properties on the reduction algorithms
    serialized_state = Serializer.to_json(reduction_setting_bundle.state)
    reduction_alg.setProperty("SANSState", serialized_state)
    reduction_alg.setProperty("Component", component)
    reduction_alg.setProperty("ScatterWorkspace",
                              reduction_setting_bundle.scatter_workspace)
    reduction_alg.setProperty("DirectWorkspace",
                              reduction_setting_bundle.direct_workspace)
    reduction_alg.setProperty("TransmissionWorkspace",
                              reduction_setting_bundle.transmission_workspace)
    reduction_alg.setProperty("DummyMaskWorkspace",
                              reduction_setting_bundle.dummy_mask_workspace)
    reduction_alg.setProperty(
        "ScatterMonitorWorkspace",
        reduction_setting_bundle.scatter_monitor_workspace)

    reduction_alg.setProperty("DataType",
                              reduction_setting_bundle.data_type.value)

    reduction_alg.setProperty("OutputWorkspaces", EMPTY_NAME)
    reduction_alg.setProperty("SumOfCounts", EMPTY_NAME)
    reduction_alg.setProperty("SumOfNormFactors", EMPTY_NAME)

    # Run the reduction core
    reduction_alg.execute()

    # Pull the result out of the workspace
    reduced_slices = _pack_bundles(reduction_alg, reduction_setting_bundle)

    return reduced_slices
    def test_that_sans_state_can_be_serialized_and_deserialized_when_going_through_an_algorithm(
            self):
        # Arrange
        state = ComplexState()

        # Act
        serialized = Serializer.to_json(state)
        fake = JsonSerializerTest.FakeAlgorithm()
        fake.initialize()
        fake.setProperty("Args", serialized)
        property_manager = fake.getProperty("Args").value

        # Assert
        self.assertEqual(type(serialized), str)
        state_2 = Serializer.from_json(property_manager)

        # The direct sub state
        self.assertEqual(state.sub_state_1.float_list_parameter,
                         state_2.sub_state_1.float_list_parameter)

        # The regular parameters
        self.assertEqual(state_2.float_parameter, 23.)
        self.assertEqual(state_2.positive_float_with_none_parameter, 234.)
Example #16
def run_core_event_slice_reduction(reduction_alg, reduction_setting_bundle):
    """
    This function runs a core reduction for event slice data. This reduction slices by event time and converts to q.
    All other operations, such as moving and converting to histogram, have been performed before the event slicing.

    :param reduction_alg: a handle to the reduction algorithm.
    :param reduction_setting_bundle: a ReductionSettingBundle tuple
    :return: an OutputBundle, an OutputPartsBundle and an OutputTransmissionBundle
    """

    # Get component to reduce
    component = get_component_to_reduce(reduction_setting_bundle)
    # Set the properties on the reduction algorithms
    serialized_state = Serializer.to_json(reduction_setting_bundle.state)
    reduction_alg.setProperty("SANSState", serialized_state)
    reduction_alg.setProperty("Component", component)
    reduction_alg.setProperty("ScatterWorkspace",
                              reduction_setting_bundle.scatter_workspace)
    reduction_alg.setProperty("DirectWorkspace",
                              reduction_setting_bundle.direct_workspace)
    reduction_alg.setProperty("TransmissionWorkspace",
                              reduction_setting_bundle.transmission_workspace)
    reduction_alg.setProperty("DummyMaskWorkspace",
                              reduction_setting_bundle.dummy_mask_workspace)
    reduction_alg.setProperty(
        "ScatterMonitorWorkspace",
        reduction_setting_bundle.scatter_monitor_workspace)

    reduction_alg.setProperty("DataType",
                              reduction_setting_bundle.data_type.value)

    reduction_alg.setProperty("OutputWorkspace", EMPTY_NAME)
    reduction_alg.setProperty("SumOfCounts", EMPTY_NAME)
    reduction_alg.setProperty("SumOfNormFactors", EMPTY_NAME)

    # Run the reduction core
    reduction_alg.execute()

    # Get the results
    output_workspace = reduction_alg.getProperty("OutputWorkspace").value
    output_workspace_count = reduction_alg.getProperty("SumOfCounts").value
    output_workspace_norm = reduction_alg.getProperty("SumOfNormFactors").value
    output_calculated_transmission_workspace = reduction_alg.getProperty(
        "CalculatedTransmissionWorkspace").value
    output_unfitted_transmission_workspace = reduction_alg.getProperty(
        "UnfittedTransmissionWorkspace").value

    # Pull the result out of the workspace
    output_bundle = OutputBundle(
        state=reduction_setting_bundle.state,
        data_type=reduction_setting_bundle.data_type,
        reduction_mode=reduction_setting_bundle.reduction_mode,
        output_workspace=output_workspace)

    output_parts_bundle = OutputPartsBundle(
        state=reduction_setting_bundle.state,
        data_type=reduction_setting_bundle.data_type,
        reduction_mode=reduction_setting_bundle.reduction_mode,
        output_workspace_count=output_workspace_count,
        output_workspace_norm=output_workspace_norm)

    output_transmission_bundle = OutputTransmissionBundle(
        state=reduction_setting_bundle.state,
        data_type=reduction_setting_bundle.data_type,
        calculated_transmission_workspace=output_calculated_transmission_workspace,
        unfitted_transmission_workspace=output_unfitted_transmission_workspace,
    )
    return output_bundle, output_parts_bundle, output_transmission_bundle
Example #17
def run_core_reduction(reduction_alg, reduction_setting_bundle):
    """
    This function runs a core reduction. This is essentially half a reduction (either sample or can).

    :param reduction_alg: a handle to the reduction algorithm.
    :param reduction_setting_bundle: a ReductionSettingBundle tuple
    :return: an OutputBundle, an OutputPartsBundle and an OutputTransmissionBundle
    """

    # Get component to reduce
    component = get_component_to_reduce(reduction_setting_bundle)
    # Set the properties on the reduction algorithms
    serialized_state = Serializer.to_json(reduction_setting_bundle.state)
    reduction_alg.setProperty("SANSState", serialized_state)
    reduction_alg.setProperty("Component", component)
    reduction_alg.setProperty("ScatterWorkspace",
                              reduction_setting_bundle.scatter_workspace)
    reduction_alg.setProperty(
        "ScatterMonitorWorkspace",
        reduction_setting_bundle.scatter_monitor_workspace)
    reduction_alg.setProperty("DataType",
                              reduction_setting_bundle.data_type.value)

    if reduction_setting_bundle.transmission_workspace is not None:
        reduction_alg.setProperty(
            "TransmissionWorkspace",
            reduction_setting_bundle.transmission_workspace)

    if reduction_setting_bundle.direct_workspace is not None:
        reduction_alg.setProperty("DirectWorkspace",
                                  reduction_setting_bundle.direct_workspace)

    reduction_alg.setProperty("OutputWorkspace", EMPTY_NAME)
    reduction_alg.setProperty("SumOfCounts", EMPTY_NAME)
    reduction_alg.setProperty("SumOfNormFactors", EMPTY_NAME)

    # Run the reduction core
    reduction_alg.execute()

    # Get the results
    output_workspace = reduction_alg.getProperty("OutputWorkspace").value
    output_workspace_count = reduction_alg.getProperty("SumOfCounts").value
    output_workspace_norm = reduction_alg.getProperty("SumOfNormFactors").value
    output_calculated_transmission_workspace = reduction_alg.getProperty(
        "CalculatedTransmissionWorkspace").value
    output_unfitted_transmission_workspace = reduction_alg.getProperty(
        "UnfittedTransmissionWorkspace").value
    # Pull the result out of the workspace
    output_bundle = OutputBundle(
        state=reduction_setting_bundle.state,
        data_type=reduction_setting_bundle.data_type,
        reduction_mode=reduction_setting_bundle.reduction_mode,
        output_workspace=output_workspace)

    output_parts_bundle = OutputPartsBundle(
        state=reduction_setting_bundle.state,
        data_type=reduction_setting_bundle.data_type,
        reduction_mode=reduction_setting_bundle.reduction_mode,
        output_workspace_count=output_workspace_count,
        output_workspace_norm=output_workspace_norm)

    output_transmission_bundle = OutputTransmissionBundle(
        state=reduction_setting_bundle.state,
        data_type=reduction_setting_bundle.data_type,
        calculated_transmission_workspace=output_calculated_transmission_workspace,
        unfitted_transmission_workspace=output_unfitted_transmission_workspace)
    return output_bundle, output_parts_bundle, output_transmission_bundle