Exemplo n.º 1
0
    def _process_command_queue(self, data_state):
        """
        Process the command queue sequentially as FIFO structure

        @param data_state: the data state.
        @return: a SANSState object.
        """
        # Resolve the file information for the sample scatter run up front.
        scatter_file = data_state.sample_scatter
        file_info = SANSFileInformationFactory().create_sans_file_information(
            scatter_file)

        # If we have a clean instruction in there, then we should apply it to all commands
        self._apply_clean_if_required()

        # Evaluate every non-data command; each handler records its result in
        # the _processed_state_settings dictionary. DataCommands are dealt
        # with separately, so they are skipped here.
        for cmd in self._commands:
            if not isinstance(cmd, DataCommand):
                self._method_map[cmd.command_id](cmd)

        parser = CommandInterfaceAdapter(
            data_info=data_state,
            processed_state=self._processed_state_settings)
        run_builder = StateRunDataBuilder(file_information=file_info)

        # Keep the director on the instance, then build all states from it.
        self._state_director = StateBuilder(i_state_parser=parser,
                                            run_data_builder=run_builder)
        return self._state_director.get_all_states()
Exemplo n.º 2
0
    def test_that_produces_correct_workspace_multiperiod_LARMOR(self):
        """Horizontal integral diagnostic on multiperiod LARMOR data matches the reference."""
        run_number = "LARMOR00013065"

        # Build the data information for the sample run.
        file_info = SANSFileInformationFactory().create_sans_file_information(
            run_number)
        builder = get_data_builder(SANSFacility.ISIS, file_info)
        builder.set_sample_scatter(run_number)
        builder.set_calibration(
            "80tubeCalibration_1-05-2015_r3157-3160.nxs")
        data_state = builder.build()

        # Combine with the settings from the user file to get the final state.
        user_file = "USER_LARMOR_151B_LarmorTeam_80tubes_BenchRot1p4_M4_r3699.txt"
        state = StateBuilder.new_instance(
            data_information=data_state,
            file_information=file_info,
            user_filename=user_file).get_all_states()

        # Act
        output_workspaces = run_integral('', True, IntegralEnum.Horizontal,
                                         DetectorType.LAB, state)

        # Evaluate the first output against the stored reference file.
        self._compare_workspace(output_workspaces[0],
                                "LARMOR_ws_diagnostic_reference.nxs")
Exemplo n.º 3
0
    def test_that_produces_correct_workspace_for_SANS2D(self):
        """Horizontal integral diagnostic on SANS2D data matches the reference."""
        run_number = "SANS2D00034484"

        # Build the data information for the sample run.
        file_info = SANSFileInformationFactory().create_sans_file_information(
            run_number)
        builder = get_data_builder(SANSFacility.ISIS, file_info)
        builder.set_sample_scatter(run_number)
        builder.set_calibration("TUBE_SANS2D_BOTH_31681_25Sept15.nxs")
        data_state = builder.build()

        # Combine with the settings from the user file to get the final state.
        user_file = "USER_SANS2D_154E_2p4_4m_M3_Xpress_8mm_SampleChanger.txt"
        state = StateBuilder.new_instance(
            file_information=file_info,
            data_information=data_state,
            user_filename=user_file).get_all_states()
        state.compatibility.use_compatibility_mode = True

        # Act
        output_workspaces = run_integral('', True, IntegralEnum.Horizontal,
                                         DetectorType.LAB, state)

        # Exactly one diagnostic workspace is expected; compare it with the
        # stored reference.
        self.assertEqual(len(output_workspaces), 1)
        self._compare_workspace(output_workspaces[0],
                                "SANS2D_ws_centred_diagnostic_reference.nxs")
Exemplo n.º 4
0
    def test_batch_reduction_on_period_time_sliced_wavelength_range_data(self):
        """
        Batch-reduce one period of a multiperiod run with two time slices and
        two wavelength ranges; one workspace per (range, slice) combination
        must appear on the AnalysisDataService.
        """
        # Build the data information
        file_information_factory = SANSFileInformationFactory()
        file_information = file_information_factory.create_sans_file_information(
            "SANS2D0005512")

        data_builder = get_data_builder(SANSFacility.ISIS, file_information)
        data_builder.set_sample_scatter("SANS2D0005512")
        data_builder.set_sample_scatter_period(1)

        data_info = data_builder.build()

        # Get the rest of the state from the user file
        user_filename = "MASKSANS2Doptions.091A"
        user_file_director = StateBuilder.new_instance(
            file_information=file_information,
            data_information=data_info,
            user_filename=user_filename)

        state = user_file_director.get_all_states()
        # Set the reduction mode to LAB
        state.reduction.reduction_mode = ReductionMode.LAB

        # Two event-time slices: [1, 3] s and [3, 5] s.
        state.slice.start_time = [1.0, 3.0]
        state.slice.end_time = [3.0, 5.0]

        # Two wavelength ranges; the adjustment sub-states must be kept in
        # sync with the main wavelength state for the reduction to be valid.
        start = [1.0, 1.0]
        end = [3.0, 2.0]
        state.wavelength.wavelength_low = start
        state.wavelength.wavelength_high = end

        state.adjustment.normalize_to_monitor.wavelength_low = start
        state.adjustment.normalize_to_monitor.wavelength_high = end

        state.adjustment.calculate_transmission.wavelength_low = start
        state.adjustment.calculate_transmission.wavelength_high = end

        state.adjustment.wavelength_and_pixel_adjustment.wavelength_low = start
        state.adjustment.wavelength_and_pixel_adjustment.wavelength_high = end

        # Act
        states = [state]
        self._run_batch_reduction(states, use_optimizations=False)

        # Assert
        # We only assert that the expected workspaces exist on the ADS
        expected_workspaces = [
            "5512_p1rear_1D_1.0_2.0Phi-45.0_45.0_t1.00_T3.00",
            "5512_p1rear_1D_1.0_2.0Phi-45.0_45.0_t3.00_T5.00",
            "5512_p1rear_1D_1.0_3.0Phi-45.0_45.0_t1.00_T3.00",
            "5512_p1rear_1D_1.0_3.0Phi-45.0_45.0_t3.00_T5.00"
        ]
        try:
            for element in expected_workspaces:
                self.assertTrue(AnalysisDataService.doesExist(element))
        finally:
            # Clean up even when an assertion fails, so leftover workspaces
            # cannot leak into subsequent tests.
            for element in expected_workspaces:
                if AnalysisDataService.doesExist(element):
                    AnalysisDataService.remove(element)
    def test_that_single_reduction_evaluates_LAB_for_2D_reduction(self):
        """A two-dimensional LAB single reduction reproduces the stored 2D reference."""
        sample_run = "SANS2D00034484"

        # Build the data information for the sample and can runs.
        file_info = SANSFileInformationFactory().create_sans_file_information(
            sample_run)
        builder = get_data_builder(SANSFacility.ISIS, file_info)
        builder.set_sample_scatter(sample_run)
        builder.set_sample_transmission("SANS2D00034505")
        builder.set_sample_direct("SANS2D00034461")
        builder.set_can_scatter("SANS2D00034481")
        builder.set_can_transmission("SANS2D00034502")
        builder.set_can_direct("SANS2D00034461")
        builder.set_calibration("TUBE_SANS2D_BOTH_31681_25Sept15.nxs")
        data_info = builder.build()

        # Combine with the settings from the user file.
        user_file = "USER_SANS2D_154E_2p4_4m_M3_Xpress_8mm_SampleChanger.txt"
        state = StateBuilder.new_instance(
            file_information=file_info,
            data_information=data_info,
            user_filename=user_file).get_all_states()

        # Request a two-dimensional LAB reduction in compatibility mode.
        state.reduction.reduction_mode = ReductionMode.LAB
        state.reduction.reduction_dimensionality = ReductionDimensionality.TWO_DIM
        state.convert_to_q.reduction_dimensionality = ReductionDimensionality.TWO_DIM
        state.compatibility.use_compatibility_mode = True

        # Load the sample and can workspaces.
        (sample, sample_monitor, transmission_workspace, direct_workspace,
         can, can_monitor, can_transmission, can_direct) = self._load_workspace(state)

        # Act
        single_reduction_alg = self._run_single_reduction(
            state,
            sample_scatter=sample,
            sample_transmission=transmission_workspace,
            sample_direct=direct_workspace,
            sample_monitor=sample_monitor,
            can_scatter=can,
            can_monitor=can_monitor,
            can_transmission=can_transmission,
            can_direct=can_direct,
            output_settings={"OutputWorkspaceLAB": EMPTY_NAME})
        output_workspace = single_reduction_alg.getProperty(
            "OutputWorkspaceLAB").value

        # Compare the output of the reduction with the reference
        self._compare_to_reference(output_workspace,
                                   "SANS2D_ws_D20_reference_LAB_2D.nxs")
Exemplo n.º 6
0
    def test_batch_reduction_on_time_sliced_file(self):
        """
        Batch reduction with two event-time slices should put one workspace
        per slice on the ADS, each matching its stored reference file.
        """
        # Build the data information
        file_information_factory = SANSFileInformationFactory()
        file_information = file_information_factory.create_sans_file_information(
            "SANS2D00034484")

        data_builder = get_data_builder(SANSFacility.ISIS, file_information)
        data_builder.set_sample_scatter("SANS2D00034484")
        data_builder.set_sample_transmission("SANS2D00034505")
        data_builder.set_sample_direct("SANS2D00034461")
        data_builder.set_can_scatter("SANS2D00034481")
        data_builder.set_can_transmission("SANS2D00034502")
        data_builder.set_can_direct("SANS2D00034461")

        data_builder.set_calibration("TUBE_SANS2D_BOTH_31681_25Sept15.nxs")

        data_info = data_builder.build()

        # Get the rest of the state from the user file
        user_filename = "USER_SANS2D_154E_2p4_4m_M3_Xpress_8mm_SampleChanger.txt"
        user_file_director = StateBuilder.new_instance(
            file_information=file_information,
            data_information=data_info,
            user_filename=user_filename)

        state = user_file_director.get_all_states()
        # Set the reduction mode to LAB
        state.reduction.reduction_mode = ReductionMode.LAB
        state.compatibility.use_compatibility_mode = True  # COMPATIBILITY BEGIN -- Remove when appropriate
        # Two time slices: [1, 3] s and [3, 5] s.
        state.slice.start_time = [1.0, 3.0]
        state.slice.end_time = [3.0, 5.0]

        # Act
        states = [state]
        self._run_batch_reduction(states, use_optimizations=False)

        expected_workspaces = [
            "34484_rear_1D_1.75_16.5_t1.00_T3.00",
            "34484_rear_1D_1.75_16.5_t3.00_T5.00"
        ]
        reference_file_names = [
            "SANS2D_event_slice_referance_t1.00_T3.00.nxs",
            "SANS2D_event_slice_referance_t3.00_T5.00.nxs"
        ]

        try:
            for element, reference_file in zip(expected_workspaces,
                                               reference_file_names):
                self.assertTrue(AnalysisDataService.doesExist(element))
                # Evaluate it up to a defined point
                self._compare_workspace(element, reference_file)
        finally:
            # Clean up even when a comparison fails, so leftover workspaces
            # cannot leak into subsequent tests.
            for element in expected_workspaces:
                if AnalysisDataService.doesExist(element):
                    AnalysisDataService.remove(element)
    def test_that_beam_centre_core_produces_correct_workspaces(self):
        """Beam-centre core quadrant outputs match their stored reference files."""
        sample_run = "SANS2D00034484"

        # Build the data information.
        file_info = SANSFileInformationFactory().create_sans_file_information(
            sample_run)
        builder = get_data_builder(SANSFacility.ISIS, file_info)
        builder.set_sample_scatter(sample_run)
        builder.set_sample_transmission("SANS2D00034505")
        builder.set_sample_direct("SANS2D00034461")
        builder.set_calibration("TUBE_SANS2D_BOTH_31681_25Sept15.nxs")
        data_state = builder.build()

        # Combine with the settings from the user file.
        user_file = "USER_SANS2D_154E_2p4_4m_M3_Xpress_8mm_SampleChanger.txt"
        state = StateBuilder.new_instance(
            data_information=data_state,
            file_information=file_info,
            user_filename=user_file).get_all_states()

        state.compatibility.use_compatibility_mode = True

        # Load the sample workspaces.
        workspace, workspace_monitor, transmission_workspace, direct_workspace = \
            self._load_workspace(state)

        # Act
        reduction_core_alg = self._run_beam_centre_core(
            state, workspace, workspace_monitor, transmission_workspace,
            direct_workspace)

        # Evaluate each quadrant output against its stored reference file.
        for side in ("Left", "Right", "Top", "Bottom"):
            quadrant = reduction_core_alg.getProperty(
                "OutputWorkspace{0}".format(side)).value
            self._compare_workspace(
                quadrant,
                "SANS2D_ws_D20_reference_{0}.nxs".format(side.lower()))
Exemplo n.º 8
0
    def test_that_reduction_core_evaluates_LAB(self):
        """LAB reduction core output and transmission workspaces match their references."""
        sample_run = "SANS2D00034484"

        # Build the data information.
        file_info = SANSFileInformationFactory().create_sans_file_information(
            sample_run)
        builder = get_data_builder(SANSFacility.ISIS, file_info)
        builder.set_sample_scatter(sample_run)
        builder.set_sample_transmission("SANS2D00034505")
        builder.set_sample_direct("SANS2D00034461")
        builder.set_calibration("TUBE_SANS2D_BOTH_31681_25Sept15.nxs")
        data_state = builder.build()

        # Combine with the settings from the user file.
        user_file = "USER_SANS2D_154E_2p4_4m_M3_Xpress_8mm_SampleChanger.txt"
        state = StateBuilder.new_instance(
            file_information=file_info,
            data_information=data_state,
            user_filename=user_file).get_all_states()

        state.compatibility.use_compatibility_mode = True

        # Load the sample workspaces.
        workspace, workspace_monitor, transmission_workspace, direct_workspace = \
            self._load_workspace(state)

        # Act
        reduction_core_alg = self._run_reduction_core(state, workspace,
                                                      workspace_monitor,
                                                      transmission_workspace,
                                                      direct_workspace)

        # Evaluate each output property against its stored reference file.
        comparisons = (
            ("OutputWorkspace", "SANS2D_ws_D20_reference.nxs"),
            ("CalculatedTransmissionWorkspace",
             "SANS2D_ws_D20_calculated_transmission_reference.nxs"),
            ("UnfittedTransmissionWorkspace",
             "SANS2D_ws_D20_unfitted_transmission_reference.nxs"),
        )
        for property_name, reference_file in comparisons:
            self._compare_workspace(
                reduction_core_alg.getProperty(property_name).value,
                reference_file)
Exemplo n.º 9
0
    def test_that_batch_reduction_evaluates_LAB(self):
        """
        A LAB batch reduction of SANS2D00034484 reproduces the stored 1D
        reference workspace.
        """
        # Build the data information
        file_information_factory = SANSFileInformationFactory()
        file_information = file_information_factory.create_sans_file_information(
            "SANS2D00034484")

        data_builder = get_data_builder(SANSFacility.ISIS, file_information)
        data_builder.set_sample_scatter("SANS2D00034484")
        data_builder.set_sample_transmission("SANS2D00034505")
        data_builder.set_sample_direct("SANS2D00034461")
        data_builder.set_can_scatter("SANS2D00034481")
        data_builder.set_can_transmission("SANS2D00034502")
        data_builder.set_can_direct("SANS2D00034461")

        data_builder.set_calibration("TUBE_SANS2D_BOTH_31681_25Sept15.nxs")

        data_info = data_builder.build()

        user_filename = "USER_SANS2D_154E_2p4_4m_M3_Xpress_8mm_SampleChanger.txt"

        user_file_director = StateBuilder.new_instance(
            file_information=file_information,
            data_information=data_info,
            user_filename=user_filename)

        # Get the rest of the state from the user file
        state = user_file_director.get_all_states()

        # Set the reduction mode to LAB
        state.reduction.reduction_mode = ReductionMode.LAB
        # Since we are dealing with event based data but we want to compare it with histogram data from the
        # old reduction system we need to enable the compatibility mode
        state.compatibility.use_compatibility_mode = True  # COMPATIBILITY BEGIN -- Remove when appropriate

        # Act
        states = [state]
        self._run_batch_reduction(states, use_optimizations=False)
        workspace_name = "34484_rear_1D_1.75_16.5"

        try:
            output_workspace = AnalysisDataService.retrieve(workspace_name)

            # Evaluate it up to a defined point
            reference_file_name = "SANS2D_ws_D20_reference_LAB_1D.nxs"
            self._compare_workspace(output_workspace, reference_file_name)
        finally:
            # Remove the output even when retrieval or the comparison fails,
            # so later tests start from a clean ADS.
            if AnalysisDataService.doesExist(workspace_name):
                AnalysisDataService.remove(workspace_name)
Exemplo n.º 10
0
    def test_batch_reduction_on_multiperiod_file(self):
        """
        A LAB batch reduction of a multiperiod run should put one workspace
        per period (1-13) on the AnalysisDataService.
        """
        # Build the data information
        file_information_factory = SANSFileInformationFactory()
        file_information = file_information_factory.create_sans_file_information(
            "SANS2D0005512")
        data_builder = get_data_builder(SANSFacility.ISIS, file_information)
        data_builder.set_sample_scatter("SANS2D0005512")

        data_info = data_builder.build()

        # Get the rest of the state from the user file
        user_filename = "MASKSANS2Doptions.091A"
        user_file_parser = StateBuilder.new_instance(
            file_information=file_information,
            data_information=data_info,
            user_filename=user_filename)
        state = user_file_parser.get_all_states()
        # Set the reduction mode to LAB
        state.reduction.reduction_mode = ReductionMode.LAB

        # Act
        states = [state]
        self._run_batch_reduction(states, use_optimizations=False)

        # Assert
        # We only assert that the expected workspaces exist on the ADS
        expected_workspaces = [
            "5512_p{0}rear_1D_2.0_14.0Phi-45.0_45.0".format(i)
            for i in range(1, 14)
        ]
        try:
            for element in expected_workspaces:
                self.assertTrue(AnalysisDataService.doesExist(element),
                                msg="{0} was not found".format(element))
        finally:
            # Clean up even when an assertion fails, so leftover workspaces
            # cannot leak into subsequent tests.
            for element in expected_workspaces:
                if AnalysisDataService.doesExist(element):
                    AnalysisDataService.remove(element)
Exemplo n.º 11
0
    def test_that_single_reduction_evaluates_LAB(self):
        """A 1D LAB single reduction and its transmission outputs match their references."""
        sample_run = "SANS2D00034484"

        # Build the data information for the sample and can runs.
        file_info = SANSFileInformationFactory().create_sans_file_information(
            sample_run)
        builder = get_data_builder(SANSFacility.ISIS, file_info)
        builder.set_sample_scatter(sample_run)
        builder.set_sample_transmission("SANS2D00034505")
        builder.set_sample_direct("SANS2D00034461")
        builder.set_can_scatter("SANS2D00034481")
        builder.set_can_transmission("SANS2D00034502")
        builder.set_can_direct("SANS2D00034461")
        builder.set_calibration("TUBE_SANS2D_BOTH_31681_25Sept15.nxs")
        data_info = builder.build()

        # Combine with the settings from the user file.
        user_file = "USER_SANS2D_154E_2p4_4m_M3_Xpress_8mm_SampleChanger.txt"
        state = StateBuilder.new_instance(
            file_information=file_info,
            data_information=data_info,
            user_filename=user_file).get_all_states()
        state.reduction.reduction_mode = ReductionMode.LAB

        # Since we are dealing with event based data but we want to compare it with histogram data from the
        # old reduction system we need to enable the compatibility mode
        state.compatibility.use_compatibility_mode = True

        # Load the sample and can workspaces.
        (sample, sample_monitor, transmission_workspace, direct_workspace,
         can, can_monitor, can_transmission, can_direct) = self._load_workspace(state)

        # Act
        single_reduction_alg = self._run_single_reduction(
            state,
            sample_scatter=sample,
            sample_transmission=transmission_workspace,
            sample_direct=direct_workspace,
            sample_monitor=sample_monitor,
            can_scatter=can,
            can_monitor=can_monitor,
            can_transmission=can_transmission,
            can_direct=can_direct,
            output_settings={"OutputWorkspaceLAB": EMPTY_NAME})

        def output_of(property_name):
            # Helper: fetch one output workspace from the reduction algorithm.
            return single_reduction_alg.getProperty(property_name).value

        # Compare the reduced workspace with the reference.
        self._compare_to_reference(output_of("OutputWorkspaceLAB"),
                                   "SANS2D_ws_D20_reference_LAB_1D.nxs")

        # Compare the four transmission outputs with their references; the
        # calculated transmissions skip the spectra-map check.
        self._compare_to_reference(
            output_of("OutputWorkspaceCalculatedTransmission"),
            "SANS2D_ws_D20_calculated_transmission_reference_LAB.nxs",
            check_spectra_map=False)
        self._compare_to_reference(
            output_of("OutputWorkspaceUnfittedTransmission"),
            "SANS2D_ws_D20_unfitted_transmission_reference_LAB.nxs")
        self._compare_to_reference(
            output_of("OutputWorkspaceCalculatedTransmissionCan"),
            "SANS2D_ws_D20_calculated_transmission_reference_LAB_can.nxs",
            check_spectra_map=False)
        self._compare_to_reference(
            output_of("OutputWorkspaceUnfittedTransmissionCan"),
            "SANS2D_ws_D20_unfitted_transmission_reference_LAB_can.nxs")
Exemplo n.º 12
0
    def test_that_single_reduction_evaluates_LAB(self):
        """
        Run the event-slice-optimised single reduction (version 2) over two
        time slices, then run the non-optimised reduction (version 1) once
        per slice, and check that each slice of the v2 group output matches
        the corresponding v1 output.

        NOTE(review): the same ``state`` object is mutated between the three
        reduction runs (its slice times are overwritten), so the statement
        order in this test is load-bearing.
        """
        # Arrange
        # Build the data information
        file_information_factory = SANSFileInformationFactory()
        file_information = file_information_factory.create_sans_file_information(
            "SANS2D00034484")
        data_builder = get_data_builder(SANSFacility.ISIS, file_information)
        data_builder.set_sample_scatter("SANS2D00034484")
        data_builder.set_sample_transmission("SANS2D00034505")
        data_builder.set_sample_direct("SANS2D00034461")
        data_builder.set_can_scatter("SANS2D00034481")
        data_builder.set_can_transmission("SANS2D00034502")
        data_builder.set_can_direct("SANS2D00034461")

        data_builder.set_calibration("TUBE_SANS2D_BOTH_31681_25Sept15.nxs")
        data_info = data_builder.build()

        # Get the rest of the state from the user file
        user_file = "USER_SANS2D_154E_2p4_4m_M3_Xpress_8mm_SampleChanger.txt"
        user_file_director = StateBuilder.new_instance(
            file_information=file_information,
            data_information=data_info,
            user_filename=user_file)
        # Set the reduction mode to LAB
        state = user_file_director.get_all_states()
        state.reduction.reduction_mode = ReductionMode.LAB
        # Compatibility mode is deliberately off: the event-slice path under
        # test requires unsliced event data.
        state.compatibility.use_compatibility_mode = False

        # Add some event slices
        state.slice.start_time = [0.00, 300.00]
        state.slice.end_time = [300.00, 600.00]

        # Load the sample workspaces
        sample, sample_monitor, transmission_workspace, direct_workspace, can, can_monitor,\
        can_transmission, can_direct = self._load_workspace(state)  # noqa

        # Act: run the version-2 reduction over both slices at once.
        output_settings = {"OutputWorkspaceLAB": EMPTY_NAME}
        single_reduction_v2_alg = self._run_single_reduction(
            state,
            sample_scatter=sample,
            sample_transmission=transmission_workspace,
            sample_direct=direct_workspace,
            sample_monitor=sample_monitor,
            can_scatter=can,
            can_monitor=can_monitor,
            can_transmission=can_transmission,
            can_direct=can_direct,
            output_settings=output_settings,
            event_slice_optimisation=True,
            save_can=True,
            use_optimizations=True)

        # Check output workspaces: each output should be a workspace group
        # with one entry per event slice.
        output_workspace = single_reduction_v2_alg.getProperty(
            "OutputWorkspaceLAB").value
        lab_can = single_reduction_v2_alg.getProperty(
            "OutputWorkspaceLABCan").value
        lab_sample = single_reduction_v2_alg.getProperty(
            "OutputWorkspaceLABSample").value
        lab_can_count = single_reduction_v2_alg.getProperty(
            "OutputWorkspaceLABCanCount").value
        lab_can_norm = single_reduction_v2_alg.getProperty(
            "OutputWorkspaceLABCanNorm").value

        self._assert_group_workspace(output_workspace)
        self._assert_group_workspace(lab_can)
        self._assert_group_workspace(lab_sample)
        self._assert_group_workspace(lab_can_count)
        self._assert_group_workspace(lab_can_norm)

        # ---------------------------------------------------
        # Comparison test with version 1
        # This can be removed once version 2 has been adopted
        # ---------------------------------------------------
        # Run the first event slice
        state.slice.start_time = [0.00]
        state.slice.end_time = [300.00]

        single_reduction_alg_first_slice = self._run_single_reduction(
            state,
            sample_scatter=sample,
            sample_transmission=transmission_workspace,
            sample_direct=direct_workspace,
            sample_monitor=sample_monitor,
            can_scatter=can,
            can_monitor=can_monitor,
            can_transmission=can_transmission,
            can_direct=can_direct,
            output_settings=output_settings,
            event_slice_optimisation=False,
            save_can=True)

        # Run the second event slice
        state.slice.start_time = [300.00]
        state.slice.end_time = [600.00]

        single_reduction_alg_second_slice = self._run_single_reduction(
            state,
            sample_scatter=sample,
            sample_transmission=transmission_workspace,
            sample_direct=direct_workspace,
            sample_monitor=sample_monitor,
            can_scatter=can,
            can_monitor=can_monitor,
            can_transmission=can_transmission,
            can_direct=can_direct,
            output_settings=output_settings,
            event_slice_optimisation=False,
            save_can=True)
        # Now compare output workspaces from the two versions
        # Output LAB workspace
        event_slice_output_workspace = single_reduction_v2_alg.getProperty(
            "OutputWorkspaceLAB").value
        first_slice_output_workspace = single_reduction_alg_first_slice.getProperty(
            "OutputWorkspaceLAB").value
        second_slice_output_workspace = single_reduction_alg_second_slice.getProperty(
            "OutputWorkspaceLAB").value

        # Group entry [0] corresponds to slice 1, [1] to slice 2.
        self._compare_workspace(event_slice_output_workspace[0],
                                first_slice_output_workspace,
                                tolerance=1e-6)
        self._compare_workspace(event_slice_output_workspace[1],
                                second_slice_output_workspace,
                                tolerance=1e-6)

        # LAB sample
        event_slice_output_sample = single_reduction_v2_alg.getProperty(
            "OutputWorkspaceLABSample").value
        first_slice_output_sample = single_reduction_alg_first_slice.getProperty(
            "OutputWorkspaceLABSample").value
        second_slice_output_sample = single_reduction_alg_second_slice.getProperty(
            "OutputWorkspaceLABSample").value

        self._compare_workspace(event_slice_output_sample[0],
                                first_slice_output_sample,
                                tolerance=1e-6)
        self._compare_workspace(event_slice_output_sample[1],
                                second_slice_output_sample,
                                tolerance=1e-6)

        # LAB can
        event_slice_output_can = single_reduction_v2_alg.getProperty(
            "OutputWorkspaceLABCan").value
        first_slice_output_can = single_reduction_alg_first_slice.getProperty(
            "OutputWorkspaceLABCan").value
        second_slice_output_can = single_reduction_alg_second_slice.getProperty(
            "OutputWorkspaceLABCan").value

        self._compare_workspace(event_slice_output_can[0],
                                first_slice_output_can,
                                tolerance=1e-6)
        self._compare_workspace(event_slice_output_can[1],
                                second_slice_output_can,
                                tolerance=1e-6)
Exemplo n.º 13
0
    def test_similarity_between_results_in_compatibility_mode_and_non_compatibility_mode(self):
        """
        Runs the same SANS2D reduction twice — once with compatibility mode
        enabled and once disabled — and asserts that the two output
        workspaces agree within the given tolerance.

        The original test duplicated the whole build-state/load/reduce
        sequence for each mode; it is factored into a local helper here.
        """
        # Arrange: build the data information for the SANS2D sample run.
        file_information_factory = SANSFileInformationFactory()
        file_information = file_information_factory.create_sans_file_information("SANS2D00034484")
        data_builder = get_data_builder(SANSFacility.ISIS, file_information)
        data_builder.set_sample_scatter("SANS2D00034484")
        data_builder.set_sample_transmission("SANS2D00034505")
        data_builder.set_sample_direct("SANS2D00034461")
        data_builder.set_calibration("TUBE_SANS2D_BOTH_31681_25Sept15.nxs")
        data_state = data_builder.build()

        user_file = "USER_SANS2D_154E_2p4_4m_M3_Xpress_8mm_SampleChanger.txt"

        def run_reduction(use_compatibility_mode):
            # Build a fresh state from the user file, toggle compatibility
            # mode, run the reduction core and return its output workspace.
            user_file_director = StateBuilder.new_instance(file_information=file_information,
                                                           data_information=data_state,
                                                           user_filename=user_file)
            state = user_file_director.get_all_states()
            state.compatibility.use_compatibility_mode = use_compatibility_mode

            # Load the sample workspaces
            workspace, workspace_monitor, transmission_workspace, direct_workspace = self._load_workspace(state)

            # Act
            reduction_core_alg = self._run_reduction_core(state, workspace, workspace_monitor,
                                                          transmission_workspace, direct_workspace)
            return reduction_core_alg.getProperty("OutputWorkspace").value

        compatibility_output_workspace = run_reduction(use_compatibility_mode=True)
        non_compatibility_output_workspace = run_reduction(use_compatibility_mode=False)

        # Assert: both reductions must produce equivalent workspaces.
        compare_name = "CompareWorkspaces"
        compare_options = {"Workspace1": non_compatibility_output_workspace,
                           "Workspace2": compatibility_output_workspace,
                           "Tolerance": 1,
                           "CheckInstrument": False,
                           "CheckSample": False,
                           "ToleranceRelErr": True,
                           "CheckAllData": True,
                           "CheckMasking": True,
                           "CheckType": True,
                           "CheckAxes": True,
                           "CheckSpectraMap": True}
        compare_alg = create_unmanaged_algorithm(compare_name, **compare_options)
        compare_alg.setChild(False)
        compare_alg.execute()
        result = compare_alg.getProperty("Result").value
        self.assertTrue(result)
Exemplo n.º 14
0
 def setUp(self):
     """Create a StateBuilder under test, wired to autospecced mock collaborators."""
     file_parser = mock.create_autospec(spec=IStateParser)
     data_parser = mock.create_autospec(spec=StateRunDataBuilder)
     self.file_parser = file_parser
     self.data_parser = data_parser
     self.instance = StateBuilder(i_state_parser=file_parser,
                                  run_data_builder=data_parser)
Exemplo n.º 15
0
    def create_state(self, row_entry, file_lookup=True, user_file=""):
        """
        Create a SANS state object from a single table row.

        @param row_entry: the row entry holding run numbers, periods and per-row options.
        @param file_lookup: if True use the row's cached file information, otherwise a blank placeholder.
        @param user_file: not referenced in this method; kept for interface compatibility.
        @return: the constructed state object.
        """
        # 1. Get the data settings, such as sample_scatter, etc... and create the data state.
        file_information = row_entry.file_information if file_lookup else SANSFileInformationBlank()

        data_builder = get_data_builder(self._facility, file_information)

        # Apply each (apply-function, builder setter, row value) triple in the
        # original order; period entries go through the period-entry setter.
        entry_setters = (
            (self._set_data_entry, data_builder.set_sample_scatter, row_entry.sample_scatter),
            (self._set_data_period_entry, data_builder.set_sample_scatter_period, row_entry.sample_scatter_period),
            (self._set_data_entry, data_builder.set_sample_transmission, row_entry.sample_transmission),
            (self._set_data_period_entry, data_builder.set_sample_transmission_period,
             row_entry.sample_transmission_period),
            (self._set_data_entry, data_builder.set_sample_direct, row_entry.sample_direct),
            (self._set_data_period_entry, data_builder.set_sample_direct_period, row_entry.sample_direct_period),
            (self._set_data_entry, data_builder.set_can_scatter, row_entry.can_scatter),
            (self._set_data_period_entry, data_builder.set_can_scatter_period, row_entry.can_scatter_period),
            (self._set_data_entry, data_builder.set_can_transmission, row_entry.can_transmission),
            (self._set_data_period_entry, data_builder.set_can_transmission_period,
             row_entry.can_transmission_period),
            (self._set_data_entry, data_builder.set_can_direct, row_entry.can_direct),
            (self._set_data_period_entry, data_builder.set_can_direct_period, row_entry.can_direct_period),
        )
        for apply_setter, builder_setter, value in entry_setters:
            apply_setter(builder_setter, value)

        data = data_builder.build()

        # 2. Add elements from the options column (work on a copy so the
        # shared model is not mutated).
        state_gui_model = copy.deepcopy(self._state_gui_model)
        self._apply_column_options_to_state(row_entry, state_gui_model)

        # 3. Add other columns
        if row_entry.output_name:
            state_gui_model.output_name = row_entry.output_name

        if row_entry.sample_thickness:
            state_gui_model.sample_thickness = float(row_entry.sample_thickness)
        if row_entry.sample_height:
            state_gui_model.sample_height = float(row_entry.sample_height)
        if row_entry.sample_width:
            state_gui_model.sample_width = float(row_entry.sample_width)
        if row_entry.sample_shape:
            state_gui_model.sample_shape = row_entry.sample_shape

        # 4. Create the rest of the state based on the builder.
        settings = copy.deepcopy(state_gui_model.settings)
        adapter = CommandInterfaceAdapter(data_info=data, processed_state=settings)
        run_data_builder = StateRunDataBuilder(file_information=file_information)

        return StateBuilder(run_data_builder=run_data_builder,
                            i_state_parser=adapter).get_all_states()
Exemplo n.º 16
0
class CommandInterfaceStateDirector(object):
    def __init__(self, facility):
        """
        @param facility: the facility (e.g. ISIS) used to build data states.
        """
        super(CommandInterfaceStateDirector, self).__init__()
        # FIFO queue of user commands awaiting processing.
        self._commands = []
        # StateBuilder created during processing; kept for later retrieval.
        self._state_director = None
        # Settings accumulated while processing commands.
        self._processed_state_settings = {}
        self._facility = facility
        # Dispatch table command id -> handler; built once below.
        self._method_map = None
        self._set_up_method_map()

    def add_command(self, command):
        self._commands.append(command)

    def clear_commands(self):
        self._commands = []
        self._processed_state_settings = {}

    def process_commands(self):
        """
        Process the commands that have been queued; triggered by a command which requests a reduction.

        The execution strategy is:
        1. Find the data entries and create a SANSStateData object out of them.
        2. Go sequentially through the remaining commands in a FIFO manner.
        3. Delete the processed state settings. Only the commands are retained;
           keeping the processed settings as well would populate entries twice
           on a subsequent call.
        4. Return the constructed state.
        @returns a list of valid SANSState objects which can be used for data reductions, or raises an exception.
        """
        # 1. Get a SANSStateData object.
        data_state = self._get_data_state()

        # 2. Run the command queue against it.
        states = self._process_command_queue(data_state)

        # 3. Keep the commands but reset the processed settings.
        self._processed_state_settings = {}

        # 4. Provide the state.
        return states

    def get_commands(self):
        """Return the live list of queued commands (not a copy)."""
        return self._commands

    def _get_data_state(self):
        """
        Build the SANSStateData object from the queued data commands.

        @return: the built data state.
        """
        data_commands = self._get_data_commands()

        # The sample scatter entry is mandatory; only the most recent one counts.
        scatter_commands = self._get_elements_with_key(DataCommandId.SAMPLE_SCATTER, data_commands)
        scatter_file_name = scatter_commands[-1].file_name
        file_information_factory = SANSFileInformationFactory()
        file_information = file_information_factory.create_sans_file_information(scatter_file_name)

        # Build the state data by routing each command id to its pair of
        # (file setter, period setter) on the builder.
        builder = get_data_builder(self._facility, file_information)
        element_map = (
            (builder.set_sample_scatter, builder.set_sample_scatter_period, DataCommandId.SAMPLE_SCATTER),
            (builder.set_sample_transmission, builder.set_sample_transmission_period,
             DataCommandId.SAMPLE_TRANSMISSION),
            (builder.set_sample_direct, builder.set_sample_direct_period, DataCommandId.SAMPLE_DIRECT),
            (builder.set_can_scatter, builder.set_can_scatter_period, DataCommandId.CAN_SCATTER),
            (builder.set_can_transmission, builder.set_can_transmission_period, DataCommandId.CAN_TRANSMISSION),
            (builder.set_can_direct, builder.set_can_direct_period, DataCommandId.CAN_DIRECT),
        )
        for file_setter, period_setter, command_id in element_map:
            self._set_data_element(file_setter, period_setter, command_id, data_commands)

        return builder.build()

    def _get_data_commands(self):
        """
        Collects the data commands from the command queue (the queue itself is left untouched).

        @return: a list of data commands
        """
        return [command for command in self._commands if isinstance(command, DataCommand)]

    def _set_data_element(self, data_builder_file_setter,
                          data_builder_period_setter, command_id, commands):
        """
        Sets a data element (e.g. sample scatter file and sample scatter period) on the data builder.

        @param data_builder_file_setter: a handle to the correct setter for the file on the data builder.
        @param data_builder_period_setter: a handle to the correct setter for the period on the data builder.
        @param command_id: the command id
        @param commands: a list of commands.
        """
        data_elements = self._get_elements_with_key(command_id, commands)

        # If there is no element, then there is nothing to do
        if len(data_elements) == 0:
            return

        # If there is more than one element, then we are only interested in the last element. The user could
        # have overridden his wishes, e.g.
        # ...
        # AssignSample('SANS2D1234')
        # ...
        # AssignSample('SANS2D4321')
        # ...
        # We select therefore the last element
        data_element = data_elements[-1]
        file_name = data_element.file_name
        period = data_element.period
        data_builder_file_setter(file_name)
        data_builder_period_setter(period)

    @staticmethod
    def _get_elements_with_key(command_id, command_list):
        """
        Get all elements in the command list with a certain id

        @param command_id: the id of the command.
        @param command_list: a list of commands.
        @return: a list of commands which match the id.
        """
        return [
            element for element in command_list
            if element.command_id is command_id
        ]

    def _process_command_queue(self, data_state):
        """
        Process the command queue sequentially as a FIFO structure.

        @param data_state: the data state.
        @return: a SANSState object.
        """
        file_information_factory = SANSFileInformationFactory()
        file_information = file_information_factory.create_sans_file_information(data_state.sample_scatter)

        # A clean instruction, if present, must be applied to all commands first.
        self._apply_clean_if_required()

        # Dispatch every non-data command to its handler; the handlers populate
        # the _processed_state_settings dictionary. Data commands were already
        # consumed when the data state was built.
        for command in self._commands:
            if isinstance(command, DataCommand):
                continue
            self._method_map[command.command_id](command)

        adapter = CommandInterfaceAdapter(data_info=data_state,
                                          processed_state=self._processed_state_settings)
        run_data_builder = StateRunDataBuilder(file_information=file_information)

        self._state_director = StateBuilder(i_state_parser=adapter,
                                            run_data_builder=run_data_builder)
        return self._state_director.get_all_states()

    def _set_up_method_map(self):
        """
        Sets up a mapping between command ids and the adequate processing methods which can handle the command.
        """
        self._method_map = {
            NParameterCommandId.USER_FILE: self._process_user_file,
            NParameterCommandId.MASK: self._process_mask,
            NParameterCommandId.INCIDENT_SPECTRUM: self._process_incident_spectrum,
            NParameterCommandId.CLEAN: self._process_clean,
            NParameterCommandId.REDUCTION_DIMENSIONALITY: self._process_reduction_dimensionality,
            NParameterCommandId.SAMPLE_OFFSET: self._process_sample_offset,
            NParameterCommandId.DETECTOR: self._process_detector,
            NParameterCommandId.GRAVITY: self._process_gravity,
            NParameterCommandId.CENTRE: self._process_centre,
            NParameterCommandId.TRANS_FIT: self._process_trans_fit,
            NParameterCommandId.FRONT_DETECTOR_RESCALE: self._process_front_detector_rescale,
            NParameterCommandId.EVENT_SLICES: self._process_event_slices,
            NParameterCommandId.FLOOD_FILE: self._process_flood_file,
            NParameterCommandId.PHI_LIMIT: self._process_phi_limit,
            NParameterCommandId.WAVELENGTH_CORRECTION_FILE: self._process_wavelength_correction_file,
            NParameterCommandId.MASK_RADIUS: self._process_mask_radius,
            NParameterCommandId.WAVELENGTH_LIMIT: self._process_wavelength_limit,
            NParameterCommandId.QXY_LIMIT: self._process_qxy_limit,
            NParameterCommandId.WAV_RANGE_SETTINGS: self._process_wavrange,
            NParameterCommandId.COMPATIBILITY_MODE: self._process_compatibility_mode,
            NParameterCommandId.DETECTOR_OFFSETS: self._process_detector_offsets,
            NParameterCommandId.SAVE: self._process_save,
            NParameterCommandId.USER_SPECIFIED_OUTPUT_NAME: self._process_user_specified_output_name,
            NParameterCommandId.USER_SPECIFIED_OUTPUT_NAME_SUFFIX: self._process_user_specified_output_name_suffix,
            NParameterCommandId.USE_REDUCTION_MODE_AS_SUFFIX: self._process_use_reduction_mode_as_suffix,
        }

    def add_to_processed_state_settings(self,
                                        new_state_settings,
                                        treat_list_as_element=False):
        """
        Adds the new entries to the already processed state settings

        @param new_state_settings: a dictionary with new entries for the processed state settings
        @param treat_list_as_element: if we have a list and add it for the fist time, then we should treat it as an
                                      element if true. For example, if the state is [1, 2] the a new settint would, be
                                      [[1, 2,]] and not [1, 2]. With a further entry it could be [[1,2], [3,4]].
        """
        for key, value in list(new_state_settings.items()):
            # Add the new entry
            # 1. A similar entry can already exist, then append it (or extend it)
            # 2. The entry does not exist, but it is in form of a list (you would get that for example when
            #    dealing with input from the UserFileReader
            # 3. The entry does not exist and is not in a list. In this case we need to add it to a list.
            if key in self._processed_state_settings:
                # If the key already exists then we have to be careful. We have the current value V = [A, B, ...]
                # and our new element N
                # i. If the existing entries (ie A, ...) are not lists and N is not a list, then append to V.
                # ii. If the existing entries (ie A, ...) are not lists and N is a list then extend V.
                # iii. If the existing entries (ie A, ...) are lists and N is a list then append to V.
                # iv. If the existing entries (ie A, ...) are lists and N is not a list, then raise
                # The reason we have to be careful is that we might get an N from a user file which comes always already
                # in the form of a list.
                old_values = self._processed_state_settings[key]
                is_old_first_entry_a_list = isinstance(old_values[0], list)
                is_new_entry_a_list = isinstance(value, list)

                if not is_old_first_entry_a_list and not is_new_entry_a_list:
                    old_values.append(value)
                elif not is_old_first_entry_a_list and is_new_entry_a_list:
                    old_values.extend(value)
                elif is_old_first_entry_a_list and is_new_entry_a_list:
                    old_values.append(value)
                else:
                    raise RuntimeError(
                        "CommandInterfaceStateDirector: Trying to insert {0} which is a list into {0} "
                        "which is collection of non-list elements".format(
                            value, old_values))
            elif isinstance(value, list) and treat_list_as_element:
                self._processed_state_settings.update({key: [value]})
            elif isinstance(value, list):
                self._processed_state_settings.update({key: value})
            else:
                self._processed_state_settings.update({key: [value]})

    def _process_user_file(self, command):
        """
        Processes a user file and retains the parsed tags.

        @param command: the command with the user file path
        """
        user_file_path = command.values[0]
        parsed_entries = UserFileReader(user_file_path).read_user_file()
        self.add_to_processed_state_settings(parsed_entries)

    def _process_mask(self, command):
        """
        Process a mask line as it would be specified in a user file.
        """
        mask_line = command.values[0]
        # The user file parser understands the user-file mask syntax.
        parsed_entries = UserFileParser().parse_line(mask_line)
        self.add_to_processed_state_settings(parsed_entries)

    def _process_incident_spectrum(self, command):
        """Record which incident monitor spectrum to use, whether to
        interpolate it, and whether it applies to the transmission."""
        incident_monitor = command.values[0]
        interpolate = command.values[1]
        is_trans = command.values[2]
        self.add_to_processed_state_settings({
            MonId.SPECTRUM: monitor_spectrum(spectrum=incident_monitor,
                                             is_trans=is_trans,
                                             interpolate=interpolate)
        })

    def _apply_clean_if_required(self):
        """
        Applies a clean instruction: every command up to and including the most
        recent clean command is removed, and the processed settings are reset.

        This has to happen before the remaining commands are processed.
        """
        # Scan backwards for the most recent clean command.
        last_clean_index = None
        for index in range(len(self._commands) - 1, -1, -1):
            if self._commands[index].command_id == NParameterCommandId.CLEAN:
                last_clean_index = index
                break
        if last_clean_index is not None:
            # Mutate the list in place (get_commands hands out the live list).
            del self._commands[0:last_clean_index + 1]
            self._processed_state_settings = {}

    def _process_clean(self, command):
        _ = command  # noqa
        raise RuntimeError(
            "Trying the process a Clean command. The clean command should have removed itself and "
            "all previous commands. If it is still here, then this is a bug")

    def _process_reduction_dimensionality(self, command):
        """Record the requested reduction dimensionality (1D or 2D).

        Bug fix: removed the dead `_ = command  # noqa` line, which marked the
        argument as unused although it is read on the very next line."""
        reduction_dimensionality = command.values[0]
        self.add_to_processed_state_settings(
            {OtherId.REDUCTION_DIMENSIONALITY: reduction_dimensionality})

    def _process_sample_offset(self, command):
        """Record the sample offset."""
        self.add_to_processed_state_settings({SampleId.OFFSET: command.values[0]})

    def _process_detector(self, command):
        """Record the requested reduction mode (detector selection)."""
        self.add_to_processed_state_settings({DetectorId.REDUCTION_MODE: command.values[0]})

    def _process_gravity(self, command):
        """Record the gravity-correction flag and the extra length."""
        use_gravity = command.values[0]
        extra_length = command.values[1]
        self.add_to_processed_state_settings({GravityId.ON_OFF: use_gravity,
                                              GravityId.EXTRA_LENGTH: extra_length})

    def _process_centre(self, command):
        """Record the beam centre position for the given detector."""
        pos1 = command.values[0]
        pos2 = command.values[1]
        detector_type = command.values[2]
        self.add_to_processed_state_settings({
            SetId.CENTRE: position_entry(pos1=pos1, pos2=pos2, detector_type=detector_type)
        })

    def _process_trans_fit(self, command):
        """
        Record the transmission-fit settings for the sample, the can, or both.

        @param command: command whose values are [fit_data, wavelength_low,
                        wavelength_high, fit_type, polynomial_order].
        """
        def fit_type_to_data_type(fit_type_to_convert):
            return DataType.CAN if fit_type_to_convert is FitData.Can else DataType.SAMPLE

        fit_data = command.values[0]
        wavelength_low = command.values[1]
        wavelength_high = command.values[2]
        fit_type = command.values[3]
        polynomial_order = command.values[4]
        if fit_data is FitData.Both:
            data_to_fit = [FitData.Sample, FitData.Can]
        else:
            data_to_fit = [fit_data]

        # Bug fix: the entries were previously collected into one dict keyed on
        # FitId.GENERAL, so for FitData.Both the Sample entry was overwritten by
        # the Can entry. Adding each entry separately retains both, since
        # add_to_processed_state_settings appends to the existing list.
        for element in data_to_fit:
            data_type = fit_type_to_data_type(element)
            self.add_to_processed_state_settings({
                FitId.GENERAL: fit_general(start=wavelength_low,
                                           stop=wavelength_high,
                                           fit_type=fit_type,
                                           data_type=data_type,
                                           polynomial_order=polynomial_order)
            })

    def _process_front_detector_rescale(self, command):
        """Record the front-detector rescale/shift values and their fit ranges."""
        scale = command.values[0]
        shift = command.values[1]
        fit_scale = command.values[2]
        fit_shift = command.values[3]
        q_min = command.values[4]
        q_max = command.values[5]

        # Scale and shift values, plus one fit-range entry for each of them.
        self.add_to_processed_state_settings({
            DetectorId.RESCALE: scale,
            DetectorId.SHIFT: shift,
            DetectorId.RESCALE_FIT: det_fit_range(start=q_min, stop=q_max, use_fit=fit_scale),
            DetectorId.SHIFT_FIT: det_fit_range(start=q_min, stop=q_max, use_fit=fit_shift),
        })

    def _process_event_slices(self, command):
        """Record the event-slice binning specification (all command values)."""
        self.add_to_processed_state_settings({
            OtherId.EVENT_SLICES: event_binning_string_values(value=command.values)
        })

    def _process_flood_file(self, command):
        """Record the flood (flat) file for the given detector."""
        flood_path = command.values[0]
        detector_type = command.values[1]
        self.add_to_processed_state_settings({
            MonId.FLAT: monitor_file(file_path=flood_path, detector_type=detector_type)
        })

    def _process_phi_limit(self, command):
        """Record the phi-angle masking limits and the mirror setting."""
        phi_min = command.values[0]
        phi_max = command.values[1]
        use_phi_mirror = command.values[2]
        self.add_to_processed_state_settings({
            LimitsId.ANGLE: mask_angle_entry(min=phi_min,
                                             max=phi_max,
                                             use_mirror=use_phi_mirror)
        })

    def _process_wavelength_correction_file(self, command):
        """Record the direct (wavelength correction) file for the given detector."""
        correction_path = command.values[0]
        detector_type = command.values[1]
        self.add_to_processed_state_settings({
            MonId.DIRECT: monitor_file(file_path=correction_path, detector_type=detector_type)
        })

    def _process_mask_radius(self, command):
        """Record the radius-mask limits."""
        radius_min = command.values[0]
        radius_max = command.values[1]
        self.add_to_processed_state_settings({
            LimitsId.RADIUS: range_entry(start=radius_min, stop=radius_max)
        })

    def _process_wavelength_limit(self, command):
        """Record the full wavelength binning: low, high, step and step type."""
        low = command.values[0]
        high = command.values[1]
        step = command.values[2]
        step_type = command.values[3]
        self.add_to_processed_state_settings({
            LimitsId.WAVELENGTH: simple_range(start=low,
                                              stop=high,
                                              step=step,
                                              step_type=step_type)
        })

    def _process_wavrange(self, command):
        """
        Handle a wavelength-range command: optionally override the lower/upper
        wavelength bounds, the full-wavelength-range flag and the reduction mode.

        @param command: command whose values are [wavelength_low, wavelength_high,
                        full_wavelength_range, reduction_mode]; each may be None.
        @raises RuntimeError: if no LimitsId.WAVELENGTH entry has been processed
                              before — a step and step type are required.
        """
        wavelength_low = command.values[0]
        wavelength_high = command.values[1]
        full_wavelength_range = command.values[2]
        reduction_mode = command.values[3]

        # Update the lower and the upper wavelength values. Note that this is considered an incomplete setting, since
        # not step or step type have been specified. This means we need to update one of the processed commands, which
        # is not nice but the command interface forces us to do so. We take a copy of the last LimitsId.wavelength
        # entry, we copy it and then change the desired settings. This means it has to be set at this point, else
        # something is wrong
        if LimitsId.WAVELENGTH in self._processed_state_settings:
            last_entry = self._processed_state_settings[
                LimitsId.WAVELENGTH][-1]

            # Fall back to the previous bounds for whichever of low/high was not given.
            new_wavelength_low = wavelength_low if wavelength_low is not None else last_entry.start
            new_wavelength_high = wavelength_high if wavelength_high is not None else last_entry.stop
            new_range = simple_range(start=new_wavelength_low,
                                     stop=new_wavelength_high,
                                     step=last_entry.step,
                                     step_type=last_entry.step_type)

            # Only append a new entry when at least one bound was actually overridden.
            if wavelength_low is not None or wavelength_high is not None:
                copied_entry = {LimitsId.WAVELENGTH: new_range}
                self.add_to_processed_state_settings(copied_entry)
        else:
            raise RuntimeError(
                "CommandInterfaceStateDirector: Setting the lower and upper wavelength bounds is not"
                " possible. We require also a step and step range")

        if full_wavelength_range is not None:
            full_wavelength_range_entry = {
                OtherId.USE_FULL_WAVELENGTH_RANGE: full_wavelength_range
            }
            self.add_to_processed_state_settings(full_wavelength_range_entry)

        if reduction_mode is not None:
            reduction_mode_entry = {DetectorId.REDUCTION_MODE: reduction_mode}
            self.add_to_processed_state_settings(reduction_mode_entry)

    def _process_qxy_limit(self, command):
        """Record the QXY binning: min, max, step and step type."""
        q_min = command.values[0]
        q_max = command.values[1]
        q_step = command.values[2]
        q_step_type = command.values[3]
        self.add_to_processed_state_settings({
            LimitsId.QXY: simple_range(start=q_min,
                                       stop=q_max,
                                       step=q_step,
                                       step_type=q_step_type)
        })

    def _process_compatibility_mode(self, command):
        """Record whether the reduction should run in compatibility mode."""
        self.add_to_processed_state_settings(
            {OtherId.USE_COMPATIBILITY_MODE: command.values[0]})

    def _process_detector_offsets(self, command):
        """
        Record the geometric corrections for a single detector: x/y/z offsets,
        rotation, radius, side translation and the two tilts.
        """
        detector_type = command.values[0]
        # (state id, raw value) pairs, in the order they appear in the command.
        corrections = (
            (DetectorId.CORRECTION_X, command.values[1]),
            (DetectorId.CORRECTION_Y, command.values[2]),
            (DetectorId.CORRECTION_Z, command.values[3]),
            (DetectorId.CORRECTION_ROTATION, command.values[4]),
            (DetectorId.CORRECTION_RADIUS, command.values[5]),
            (DetectorId.CORRECTION_TRANSLATION, command.values[6]),
            (DetectorId.CORRECTION_X_TILT, command.values[7]),
            (DetectorId.CORRECTION_Y_TILT, command.values[8]),
        )
        new_state_entries = {
            state_id: single_entry_with_detector(entry=value, detector_type=detector_type)
            for state_id, value in corrections
        }
        self.add_to_processed_state_settings(new_state_entries)

    def _process_save(self, command):
        """Record the requested save algorithms and the zero-error-free flag.

        The save algorithms come as a list which has to be stored as a
        single element, hence treat_list_as_element=True.
        """
        save_algorithms, save_as_zero_error_free = command.values[0], command.values[1]
        self.add_to_processed_state_settings(
            {OtherId.SAVE_TYPES: save_algorithms,
             OtherId.SAVE_AS_ZERO_ERROR_FREE: save_as_zero_error_free},
            treat_list_as_element=True)

    def _process_user_specified_output_name(self, command):
        """Record the output workspace name chosen by the user."""
        output_name = command.values[0]
        self.add_to_processed_state_settings(
            {OtherId.USER_SPECIFIED_OUTPUT_NAME: output_name})

    def _process_user_specified_output_name_suffix(self, command):
        """Record the suffix to append to the user-specified output name."""
        suffix = command.values[0]
        self.add_to_processed_state_settings(
            {OtherId.USER_SPECIFIED_OUTPUT_NAME_SUFFIX: suffix})

    def _process_use_reduction_mode_as_suffix(self, command):
        """Record whether the reduction mode is appended to output names."""
        use_mode_as_suffix = command.values[0]
        self.add_to_processed_state_settings(
            {OtherId.USE_REDUCTION_MODE_AS_SUFFIX: use_mode_as_suffix})

    def remove_last_user_file(self):
        """Drop the most recently added user-file command.

        See _remove_last_element for the removal semantics.
        """
        self._remove_last_element(NParameterCommandId.USER_FILE)

    def remove_last_scatter_sample(self):
        """Drop the most recently added sample-scatter data command.

        See _remove_last_element for the removal semantics.
        """
        self._remove_last_element(DataCommandId.SAMPLE_SCATTER)

    def remove_last_sample_transmission_and_direct(self):
        """Drop the most recent sample transmission and sample direct commands.

        See _remove_last_element for the removal semantics.
        """
        for command_id in (DataCommandId.SAMPLE_TRANSMISSION,
                           DataCommandId.SAMPLE_DIRECT):
            self._remove_last_element(command_id)

    def remove_last_scatter_can(self):
        """Drop the most recently added can-scatter data command.

        See _remove_last_element for the removal semantics.
        """
        self._remove_last_element(DataCommandId.CAN_SCATTER)

    def remove_last_can_transmission_and_direct(self):
        """Drop the most recent can transmission and can direct commands.

        See _remove_last_element for the removal semantics.
        """
        for command_id in (DataCommandId.CAN_TRANSMISSION,
                           DataCommandId.CAN_DIRECT):
            self._remove_last_element(command_id)

    def _remove_last_element(self, command_id):
        """
        Removes the last instance of a command associated with the command_id.

        This method is vital for batch reduction.
        TODO: more explanation
        @param command_id: the command_id of the command which whose last instance we want to remove
        """
        index_to_remove = None
        for index, element in reversed(list(enumerate(self._commands))):
            if element.command_id == command_id:
                index_to_remove = index
                break
        if index_to_remove is not None:
            del (self._commands[index_to_remove])
        else:
            raise RuntimeError(
                "Tried to delete the last instance of {0}, but none was present in the list of "
                "commands".format(command_id))
# --- Exemplo n.º 17 (0) --- scraped-example separator, not part of the module
    def test_that_single_reduction_evaluates_merged(self):
        """
        System test: run a single SANS2D reduction in MERGED mode and check
        the merged output workspace plus the fitted scale and shift factors
        against reference values.
        """
        # Arrange
        # Build the data information
        file_information_factory = SANSFileInformationFactory()
        file_information = file_information_factory.create_sans_file_information(
            "SANS2D00034484")
        data_builder = get_data_builder(SANSFacility.ISIS, file_information)
        data_builder.set_sample_scatter("SANS2D00034484")
        data_builder.set_sample_transmission("SANS2D00034505")
        data_builder.set_sample_direct("SANS2D00034461")
        data_builder.set_can_scatter("SANS2D00034481")
        data_builder.set_can_transmission("SANS2D00034502")
        # NOTE(review): the can direct run reuses the sample direct run number
        # ("SANS2D00034461") — presumably intentional for this data set.
        data_builder.set_can_direct("SANS2D00034461")

        data_builder.set_calibration("TUBE_SANS2D_BOTH_31681_25Sept15.nxs")
        data_info = data_builder.build()

        # Get the rest of the state from the user file
        user_file = "USER_SANS2D_154E_2p4_4m_M3_Xpress_8mm_SampleChanger.txt"
        user_file_director = StateBuilder.new_instance(
            file_information=file_information,
            data_information=data_info,
            user_filename=user_file)

        state = user_file_director.get_all_states()
        # Override the user-file settings: merged reduction with both the
        # scale and the shift fitted, starting from scale=1.0 / shift=0.0.
        state.reduction.reduction_mode = ReductionMode.MERGED
        state.reduction.merge_fit_mode = FitModeForMerge.BOTH
        state.reduction.merge_scale = 1.0
        state.reduction.merge_shift = 0.0

        state.compatibility.use_compatibility_mode = True

        # Load the sample workspaces
        sample, sample_monitor, transmission_workspace, direct_workspace, \
        can, can_monitor, can_transmission, can_direct = self._load_workspace(state)  # noqa

        # Act
        output_settings = {"OutputWorkspaceMerged": EMPTY_NAME}
        single_reduction_alg = self._run_single_reduction(
            state,
            sample_scatter=sample,
            sample_transmission=transmission_workspace,
            sample_direct=direct_workspace,
            sample_monitor=sample_monitor,
            can_scatter=can,
            can_monitor=can_monitor,
            can_transmission=can_transmission,
            can_direct=can_direct,
            output_settings=output_settings)
        # Pull the merged workspace and the fitted merge factors off the
        # algorithm's output properties.
        output_workspace = single_reduction_alg.getProperty(
            "OutputWorkspaceMerged").value
        output_scale_factor = single_reduction_alg.getProperty(
            "OutScaleFactor").value
        output_shift_factor = single_reduction_alg.getProperty(
            "OutShiftFactor").value

        # Reference values for the fitted factors; origin not visible here —
        # presumably captured from a known-good run of this reduction.
        tolerance = 1e-6
        expected_shift = 0.00278452
        expected_scale = 0.81439154

        self.assertTrue(abs(expected_shift - output_shift_factor) < tolerance)
        self.assertTrue(abs(expected_scale - output_scale_factor) < tolerance)

        # Compare the output of the reduction with the reference
        reference_file_name = "SANS2D_ws_D20_reference_Merged_1D.nxs"
        self._compare_to_reference(output_workspace, reference_file_name)