Code example #1
    def test_that_does_not_raise_when_output_is_missing(self):
        content = "# MANTID_BATCH_FILE add more text here\n" \
                  "sample_sans,test,output_as,\n"
        batch_file_path = BatchCsvParserTest._save_to_csv(content)
        parser = BatchCsvParser(batch_file_path)
        try:
            parser.parse_batch_file()
        except RuntimeError as e:
            self.fail("Batch files are not required to contain output names as these can be autogenerated. "
                      "Therefore we did not expect a RuntimeError to be raised when parsing a batch file without an "
                      "output name. Error raised was: {}".format(str(e)))
        BatchCsvParserTest._remove_csv(batch_file_path)
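
These tests rely on the helpers BatchCsvParserTest._save_to_csv and BatchCsvParserTest._remove_csv, which are not shown on this page. Below is a minimal sketch of what such helpers could look like, written as module-level functions and assuming tempfile-based temporary files; the actual helpers in the Mantid test suite may differ.

import os
import tempfile

def _save_to_csv(content):
    # Write the batch CSV content to a temporary file and return its path.
    handle, path = tempfile.mkstemp(suffix=".csv")
    with os.fdopen(handle, "w") as csv_file:
        csv_file.write(content)
    return path

def _remove_csv(path):
    # Clean up the temporary CSV file created for a test.
    if os.path.exists(path):
        os.remove(path)
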
Code example #2
    def test_that_does_not_raise_when_output_is_missing(self):
        content = "# MANTID_BATCH_FILE add more text here\n" \
                  "sample_sans,test,output_as,\n"
        batch_file_path = BatchCsvParserTest._save_to_csv(content)
        parser = BatchCsvParser(batch_file_path)
        try:
            parser.parse_batch_file()
        except RuntimeError as e:
            self.fail(
                "Batch files are not required to contain output names as these can be autogenerated. "
                "Therefore we did not expect a RuntimeError to be raised when parsing a batch file without an "
                "output name. Error raised was: {}".format(str(e)))
        BatchCsvParserTest._remove_csv(batch_file_path)
Code example #3
    def test_that_parses_two_lines_correctly(self):
        content = "# MANTID_BATCH_FILE add more text here\n" \
                   "sample_sans,1,sample_trans,2,sample_direct_beam,3,output_as,test_file,user_file,user_test_file\n" \
                   "sample_sans,1,can_sans,2,output_as,test_file2\n"
        batch_file_path = BatchCsvParserTest._save_to_csv(content)
        parser = BatchCsvParser(batch_file_path)

        # Act
        output = parser.parse_batch_file()

        # Assert
        self.assertTrue(len(output) == 2)

        first_line = output[0]
        # Should have 5 user specified entries and 3 period entries
        self.assertTrue(len(first_line) == 8)
        self.assertTrue(first_line[BatchReductionEntry.SampleScatter] == "1")
        self.assertTrue(first_line[BatchReductionEntry.SampleScatterPeriod] == ALL_PERIODS)
        self.assertTrue(first_line[BatchReductionEntry.SampleTransmission] == "2")
        self.assertTrue(first_line[BatchReductionEntry.SampleTransmissionPeriod] == ALL_PERIODS)
        self.assertTrue(first_line[BatchReductionEntry.SampleDirect] == "3")
        self.assertTrue(first_line[BatchReductionEntry.SampleDirectPeriod] == ALL_PERIODS)
        self.assertTrue(first_line[BatchReductionEntry.Output] == "test_file")
        self.assertTrue(first_line[BatchReductionEntry.UserFile] == "user_test_file")
        second_line = output[1]

        # Should have 3 user specified entries and 2 period entries
        self.assertTrue(len(second_line) == 5)
        self.assertTrue(second_line[BatchReductionEntry.SampleScatter] == "1")
        self.assertTrue(second_line[BatchReductionEntry.SampleScatterPeriod] == ALL_PERIODS)
        self.assertTrue(second_line[BatchReductionEntry.CanScatter] == "2")
        self.assertTrue(second_line[BatchReductionEntry.CanScatterPeriod] == ALL_PERIODS)
        self.assertTrue(second_line[BatchReductionEntry.Output] == "test_file2")

        BatchCsvParserTest._remove_csv(batch_file_path)
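
For clarity, the dictionary the test above expects parse_batch_file to return for the first CSV row, written out as a literal (values copied from the assertions; BatchReductionEntry and ALL_PERIODS are the same names the test imports):

# Purely illustrative: the expected shape of the first parsed row.
expected_first_line = {
    BatchReductionEntry.SampleScatter: "1",
    BatchReductionEntry.SampleScatterPeriod: ALL_PERIODS,
    BatchReductionEntry.SampleTransmission: "2",
    BatchReductionEntry.SampleTransmissionPeriod: ALL_PERIODS,
    BatchReductionEntry.SampleDirect: "3",
    BatchReductionEntry.SampleDirectPeriod: ALL_PERIODS,
    BatchReductionEntry.Output: "test_file",
    BatchReductionEntry.UserFile: "user_test_file",
}
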
Code example #4
File: run_tab_presenter.py    Project: luzpaz/mantid
    def on_batch_file_load(self):
        """
        Loads a batch file and populates the batch table based on that.
        """
        try:
            # 1. Get the batch file from the view
            batch_file_path = self._view.get_batch_file_path()

            if not batch_file_path:
                return

            if not os.path.exists(batch_file_path):
                raise RuntimeError("The batch file path {} does not exist. Make sure a valid batch file path"
                                   " has been specified.".format(batch_file_path))

            self._table_model.batch_file = batch_file_path

            # 2. Read the batch file
            batch_file_parser = BatchCsvParser(batch_file_path)
            parsed_rows = batch_file_parser.parse_batch_file()

            # 3. Populate the table
            self._table_model.clear_table_entries()
            for index, row in enumerate(parsed_rows):
                self._add_row_to_table_model(row, index)
            self._table_model.remove_table_entries([len(parsed_rows)])

            self.update_view_from_table_model()

            self._beam_centre_presenter.on_update_rows()
            self._masking_table_presenter.on_update_rows()

        except RuntimeError as e:
            self.sans_logger.error("Loading of the batch file failed. {}".format(str(e)))
            self.display_warning_box('Warning', 'Loading of the batch file failed', str(e))
Code example #5
    def test_that_does_not_return_excluded_keywords(self):
        content = "# MANTID_BATCH_FILE add more text here\n" \
                   "sample_sans,1,sample_trans,2,sample_direct_beam,3,output_as,test_file,user_file,user_test_file\n" \
                   "sample_sans,1,can_sans,2,output_as,test_file2,"","", background_sans, background\n"
        batch_file_path = BatchCsvParserTest._save_to_csv(content)
        parser = BatchCsvParser(batch_file_path)

        # Act
        output = parser.parse_batch_file()

        # Assert
        self.assertEqual(len(output), 2)

        first_line = output[0]
        # Should have 5 user specified entries and 3 period entries
        self.assertEqual(len(first_line), 8)
        self.assertEqual(first_line[BatchReductionEntry.SampleScatter], "1")
        self.assertEqual(first_line[BatchReductionEntry.SampleScatterPeriod],
                         ALL_PERIODS)
        self.assertEqual(first_line[BatchReductionEntry.SampleTransmission],
                         "2")
        self.assertEqual(
            first_line[BatchReductionEntry.SampleTransmissionPeriod],
            ALL_PERIODS)
        self.assertEqual(first_line[BatchReductionEntry.SampleDirect], "3")
        self.assertEqual(first_line[BatchReductionEntry.SampleDirectPeriod],
                         ALL_PERIODS)
        self.assertEqual(first_line[BatchReductionEntry.Output], "test_file")
        self.assertEqual(first_line[BatchReductionEntry.UserFile],
                         "user_test_file")
        second_line = output[1]

        # Should have 3 user specified entries and 2 period entries
        self.assertEqual(len(second_line), 5)
        self.assertEqual(second_line[BatchReductionEntry.SampleScatter], "1")
        self.assertEqual(second_line[BatchReductionEntry.SampleScatterPeriod],
                         ALL_PERIODS)
        self.assertEqual(second_line[BatchReductionEntry.CanScatter], "2")
        self.assertEqual(second_line[BatchReductionEntry.CanScatterPeriod],
                         ALL_PERIODS)
        self.assertEqual(second_line[BatchReductionEntry.Output], "test_file2")

        BatchCsvParserTest._remove_csv(batch_file_path)
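
The second CSV row in the test above contains empty fields and a background_sans entry that the parser is expected to drop. The snippet below is an illustration only, not Mantid's BatchCsvParser logic, of how key/value pairs from such a row could be filtered against an exclusion set; EXCLUDED_KEYWORDS and filter_row are hypothetical names introduced for this sketch.

# Illustration only -- not the actual BatchCsvParser implementation.
EXCLUDED_KEYWORDS = {"background_sans"}  # hypothetical exclusion set

def filter_row(tokens):
    # Pair up the flat key,value,key,value,... tokens and drop empty or excluded keys.
    entries = {}
    for key, value in zip(tokens[0::2], tokens[1::2]):
        key = key.strip().lower()
        if key and key not in EXCLUDED_KEYWORDS:
            entries[key] = value.strip()
    return entries

row = "sample_sans,1,can_sans,2,output_as,test_file2,,, background_sans, background".split(",")
print(filter_row(row))  # {'sample_sans': '1', 'can_sans': '2', 'output_as': 'test_file2'}
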
Code example #6
    def test_that_parses_period_selection(self):
        content = "# MANTID_BATCH_FILE add more text here\n" \
                   "sample_sans,1p7,can_sans,2P3,output_as,test_file2\n"
        batch_file_path = BatchCsvParserTest._save_to_csv(content)
        parser = BatchCsvParser(batch_file_path)

        # Act
        output = parser.parse_batch_file()

        # Assert
        self.assertTrue(len(output) == 1)

        first_line = output[0]
        # Should have 3 user specified entries and 2 period entries
        self.assertTrue(len(first_line) == 5)
        self.assertTrue(first_line[BatchReductionEntry.SampleScatter] == "1")
        self.assertTrue(first_line[BatchReductionEntry.SampleScatterPeriod] == 7)
        self.assertTrue(first_line[BatchReductionEntry.CanScatter] == "2")
        self.assertTrue(first_line[BatchReductionEntry.CanScatterPeriod] == 3)
        self.assertTrue(first_line[BatchReductionEntry.Output] == "test_file2")

        BatchCsvParserTest._remove_csv(batch_file_path)
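
The test above exercises period suffixes such as "1p7" and "2P3": the digits after a case-insensitive "p" select a period, while a bare run number falls back to ALL_PERIODS. A minimal sketch of how such an entry could be split is shown below; it is illustrative only, not the BatchCsvParser implementation, and the -1 default standing in for ALL_PERIODS is an assumption.

import re

# Illustrative only -- not the actual BatchCsvParser logic.
_RUN_WITH_PERIOD = re.compile(r"^(?P<run>\d+)[pP](?P<period>\d+)$")

def split_run_and_period(entry, all_periods=-1):
    # Return (run_number, period); the period defaults to all_periods when no suffix is given.
    match = _RUN_WITH_PERIOD.match(entry.strip())
    if match:
        return match.group("run"), int(match.group("period"))
    return entry.strip(), all_periods

print(split_run_and_period("1p7"))  # ('1', 7)
print(split_run_and_period("2P3"))  # ('2', 3)
print(split_run_and_period("1"))    # ('1', -1), i.e. all periods
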
Code example #7
File: run_tab_presenter.py    Project: ebknudsen/mantid
    def on_batch_file_load(self):
        """
        Loads a batch file and populates the batch table based on that.
        """
        try:
            # 1. Get the batch file from the view
            batch_file_path = self._view.get_batch_file_path()

            if not batch_file_path:
                return

            if not os.path.exists(batch_file_path):
                raise RuntimeError(
                    "The batch file path {} does not exist. Make sure a valid batch file path"
                    " has been specified.".format(batch_file_path))

            # 2. Read the batch file
            batch_file_parser = BatchCsvParser(batch_file_path)
            parsed_rows = batch_file_parser.parse_batch_file()
            # 3. Clear the table
            self._view.clear_table()

            # 4. Populate the table
            for row in parsed_rows:
                self._populate_row_in_table(row)

            # 5. Populate the selected instrument and the correct detector selection
            self._setup_instrument_specific_settings()

            # 6. Perform calls on child presenters
            self._masking_table_presenter.on_update_rows()
            self._settings_diagnostic_tab_presenter.on_update_rows()
            self._beam_centre_presenter.on_update_rows()

        except RuntimeError as e:
            self.sans_logger.error(
                "Loading of the batch file failed. Ensure that the path to your files has been added"
                " to the Mantid search directories! See here for more details: {}"
                .format(str(e)))
Code example #8
def BatchReduce(
        filename,
        format,
        plotresults=False,
        saveAlgs=None,
        verbose=False,  # noqa
        centreit=False,
        reducer=None,
        combineDet=None,
        save_as_zero_error_free=False):  # noqa
    """
        @param filename: the CSV file with the list of runs to analyse
        @param format: type of file to load, nxs for Nexus, etc.
        @param plotresults: if true, and this function is run from MantidPlot, a graph will be created for the results of each reduction
        @param saveAlgs: this named algorithm will be passed the name of the results workspace and filename (default = 'SaveRKH').
            Pass a tuple of strings to save to multiple file formats
        @param verbose: set to true to write more information to the log (default=False)
        @param centreit: do centre finding (default=False)
        @param reducer: whether to use the command line (default) or GUI reducer object
        @param combineDet: value that will be forwarded to WavRangeReduction (rear, front, both, merged, None)
        @param save_as_zero_error_free: Should the reduced workspaces contain zero errors or not
        @return final_settings: A dictionary with some values of the reduction, currently (scale, shift)
    """
    if saveAlgs is None:
        saveAlgs = {'SaveRKH': 'txt'}

    # From the old interface
    _ = format
    _ = reducer
    _ = verbose

    if centreit:
        raise RuntimeError(
            "The beam centre finder is currently not supported.")
    if plotresults:
        raise RuntimeError("Plotting the results is currenlty not supported.")

    # Set up the save algorithms
    save_algs = []

    if saveAlgs:
        for key, _ in list(saveAlgs.items()):
            if key == "SaveRKH":
                save_algs.append(SaveType.RKH)
            elif key == "SaveNexus":
                save_algs.append(SaveType.Nexus)
            elif key == "SaveNistQxy":
                save_algs.append(SaveType.NistQxy)
            elif key == "SaveCanSAS" or key == "SaveCanSAS1D":
                save_algs.append(SaveType.CanSAS)
            elif key == "SaveCSV":
                save_algs.append(SaveType.CSV)
            elif key == "SaveNXcanSAS":
                save_algs.append(SaveType.NXcanSAS)
            else:
                raise RuntimeError(
                    "The save format {0} is not known.".format(key))
        output_mode = OutputMode.Both
    else:
        output_mode = OutputMode.PublishToADS

    # Get the information from the csv file
    batch_csv_parser = BatchCsvParser(filename)
    parsed_batch_entries = batch_csv_parser.parse_batch_file()

    # Get a state with all existing settings
    for parsed_batch_entry in parsed_batch_entries:
        # A new user file. If a new user file is provided then it will overwrite all other settings;
        # otherwise we might have cross-talk between user files.
        if BatchReductionEntry.UserFile in list(parsed_batch_entry.keys()):
            user_file = parsed_batch_entry[BatchReductionEntry.UserFile]
            MaskFile(user_file)

        # Sample scatter
        sample_scatter = parsed_batch_entry[BatchReductionEntry.SampleScatter]
        sample_scatter_period = parsed_batch_entry[
            BatchReductionEntry.SampleScatterPeriod]
        AssignSample(sample_run=sample_scatter, period=sample_scatter_period)

        # Sample transmission
        if (BatchReductionEntry.SampleTransmission in list(
                parsed_batch_entry.keys()) and BatchReductionEntry.SampleDirect
                in list(parsed_batch_entry.keys())):
            sample_transmission = parsed_batch_entry[
                BatchReductionEntry.SampleTransmission]
            sample_transmission_period = parsed_batch_entry[
                BatchReductionEntry.SampleTransmissionPeriod]
            sample_direct = parsed_batch_entry[
                BatchReductionEntry.SampleDirect]
            sample_direct_period = parsed_batch_entry[
                BatchReductionEntry.SampleDirectPeriod]
            TransmissionSample(sample=sample_transmission,
                               direct=sample_direct,
                               period_t=sample_transmission_period,
                               period_d=sample_direct_period)

        # Can scatter
        if BatchReductionEntry.CanScatter in list(parsed_batch_entry.keys()):
            can_scatter = parsed_batch_entry[BatchReductionEntry.CanScatter]
            can_scatter_period = parsed_batch_entry[
                BatchReductionEntry.CanScatterPeriod]
            AssignCan(can_run=can_scatter, period=can_scatter_period)

        # Can transmission
        if (BatchReductionEntry.CanTransmission in list(
                parsed_batch_entry.keys()) and BatchReductionEntry.CanDirect
                in list(parsed_batch_entry.keys())):
            can_transmission = parsed_batch_entry[
                BatchReductionEntry.CanTransmission]
            can_transmission_period = parsed_batch_entry[
                BatchReductionEntry.CanTransmissionPeriod]
            can_direct = parsed_batch_entry[BatchReductionEntry.CanDirect]
            can_direct_period = parsed_batch_entry[
                BatchReductionEntry.CanDirectPeriod]
            TransmissionCan(can=can_transmission,
                            direct=can_direct,
                            period_t=can_transmission_period,
                            period_d=can_direct_period)

        # Name of the output. We need to modify the name according to the setup of the old reduction mechanism
        output_name = parsed_batch_entry[BatchReductionEntry.Output]

        # In addition to the output name the user can specify an extra suffix with combineDet (on top of the
        # suffix that the user can already set -- this was available previously, so we have to provide it)
        use_reduction_mode_as_suffix = combineDet is not None

        # Apply save options
        if save_algs:
            set_save(save_algorithms=save_algs,
                     save_as_zero_error_free=save_as_zero_error_free)

        # Run the reduction for a single batch entry
        reduced_workspace_name = WavRangeReduction(
            combineDet=combineDet,
            output_name=output_name,
            output_mode=output_mode,
            use_reduction_mode_as_suffix=use_reduction_mode_as_suffix)

        # Remove the settings which were very specific for this single reduction which are:
        # 1. The last user file (if any was set)
        # 2. The last scatter entry
        # 3. The last scatter transmission and direct entry (if any were set)
        # 4. The last can scatter (if any was set)
        # 5. The last can transmission and direct entry (if any were set)
        if BatchReductionEntry.UserFile in list(parsed_batch_entry.keys()):
            director.remove_last_user_file()
        director.remove_last_scatter_sample()

        if (BatchReductionEntry.SampleTransmission in list(
                parsed_batch_entry.keys()) and BatchReductionEntry.SampleDirect
                in list(parsed_batch_entry.keys())):  # noqa
            director.remove_last_sample_transmission_and_direct()

        if BatchReductionEntry.CanScatter in list(parsed_batch_entry.keys()):
            director.remove_last_scatter_can()

        if (BatchReductionEntry.CanTransmission in list(
                parsed_batch_entry.keys()) and BatchReductionEntry.CanDirect
                in list(parsed_batch_entry.keys())):
            director.remove_last_can_transmission_and_direct()

        # Plot the results if that was requested; the flag value 1 is from the old version.
        if plotresults == 1:
            if AnalysisDataService.doesExist(reduced_workspace_name):
                workspace = AnalysisDataService.retrieve(
                    reduced_workspace_name)
                if isinstance(workspace, WorkspaceGroup):
                    for ws in workspace:
                        PlotResult(ws.getName())
                else:
                    PlotResult(workspace.getName())
Code example #9
def BatchReduce(filename, format, plotresults=False, saveAlgs=None, verbose=False,  # noqa
                centreit=False, reducer=None, combineDet=None, save_as_zero_error_free=False):  # noqa
    """
        @param filename: the CSV file with the list of runs to analyse
        @param format: type of file to load, nxs for Nexus, etc.
        @param plotresults: if true, and this function is run from MantidPlot, a graph will be created for the results of each reduction
        @param saveAlgs: this named algorithm will be passed the name of the results workspace and filename (default = 'SaveRKH').
            Pass a tuple of strings to save to multiple file formats
        @param verbose: set to true to write more information to the log (default=False)
        @param centreit: do centre finding (default=False)
        @param reducer: whether to use the command line (default) or GUI reducer object
        @param combineDet: value that will be forwarded to WavRangeReduction (rear, front, both, merged, None)
        @param save_as_zero_error_free: Should the reduced workspaces contain zero errors or not
        @return final_settings: A dictionary with some values of the reduction, currently (scale, shift)
    """
    if saveAlgs is None:
        saveAlgs = {'SaveRKH': 'txt'}

    # From the old interface
    _ = format
    _ = reducer
    _ = verbose

    if centreit:
        raise RuntimeError("The beam centre finder is currently not supported.")
    if plotresults:
        raise RuntimeError("Plotting the results is currenlty not supported.")

    # Set up the save algorithms
    save_algs = []

    if saveAlgs:
        for key, _ in list(saveAlgs.items()):
            if key == "SaveRKH":
                save_algs.append(SaveType.RKH)
            elif key == "SaveNexus":
                save_algs.append(SaveType.Nexus)
            elif key == "SaveNistQxy":
                save_algs.append(SaveType.NistQxy)
            elif key == "SaveCanSAS" or key == "SaveCanSAS1D":
                save_algs.append(SaveType.CanSAS)
            elif key == "SaveCSV":
                save_algs.append(SaveType.CSV)
            elif key == "SaveNXcanSAS":
                save_algs.append(SaveType.NXcanSAS)
            else:
                raise RuntimeError("The save format {0} is not known.".format(key))
        output_mode = OutputMode.Both
    else:
        output_mode = OutputMode.PublishToADS

    # Get the information from the csv file
    batch_csv_parser = BatchCsvParser(filename)
    parsed_batch_entries = batch_csv_parser.parse_batch_file()

    # Get a state with all existing settings
    for parsed_batch_entry in parsed_batch_entries:
        # A new user file. If a new user file is provided then it will overwrite all other settings;
        # otherwise we might have cross-talk between user files.
        if BatchReductionEntry.UserFile in list(parsed_batch_entry.keys()):
            user_file = parsed_batch_entry[BatchReductionEntry.UserFile]
            MaskFile(user_file)

        # Sample scatter
        sample_scatter = parsed_batch_entry[BatchReductionEntry.SampleScatter]
        sample_scatter_period = parsed_batch_entry[BatchReductionEntry.SampleScatterPeriod]
        AssignSample(sample_run=sample_scatter, period=sample_scatter_period)

        # Sample transmission
        if (BatchReductionEntry.SampleTransmission in list(parsed_batch_entry.keys()) and
           BatchReductionEntry.SampleDirect in list(parsed_batch_entry.keys())):
            sample_transmission = parsed_batch_entry[BatchReductionEntry.SampleTransmission]
            sample_transmission_period = parsed_batch_entry[BatchReductionEntry.SampleTransmissionPeriod]
            sample_direct = parsed_batch_entry[BatchReductionEntry.SampleDirect]
            sample_direct_period = parsed_batch_entry[BatchReductionEntry.SampleDirectPeriod]
            TransmissionSample(sample=sample_transmission, direct=sample_direct,
                               period_t=sample_transmission_period, period_d=sample_direct_period)

        # Can scatter
        if BatchReductionEntry.CanScatter in list(parsed_batch_entry.keys()):
            can_scatter = parsed_batch_entry[BatchReductionEntry.CanScatter]
            can_scatter_period = parsed_batch_entry[BatchReductionEntry.CanScatterPeriod]
            AssignCan(can_run=can_scatter, period=can_scatter_period)

        # Can transmission
        if (BatchReductionEntry.CanTransmission in list(parsed_batch_entry.keys()) and
           BatchReductionEntry.CanDirect in list(parsed_batch_entry.keys())):
            can_transmission = parsed_batch_entry[BatchReductionEntry.CanTransmission]
            can_transmission_period = parsed_batch_entry[BatchReductionEntry.CanTransmissionPeriod]
            can_direct = parsed_batch_entry[BatchReductionEntry.CanDirect]
            can_direct_period = parsed_batch_entry[BatchReductionEntry.CanDirectPeriod]
            TransmissionCan(can=can_transmission, direct=can_direct,
                            period_t=can_transmission_period, period_d=can_direct_period)

        # Name of the output. We need to modify the name according to the setup of the old reduction mechanism
        output_name = parsed_batch_entry[BatchReductionEntry.Output]

        # In addition to the output name the user can specify an extra suffix with combineDet (on top of the
        # suffix that the user can already set -- this was available previously, so we have to provide it)
        use_reduction_mode_as_suffix = combineDet is not None

        # Apply save options
        if save_algs:
            set_save(save_algorithms=save_algs, save_as_zero_error_free=save_as_zero_error_free)

        # Run the reduction for a single batch entry
        reduced_workspace_name = WavRangeReduction(combineDet=combineDet, output_name=output_name,
                                                   output_mode=output_mode,
                                                   use_reduction_mode_as_suffix=use_reduction_mode_as_suffix)

        # Remove the settings which were very specific for this single reduction which are:
        # 1. The last user file (if any was set)
        # 2. The last scatter entry
        # 3. The last scatter transmission and direct entry (if any were set)
        # 4. The last can scatter (if any was set)
        # 5. The last can transmission and direct entry (if any were set)
        if BatchReductionEntry.UserFile in list(parsed_batch_entry.keys()):
            director.remove_last_user_file()
        director.remove_last_scatter_sample()

        if (BatchReductionEntry.SampleTransmission in list(parsed_batch_entry.keys()) and
            BatchReductionEntry.SampleDirect in list(parsed_batch_entry.keys())):  # noqa
            director.remove_last_sample_transmission_and_direct()

        if BatchReductionEntry.CanScatter in list(parsed_batch_entry.keys()):
            director.remove_last_scatter_can()

        if (BatchReductionEntry.CanTransmission in list(parsed_batch_entry.keys()) and
           BatchReductionEntry.CanDirect in list(parsed_batch_entry.keys())):
            director.remove_last_can_transmission_and_direct()

        # Plot the results if that was requested; the flag value 1 is from the old version.
        if plotresults == 1:
            if AnalysisDataService.doesExist(reduced_workspace_name):
                workspace = AnalysisDataService.retrieve(reduced_workspace_name)
                if isinstance(workspace, WorkspaceGroup):
                    for ws in workspace:
                        PlotResult(ws.getName())
                else:
                    PlotResult(workspace.getName())
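
For reference, a hypothetical call to BatchReduce following its docstring; the CSV file name is a placeholder and the chosen format, save algorithms, and detector option are only examples.

# Hypothetical usage based on the docstring above; "sans_batch.csv" is a placeholder.
scale_and_shift = BatchReduce("sans_batch.csv", "nxs",
                              saveAlgs={"SaveRKH": "txt", "SaveNXcanSAS": "h5"},
                              combineDet="merged",
                              save_as_zero_error_free=True)
# Per the docstring, the return value is a dictionary with scale and shift information.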