    def test_that_raises_when_unknown_keyword_is_used(self):
        content = "# MANTID_BATCH_FILE add more text here\n" \
                  "sample_sans,74044,output_as,test,new_key_word,test\n"
        batch_file_path = BatchCsvParserTest._save_to_csv(content)
        parser = BatchCsvParser()
        with self.assertRaises(KeyError):
            parser.parse_batch_file(batch_file_path)
        BatchCsvParserTest._remove_csv(batch_file_path)

    def test_that_raises_when_can_transmission_is_specified_incompletely(self):
        content = "# MANTID_BATCH_FILE add more text here\n" \
                  "sample_sans,test,output_as,test, can_trans,, can_direct_beam, test\n"
        batch_file_path = BatchCsvParserTest._save_to_csv(content)
        parser = BatchCsvParser()
        with self.assertRaises(ValueError):
            parser.parse_batch_file(batch_file_path)
        BatchCsvParserTest._remove_csv(batch_file_path)

    def test_that_raises_when_sample_scatter_is_missing(self):
        content = "# MANTID_BATCH_FILE add more text here\n" \
                  "sample_sans,,output_as,test_file\n"
        batch_file_path = BatchCsvParserTest._save_to_csv(content)
        parser = BatchCsvParser()
        with self.assertRaises(ValueError):
            parser.parse_batch_file(batch_file_path)
        BatchCsvParserTest._remove_csv(batch_file_path)

    def test_raises_if_the_batch_file_uses_key_as_val(self):
        content = "# MANTID_BATCH_FILE add more text here\n" \
                  "sample_sans,sample_trans,74024,sample_direct_beam,74014,can_sans,74019,can_trans,74020," \
                  "can_direct_beam,output_as, first_eim\n"
        batch_file_path = BatchCsvParserTest._save_to_csv(content)
        parser = BatchCsvParser()
        with self.assertRaises(KeyError):
            parser.parse_batch_file(batch_file_path)
        BatchCsvParserTest._remove_csv(batch_file_path)

    def test_that_does_not_raise_when_output_is_missing(self):
        content = "# MANTID_BATCH_FILE add more text here\n" \
                  "sample_sans,test,output_as,\n"
        batch_file_path = BatchCsvParserTest._save_to_csv(content)
        parser = BatchCsvParser()
        try:
            parser.parse_batch_file(batch_file_path)
        except RuntimeError as e:
            self.fail(
                "Batch files are not required to contain output names as these can be autogenerated. "
                "Therefore we did not expect a RuntimeError to be raised when parsing a batch file without an "
                "output name. Error raised was: {}".format(str(e)))
        BatchCsvParserTest._remove_csv(batch_file_path)

    def test_bare_comment_without_hash_ignored(self):
        content = " MANTID_BATCH_FILE,foo,bar"
        batch_file_path = BatchCsvParserTest._save_to_csv(content)
        parser = BatchCsvParser()

        output = parser.parse_batch_file(batch_file_path)
        self.assertEqual(0, len(output))

    def test_that_does_not_return_excluded_keywords(self):
        content = "# MANTID_BATCH_FILE add more text here\n" \
                  "sample_sans,1,sample_trans,2,sample_direct_beam,3,output_as,test_file,user_file,user_test_file\n" \
                  'sample_sans,1,can_sans,2,output_as,test_file2,"","", background_sans, background\n'
        batch_file_path = BatchCsvParserTest._save_to_csv(content)
        parser = BatchCsvParser()

        # Act
        output = parser.parse_batch_file(batch_file_path)

        # Assert
        self.assertEqual(len(output), 2)

        first_line = output[0]
        self.assertEqual(first_line.sample_scatter, "1")
        self.assertEqual(first_line.sample_scatter_period, ALL_PERIODS)
        self.assertEqual(first_line.sample_transmission, "2")
        self.assertEqual(first_line.sample_transmission_period, ALL_PERIODS)
        self.assertEqual(first_line.sample_direct, "3")
        self.assertEqual(first_line.sample_direct_period, ALL_PERIODS)
        self.assertEqual(first_line.output_name, "test_file")
        self.assertEqual(first_line.user_file, "user_test_file")

        second_line = output[1]
        # Should have 3 user specified entries and 2 period entries
        self.assertEqual(second_line.sample_scatter, "1")
        self.assertEqual(second_line.sample_scatter_period, ALL_PERIODS)
        self.assertEqual(second_line.can_scatter, "2")
        self.assertEqual(second_line.can_scatter_period, ALL_PERIODS)
        self.assertEqual(second_line.output_name, "test_file2")

        BatchCsvParserTest._remove_csv(batch_file_path)

    def test_that_parses_two_lines_correctly(self):
        content = "# MANTID_BATCH_FILE add more text here\n" \
                  "sample_sans,1,sample_trans,2,sample_direct_beam,3,output_as,test_file,user_file,user_test_file\n" \
                  "sample_sans,1,can_sans,2,output_as,test_file2\n"
        batch_file_path = BatchCsvParserTest._save_to_csv(content)
        parser = BatchCsvParser()

        # Act
        output = parser.parse_batch_file(batch_file_path)

        # Assert
        self.assertEqual(len(output), 2)

        first_line = output[0]
        self.assertEqual(first_line.sample_scatter, "1")
        self.assertEqual(first_line.sample_scatter_period, ALL_PERIODS)
        self.assertEqual(first_line.sample_transmission, "2")
        self.assertEqual(first_line.sample_transmission_period, ALL_PERIODS)
        self.assertEqual(first_line.sample_direct, "3")
        self.assertEqual(first_line.sample_direct_period, ALL_PERIODS)
        self.assertEqual(first_line.output_name, "test_file")
        self.assertEqual(first_line.user_file, "user_test_file")

        second_line = output[1]
        self.assertEqual(second_line.sample_scatter, "1")
        self.assertEqual(second_line.sample_scatter_period, ALL_PERIODS)
        self.assertEqual(second_line.can_scatter, "2")
        self.assertEqual(second_line.can_scatter_period, ALL_PERIODS)
        self.assertEqual(second_line.output_name, "test_file2")

        BatchCsvParserTest._remove_csv(batch_file_path)

    def test_that_parses_period_selection(self):
        content = "# MANTID_BATCH_FILE add more text here\n" \
                  "sample_sans,1p7,can_sans,2P3,output_as,test_file2\n"
        batch_file_path = BatchCsvParserTest._save_to_csv(content)
        parser = BatchCsvParser()

        # Act
        output = parser.parse_batch_file(batch_file_path)

        # Assert
        self.assertEqual(len(output), 1)

        first_line = output[0]
        self.assertEqual(first_line.sample_scatter, "1")
        self.assertEqual(first_line.sample_scatter_period, 7)
        self.assertEqual(first_line.can_scatter, "2")
        self.assertEqual(first_line.can_scatter_period, 3)
        self.assertEqual(first_line.output_name, "test_file2")

        BatchCsvParserTest._remove_csv(batch_file_path)
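
The tests above rely on two helpers, _save_to_csv and _remove_csv, which are not part of this excerpt. A minimal sketch of what they could look like, assuming they simply write the content to a temporary .csv file and delete it again afterwards (in the full test class they would be static methods on BatchCsvParserTest, and the real implementation may differ):

import os
import tempfile


def _save_to_csv(content):
    # Write the batch file content to a temporary .csv and return its path.
    handle, path = tempfile.mkstemp(suffix=".csv")
    with os.fdopen(handle, "w") as csv_file:
        csv_file.write(content)
    return path


def _remove_csv(path):
    # Remove the temporary .csv again once the test is done.
    if os.path.exists(path):
        os.remove(path)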

Example #10

def BatchReduce(filename, format, plotresults=False, saveAlgs=None, verbose=False,  # noqa
                centreit=False, reducer=None, combineDet=None, save_as_zero_error_free=False):  # noqa
    """
        @param filename: the CSV file with the list of runs to analyse
        @param format: type of file to load, nxs for Nexus, etc.
        @param plotresults: if true and this function is run from Mantidplot a graph will be created for the results of each reduction
        @param saveAlgs: this named algorithm will be passed the name of the results workspace and filename (default = 'SaveRKH').
            Pass a tuple of strings to save to multiple file formats
        @param verbose: set to true to write more information to the log (default=False)
        @param centreit: do centre finding (default=False)
        @param reducer: if to use the command line (default) or GUI reducer object
        @param combineDet: that will be forward to WavRangeReduction (rear, front, both, merged, None)
        @param save_as_zero_error_free: Should the reduced workspaces contain zero errors or not
        @return final_setings: A dictionary with some values of the Reduction - Right Now:(scale, shift)
    """
    if saveAlgs is None:
        saveAlgs = {'SaveRKH': 'txt'}

    # From the old interface
    _ = format
    _ = reducer
    _ = verbose

    if centreit:
        raise RuntimeError("The beam centre finder is currently not supported.")
    if plotresults:
        raise RuntimeError("Plotting the results is currently not supported.")

    # Set up the save algorithms
    save_algs = []

    if saveAlgs:
        for key, _ in list(saveAlgs.items()):
            if key == "SaveRKH":
                save_algs.append(SaveType.RKH)
            elif key == "SaveNexus":
                save_algs.append(SaveType.NEXUS)
            elif key == "SaveNistQxy":
                save_algs.append(SaveType.NIST_QXY)
            elif key == "SaveCanSAS" or key == "SaveCanSAS1D":
                save_algs.append(SaveType.CAN_SAS)
            elif key == "SaveCSV":
                save_algs.append(SaveType.CSV)
            elif key == "SaveNXcanSAS":
                save_algs.append(SaveType.NX_CAN_SAS)
            else:
                raise RuntimeError("The save format {0} is not known.".format(key))
        output_mode = OutputMode.BOTH
    else:
        output_mode = OutputMode.PUBLISH_TO_ADS

    # Get the information from the csv file
    batch_csv_parser = BatchCsvParser()
    parsed_batch_entries = batch_csv_parser.parse_batch_file(filename)

    # Get a state with all existing settings
    for parsed_batch_entry in parsed_batch_entries:
        assert isinstance(parsed_batch_entry, RowEntries)
        # A new user file. If a new user file is provided then it overwrites all settings from the previous
        # user file, otherwise we might have cross-talk between user files.
        if parsed_batch_entry.user_file:
            MaskFile(parsed_batch_entry.user_file)

        # Sample scatter
        sample_scatter = parsed_batch_entry.sample_scatter
        sample_scatter_period = parsed_batch_entry.sample_scatter_period
        AssignSample(sample_run=sample_scatter, period=sample_scatter_period)

        # Sample transmission
        if parsed_batch_entry.sample_transmission and parsed_batch_entry.sample_direct:
            sample_direct = parsed_batch_entry.sample_direct
            sample_direct_period = parsed_batch_entry.sample_direct_period

            sample_transmission = parsed_batch_entry.sample_transmission
            sample_transmission_period = parsed_batch_entry.sample_transmission_period

            TransmissionSample(sample=sample_transmission, direct=sample_direct,
                               period_t=sample_transmission_period, period_d=sample_direct_period)

        # Can scatter
        if parsed_batch_entry.can_scatter:
            can_scatter = parsed_batch_entry.can_scatter
            can_scatter_period = parsed_batch_entry.can_scatter_period
            AssignCan(can_run=can_scatter, period=can_scatter_period)

        # Can transmission
        if parsed_batch_entry.can_transmission and parsed_batch_entry.can_direct:
            can_transmission = parsed_batch_entry.can_transmission
            can_transmission_period = parsed_batch_entry.can_transmission_period
            can_direct = parsed_batch_entry.can_direct
            can_direct_period = parsed_batch_entry.can_direct_period

            TransmissionCan(can=can_transmission, direct=can_direct,
                            period_t=can_transmission_period, period_d=can_direct_period)

        # Name of the output. We need to modify the name according to the setup of the old reduction mechanism
        output_name = parsed_batch_entry.output_name

        # In addition to the output name the user can request, via combineDet, an extra suffix (on top of the
        # suffix the user can already set -- this existed in the old interface, so we have to keep providing it)
        use_reduction_mode_as_suffix = combineDet is not None

        # Apply save options
        if save_algs:
            set_save(save_algorithms=save_algs, save_as_zero_error_free=save_as_zero_error_free)

        # Run the reduction for a single batch entry
        reduced_workspace_name = WavRangeReduction(combineDet=combineDet, output_name=output_name,
                                                   output_mode=output_mode,
                                                   use_reduction_mode_as_suffix=use_reduction_mode_as_suffix)

        # Remove the settings which were very specific for this single reduction which are:
        # 1. The last user file (if any was set)
        # 2. The last scatter entry
        # 3. The last scatter transmission and direct entry (if any were set)
        # 4. The last can scatter ( if any was set)
        # 5. The last can transmission and direct entry (if any were set)
        if parsed_batch_entry.user_file:
            director.remove_last_user_file()

        director.remove_last_scatter_sample()

        if parsed_batch_entry.sample_transmission and parsed_batch_entry.sample_direct:
            director.remove_last_sample_transmission_and_direct()

        if parsed_batch_entry.can_scatter:
            director.remove_last_scatter_can()

        if parsed_batch_entry.can_transmission and parsed_batch_entry.can_direct:
            director.remove_last_can_transmission_and_direct()

        # Plot the results if that was requested; the flag value 1 comes from the old version.
        if plotresults == 1:
            if AnalysisDataService.doesExist(reduced_workspace_name):
                workspace = AnalysisDataService.retrieve(reduced_workspace_name)
                if isinstance(workspace, WorkspaceGroup):
                    for ws in workspace:
                        PlotResult(ws.name())
                else:
                    PlotResult(workspace.name())
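
A minimal usage sketch for the function above, mirroring how the old ISIS SANS batch interface is typically driven. The user file and batch file names are placeholders, and it assumes the usual instrument and ISIS command interface setup has already been done:

# Placeholder file names, for illustration only.
MaskFile('USER_FILE_PLACEHOLDER.txt')                # load the user (mask) file with the reduction settings
BatchReduce('BATCH_FILE_PLACEHOLDER.csv', 'nxs',
            saveAlgs={'SaveNexus': 'nxs'},           # save each reduced workspace with SaveNexus
            combineDet='rear',                       # forwarded to WavRangeReduction
            save_as_zero_error_free=True)            # strip zero errors when saving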

    def test_handles_unknown_data(self):
        parser = BatchCsvParser()
        with self.assertRaises(SyntaxError):
            parser.parse_batch_file('LOQ74044.nxs')

    def test_empty_batch_file_name_throws(self):
        batch_file_path = "not_there.csv"
        parser = BatchCsvParser()
        with self.assertRaises(RuntimeError):
            parser.parse_batch_file(batch_file_path)
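
Assuming these tests live in a standard unittest test class (with unittest imported at the top of the file), the module would typically end with the usual entry point so it can be run standalone:

if __name__ == '__main__':
    unittest.main()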