Example #1
    def test_LoadPRFFile(self):
        """ Test to load a .prf file
        """
        # 1. Create a test .prf file
        prffilename = "test.prf"
        self._createPrfFile(prffilename)

        # 2. Execute the algorithm
        alg_test = run_algorithm("LoadFullprofFile",
                Filename = prffilename,
                OutputWorkspace = "Data",
                PeakParameterWorkspace = "Info")

        self.assertTrue(alg_test.isExecuted())

        # 3. Check data
        dataws = AnalysisDataService.retrieve("Data")
        self.assertEqual(dataws.getNumberHistograms(), 4)
        self.assertEqual(len(dataws.readX(0)), 36)

        #    value
        self.assertEqual(dataws.readX(0)[13], 5026.3223)
        self.assertEqual(dataws.readY(1)[30], 0.3819)

        # 4. Clean
        os.remove(prffilename)
        AnalysisDataService.remove("Data")
        AnalysisDataService.remove("Info")


        return
Example #2
 def test_setTitle(self):        
     run_algorithm('CreateWorkspace', OutputWorkspace='ws1',DataX=[1.,2.,3.], DataY=[2.,3.], DataE=[2.,3.],UnitX='TOF')
     ws1 = AnalysisDataService['ws1']
     title = 'test_title'
     ws1.setTitle(title)
     self.assertEqual(title, ws1.getTitle())
     AnalysisDataService.remove(ws1.getName())
Example #3
    def test_LoadHKLFile(self):
        """ Test to load a .hkl file
        """
        # 1. Create a test file
        hklfilename = "test.hkl"
        self._createHKLFile(hklfilename)

        # 2. Execute the algorithm
        alg_test = run_algorithm("LoadFullprofFile", Filename = hklfilename,
                OutputWorkspace = "Foo", PeakParameterWorkspace = "PeakParameterTable")

        self.assertTrue(alg_test.isExecuted())

        # 3. Verify some values
        tablews = AnalysisDataService.retrieve("PeakParameterTable")
        self.assertEqual(4, tablews.rowCount())

        #   alpha of (11 5 1)/Row 0
        self.assertEqual(0.34252, tablews.cell(0, 3))

        # 4. Delete the test hkl file
        os.remove(hklfilename)
        AnalysisDataService.remove("PeakParameterTable")
        AnalysisDataService.remove("Foo")

        return
Example #4
    def test_batch_reduction_on_multiperiod_file(self):
        # Arrange
        # Build the data information
        data_builder = get_data_builder(SANSFacility.ISIS)
        data_builder.set_sample_scatter("SANS2D0005512")

        data_info = data_builder.build()

        # Get the rest of the state from the user file
        user_file_director = StateDirectorISIS(data_info)
        user_file_director.set_user_file("MASKSANS2Doptions.091A")
        # Set the reduction mode to LAB
        user_file_director.set_reduction_builder_reduction_mode(ISISReductionMode.LAB)
        state = user_file_director.construct()

        # Act
        states = [state]
        self._run_batch_reduction(states, use_optimizations=False)

        # Assert
        # We only assert that the expected workspaces exist on the ADS
        expected_workspaces = ["5512p1rear_1D_2.0_14.0Phi-45.0_45.0", "5512p2rear_1D_2.0_14.0Phi-45.0_45.0",
                               "5512p3rear_1D_2.0_14.0Phi-45.0_45.0", "5512p4rear_1D_2.0_14.0Phi-45.0_45.0",
                               "5512p5rear_1D_2.0_14.0Phi-45.0_45.0", "5512p6rear_1D_2.0_14.0Phi-45.0_45.0",
                               "5512p7rear_1D_2.0_14.0Phi-45.0_45.0", "5512p8rear_1D_2.0_14.0Phi-45.0_45.0",
                               "5512p9rear_1D_2.0_14.0Phi-45.0_45.0", "5512p10rear_1D_2.0_14.0Phi-45.0_45.0",
                               "5512p11rear_1D_2.0_14.0Phi-45.0_45.0", "5512p12rear_1D_2.0_14.0Phi-45.0_45.0",
                               "5512p13rear_1D_2.0_14.0Phi-45.0_45.0"]
        for element in expected_workspaces:
            self.assertTrue(AnalysisDataService.doesExist(element))

        # Clean up
        for element in expected_workspaces:
            AnalysisDataService.remove(element)
Example #5
    def test_removing_from_ads_calls_any_change_handle(self):
        CreateSampleWorkspace(OutputWorkspace="ws1")

        self.project.anyChangeHandle = mock.MagicMock()
        ADS.remove("ws1")

        self.assertEqual(1, self.project.anyChangeHandle.call_count)
Example #6
    def test_saveGSS(self):
        """ Test to Save a GSAS file to match V-drive
        """
        # Create a test data file and workspace
        binfilename = "testbin.dat"
        self._createBinFile(binfilename)

        datawsname = "TestInputWorkspace"
        self._createDataWorkspace(datawsname)

        # Execute
        alg_test = run_algorithm("SaveVulcanGSS", 
                InputWorkspace = datawsname,
                BinFilename = binfilename,
                OutputWorkspace = datawsname+"_rebinned",
                GSSFilename = "tempout.gda")

        self.assertTrue(alg_test.isExecuted())

        # Verify the rebinned output workspace exists
        outputws = AnalysisDataService.retrieve(datawsname+"_rebinned")
        self.assertTrue(outputws is not None)

        # Delete the test bin file and clean up workspaces
        os.remove(binfilename)
        AnalysisDataService.remove(datawsname)
        AnalysisDataService.remove(datawsname+"_rebinned")

        return
Example #7
    def test_exportFileNew(self):
        """ Test to export logs without header file
        """
        # Generate the matrix workspace with some logs
        ws = self.createTestWorkspace()
        AnalysisDataService.addOrReplace("TestMatrixWS", ws)

        # Test algorithm
        alg_test = run_algorithm("ExportExperimentLog",
            InputWorkspace = "TestMatrixWS",
            OutputFilename = "TestRecord001.txt",
            SampleLogNames = ["run_number", "duration", "proton_charge", "proton_charge", "proton_charge"],
            SampleLogTitles = ["RUN", "Duration", "ProtonCharge", "MinPCharge", "MeanPCharge"],
            SampleLogOperation = [None, None, "sum", "min", "average"],
            FileMode = "new")

        # Validate
        self.assertTrue(alg_test.isExecuted())

        # Locate file
        outfilename = alg_test.getProperty("OutputFilename").value
        try:
            ifile = open(outfilename)
            lines = ifile.readlines()
            ifile.close()
        except IOError:
            self.fail("Unable to open file {0}.".format(outfilename))
            return

        # The last line must not be empty, i.e. the file must not end with a blank line
        lastline = lines[-1]
        self.assertTrue(len(lastline.strip()) > 0)

        # Number of lines
        self.assertEqual(len(lines), 2)

        # Check the first data line
        firstdataline = lines[1]
        terms = firstdataline.strip().split("\t")
        self.assertEqual(len(terms), 5)

        # Get property
        pchargelog = ws.getRun().getProperty("proton_charge").value
        sumpcharge = numpy.sum(pchargelog)
        minpcharge = numpy.min(pchargelog)
        avgpcharge = numpy.average(pchargelog)

        v2 = float(terms[2])
        self.assertAlmostEqual(sumpcharge, v2)
        v3 = float(terms[3])
        self.assertAlmostEqual(minpcharge, v3)
        v4 = float(terms[4])
        self.assertAlmostEqual(avgpcharge, v4)

        # Remove generated files
        os.remove(outfilename)
        AnalysisDataService.remove("TestMatrixWS")

        return
Example #8
 def test_len_increases_when_item_added(self):
     wsname = 'ADSTest_test_len_increases_when_item_added'
     current_len = len(AnalysisDataService)
     self._run_createws(wsname)
     self.assertEqual(len(AnalysisDataService), current_len + 1)
     # Remove to clean the test up
     AnalysisDataService.remove(wsname)
Example #9
    def _determine_factors(self, q_high_angle, q_low_angle, mode, scale, shift):

        # Determine the StartQ and EndQ values
        q_min, q_max = self._get_start_q_and_end_q_values(rear_data=q_low_angle, front_data=q_high_angle)

        # We need to transfer the errors from the front data to the rear data, as we are using the front data as
        # a model, but we want to take into account the errors of both workspaces.
        error_correction = ErrorTransferFromModelToData()
        front_data_corrected, rear_data_corrected = error_correction.get_error_corrected(rear_data=q_low_angle,
                                                                                         front_data=q_high_angle,
                                                                                         q_min=q_min, q_max=q_max)

        fit = self.createChildAlgorithm('Fit')

        # We currently have to put the front_data into the ADS so that the TabulatedFunction has access to it
        front_data_corrected = AnalysisDataService.addOrReplace('front_data_corrected', front_data_corrected)
        front_in_ads = AnalysisDataService.retrieve('front_data_corrected')

        function = 'name=TabulatedFunction, Workspace="' + str(
            front_in_ads.name()) + '"' + ";name=FlatBackground"

        fit.setProperty('Function', function)
        fit.setProperty('InputWorkspace', rear_data_corrected)

        # Tie the x-direction parameters so that fitting only occurs in the y direction
        constant_x_shift_and_scale = 'f0.Shift=0.0, f0.XScaling=1.0'
        if mode == Mode.BothFit:
            fit.setProperty('Ties', constant_x_shift_and_scale)
        elif mode == Mode.ShiftOnly:
            fit.setProperty('Ties', 'f0.Scaling=' + str(scale) + ',' + constant_x_shift_and_scale)
        elif mode == Mode.ScaleOnly:
            fit.setProperty('Ties', 'f1.A0=' + str(shift) + '*f0.Scaling,' + constant_x_shift_and_scale)
        else:
            raise RuntimeError('Unknown fitting mode requested.')

        fit.setProperty('StartX', q_min)
        fit.setProperty('EndX', q_max)
        fit.setProperty('CreateOutput', True)
        fit.execute()
        param = fit.getProperty('OutputParameters').value
        AnalysisDataService.remove(front_in_ads.name())

        # The output parameters are:
        # 1. Scaling in y direction
        # 2. Shift in x direction
        # 3. Scaling in x direction
        # 4. Shift in y direction

        scale = param.row(0)['Value']

        if scale == 0.0:
            raise RuntimeError('Fit scaling as part of stitching evaluated to zero')

        # In order to determine the shift, we need to remove the scale factor
        shift = param.row(3)['Value'] / scale

        return (shift, scale)
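For reference, a minimal sketch of how the four fit output parameters above map onto the stitch results; the row order is taken from the comment in the code, and the composite function is TabulatedFunction (f0) plus FlatBackground (f1). This is an illustration, not part of the original algorithm:

        # Hypothetical unpacking of the Fit 'OutputParameters' table used above
        y_scale = param.row(0)['Value']  # f0.Scaling: scaling in y
        x_shift = param.row(1)['Value']  # f0.Shift: shift in x (tied to 0.0)
        x_scale = param.row(2)['Value']  # f0.XScaling: scaling in x (tied to 1.0)
        y_shift = param.row(3)['Value']  # f1.A0: flat background, i.e. shift in y
        shift = y_shift / y_scale        # remove the scale factor, as in the code above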
Example #10
    def test_exportFileAppend(self):
        """ Test to export logs without header file
        """
        # Generate the matrix workspace with some logs
        ws = self.createTestWorkspace()
        AnalysisDataService.addOrReplace("TestMatrixWS", ws)

        # Test algorithm
        # create new file
        alg_test = run_algorithm("ExportExperimentLog", 
            InputWorkspace = "TestMatrixWS",
            OutputFilename = "TestRecord.txt",
            SampleLogNames = ["run_number", "duration", "proton_charge"],
            SampleLogTitles = ["RUN", "Duration", "ProtonCharge"],
            SampleLogOperation = [None, None, "sum"],
            FileMode = "new")     
      
        # append
        alg_test = run_algorithm("ExportExperimentLog", 
            InputWorkspace = "TestMatrixWS",
            OutputFilename = "TestRecord.txt",
            SampleLogNames = ["run_number", "duration", "proton_charge"],
            SampleLogTitles = ["RUN", "Duration", "ProtonCharge"],
            SampleLogOperation = [None, None, "sum"],
            FileMode = "fastappend")

        # Validate
        self.assertTrue(alg_test.isExecuted())

        # Locate file
        outfilename = alg_test.getProperty("OutputFilename").value
        try:
            print("Output file is {0}.".format(outfilename))
            ifile = open(outfilename)
            lines = ifile.readlines()
            ifile.close()
        except IOError:
            self.fail("Unable to open file {0}.".format(outfilename))
            return
            
        # The last line must not be empty, i.e. the file must not end with a blank line
        lastline = lines[-1]
        self.assertTrue(len(lastline.strip()) > 0)

        # Number of lines
        self.assertEqual(len(lines), 3)

        # Check the first data line
        firstdataline = lines[1]
        terms = firstdataline.strip().split("\t")
        self.assertEqual(len(terms), 3)

        # Remove generated files
        os.remove(outfilename)
        AnalysisDataService.remove("TestMatrixWS")

        return
Example #11
 def tearDown(self):
     self.cleanup_names.append(self.wsname)
     for name in self.cleanup_names:
         try:
             AnalysisDataService.remove(name)
         except KeyError:
             pass
     self.cleanup_names = []
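Where a test suite owns every workspace it creates, a tearDown like the one above can be reduced to a single call; a minimal sketch using AnalysisDataService.clear(), which empties the whole service (only appropriate when nothing else in the session must survive):

 def tearDown(self):
     # Drop every workspace in one call instead of removing names one by one
     AnalysisDataService.clear()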
Example #12
    def test_observeDelete_calls_deleteHandle_when_set_on_ads_and_a_workspace_is_deleted(self):
        CreateSampleWorkspace(OutputWorkspace="ws")

        self.fake_class.observeDelete(True)
        self.fake_class.deleteHandle = mock.MagicMock()
        ADS.remove("ws")

        self.assertEqual(self.fake_class.deleteHandle.call_count, 1)
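The fake_class above is presumably a subclass of mantid.api.AnalysisDataServiceObserver; a minimal sketch of such a stub, with the handles the test overrides written as no-op defaults (an assumption about the original fixture):

    class FakeADSObserver(AnalysisDataServiceObserver):
        # The tests replace these with mock.MagicMock(); the defaults do nothing
        def anyChangeHandle(self):
            pass

        def deleteHandle(self, name, workspace):
            pass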
Example #13
 def test_add_raises_error_if_name_exists(self):
     data = [1.0,2.0,3.0]
     alg = run_algorithm('CreateWorkspace',DataX=data,DataY=data,NSpec=1,UnitX='Wavelength', child=True)
     name = "testws"
     ws = alg.getProperty("OutputWorkspace").value
     AnalysisDataService.addOrReplace(name, ws)
     self.assertRaises(RuntimeError, AnalysisDataService.add, name, ws)
     AnalysisDataService.remove(name)
Example #14
 def test_setTitleAndComment(self):
     run_algorithm('CreateWorkspace', OutputWorkspace='ws1',DataX=[1.,2.,3.], DataY=[2.,3.], DataE=[2.,3.],UnitX='TOF')
     ws1 = AnalysisDataService['ws1']
     title = 'test_title'
     ws1.setTitle(title)
     self.assertEqual(title, ws1.getTitle())
     comment = 'Some comment on this workspace.'
     ws1.setComment(comment)
     self.assertEqual(comment, ws1.getComment())
     AnalysisDataService.remove(ws1.name())
Example #15
 def _load_param_file(self, inst_name):
     InstrumentParameters.instrument_name = inst_name
     if IS_IN_MANTIDPLOT:
         idf_loc = config.getInstrumentDirectory()
         idf_pattern = os.path.join(idf_loc, "%s_Definition*.xml") % inst_name
         import glob
         idf_files = glob.glob(idf_pattern)
         emptyInst = LoadEmptyInstrument(Filename=str(idf_files[0]))
         InstrumentParameters._instrument = emptyInst.getInstrument()
         AnalysisDataService.remove(str(emptyInst)) # Don't need to keep workspace
Example #16
 def test_addOrReplace_replaces_workspace_with_existing_name(self):
     data = [1.0,2.0,3.0]
     alg = run_algorithm('CreateWorkspace',DataX=data,DataY=data,NSpec=1,UnitX='Wavelength', child=True)
     name = "testws"
     ws = alg.getProperty("OutputWorkspace").value
     AnalysisDataService.add(name, ws)
     len_before = len(AnalysisDataService)
     AnalysisDataService.addOrReplace(name, ws)
     len_after = len(AnalysisDataService)
     self.assertEqual(len_after, len_before)
     AnalysisDataService.remove(name)
Example #17
 def _deleteWorkspaces(self, workspaces):
     """
         Deletes a list of workspaces if they exist but ignores any errors
         @param workspaces: list of workspaces to try to delete
     """
     for wk in workspaces:
         try:
             if AnalysisDataService.doesExist(wk):
                 AnalysisDataService.remove(wk)
          except Exception:
              # if the workspace can't be deleted this function does nothing
              pass
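The same ignore-errors cleanup can be routed through the DeleteWorkspace algorithm from mantid.simpleapi, which also notifies ADS observers; a minimal sketch of that variant:

 def _deleteWorkspaces(self, workspaces):
     from mantid.simpleapi import DeleteWorkspace
     for wk in workspaces:
         try:
             if AnalysisDataService.doesExist(wk):
                 DeleteWorkspace(Workspace=wk)
         except Exception:
             # as above, failures to delete are deliberately ignored
             pass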
Example #18
    def test_exportUTC(self):
        """ Test to export logs in UTC time zone with a header file
        """
        import os
        import os.path

        # Generate the matrix workspace with some logs
        ws = self.createTestWorkspace()
        AnalysisDataService.addOrReplace("TestMatrixWS", ws)

        # Test algorithm
        alg_test = run_algorithm(
            "ExportSampleLogsToCSVFile",
            InputWorkspace="TestMatrixWS",
            OutputFilename="furnace20339utc.txt",
            SampleLogNames=["SensorA", "SensorB", "SensorC"],
            WriteHeaderFile=True,
            TimeZone="UTC",
            Header="SensorA[K]\t SensorB[K]\t SensorC[K]",
        )

        # Validate
        self.assertTrue(alg_test.isExecuted())

        # Locate file
        outfilename = alg_test.getProperty("OutputFilename").value
        filepath = os.path.dirname(outfilename)
        basename = os.path.basename(outfilename)
        baseheadername = basename.split(".")[0] + "_header.txt"
        headerfilename = os.path.join(filepath, baseheadername)
        try:
            ifile = open(headerfilename)
            lines = ifile.readlines()
            ifile.close()
        except IOError:
            self.fail("Unable to open header file %s." % headerfilename)
            return

        # Count lines in the file
        goodlines = 0
        for line in lines:
            line = line.strip()
            if len(line) > 0:
                goodlines += 1
        self.assertEqual(goodlines, 3)

        # Clean
        os.remove(outfilename)
        os.remove(headerfilename)
        AnalysisDataService.remove("TestMatrixWS")

        return
Example #19
    def test_that_a_histogram_workspace_is_returned_as_a_MatrixWorkspace_from_ADS(self):
        wsname = "MatrixWorkspaceTest_ADS"
        AnalysisDataService.add(wsname, self._test_ws)

        value = AnalysisDataService[wsname]
        self.assertTrue(isinstance(value, Workspace))
        # Have got a MatrixWorkspace back and not just the generic interface
        self.assertTrue(isinstance(value, MatrixWorkspace))
        mem = value.getMemorySize()
        self.assertTrue(mem > 0)

        AnalysisDataService.remove(wsname)
Example #20
 def test_history_access(self):
     run_algorithm('CreateWorkspace', OutputWorkspace='raw',DataX=[1.,2.,3.], DataY=[2.,3.], DataE=[2.,3.],UnitX='TOF')
     run_algorithm('Rebin', InputWorkspace='raw', Params=[1.,0.5,3.],OutputWorkspace='raw')
     raw = AnalysisDataService['raw']
     history = raw.getHistory()
     last = history.lastAlgorithm()
     self.assertEqual(last.name(), "Rebin")
     self.assertEqual(last.getPropertyValue("InputWorkspace"), "raw")
     first = history[0]
     self.assertEqual(first.name(), "CreateWorkspace")
     self.assertEqual(first.getPropertyValue("OutputWorkspace"), "raw")
     AnalysisDataService.remove('raw')
Example #21
    def test_batch_reduction_on_time_sliced_file(self):
        # Arrange
        # Build the data information
        file_information_factory = SANSFileInformationFactory()
        file_information = file_information_factory.create_sans_file_information("SANS2D00034484")

        data_builder = get_data_builder(SANSFacility.ISIS, file_information)
        data_builder.set_sample_scatter("SANS2D00034484")
        data_builder.set_sample_transmission("SANS2D00034505")
        data_builder.set_sample_direct("SANS2D00034461")
        data_builder.set_can_scatter("SANS2D00034481")
        data_builder.set_can_transmission("SANS2D00034502")
        data_builder.set_can_direct("SANS2D00034461")

        data_builder.set_calibration("TUBE_SANS2D_BOTH_31681_25Sept15.nxs")

        data_info = data_builder.build()

        # Get the rest of the state from the user file
        user_file_director = StateDirectorISIS(data_info, file_information)
        user_file_director.set_user_file("USER_SANS2D_154E_2p4_4m_M3_Xpress_8mm_SampleChanger.txt")
        # Set the reduction mode to LAB
        user_file_director.set_reduction_builder_reduction_mode(ISISReductionMode.LAB)
        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        # COMPATIBILITY BEGIN -- Remove when appropriate
        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        # Since we are dealing with event based data but we want to compare it with histogram data from the
        # old reduction system we need to enable the compatibility mode
        user_file_director.set_compatibility_builder_use_compatibility_mode(True)
        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        # COMPATIBILITY END
        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        user_file_director.set_slice_event_builder_start_time([1.0,3.0])
        user_file_director.set_slice_event_builder_end_time([3.0,5.0])

        state = user_file_director.construct()

        # Act
        states = [state]
        self._run_batch_reduction(states, use_optimizations=False)

        expected_workspaces = ["34484rear_1D_1.75_16.5_t1.00_T3.00", "34484rear_1D_1.75_16.5_t3.00_T5.00"]
        reference_file_names = ["SANS2D_event_slice_referance_t1.00_T3.00.nxs", "SANS2D_event_slice_referance_t3.00_T5.00.nxs"]

        for element, reference_file in zip(expected_workspaces, reference_file_names):
            self.assertTrue(AnalysisDataService.doesExist(element))
            # Evaluate it up to a defined point
            self._compare_workspace(element, reference_file)

        # Clean up
        for element in expected_workspaces:
            AnalysisDataService.remove(element)
Example #22
    def test_batch_reduction_on_period_time_sliced_wavelength_range_data(self):
        # Arrange
        # Build the data information
        file_information_factory = SANSFileInformationFactory()
        file_information = file_information_factory.create_sans_file_information("SANS2D0005512")

        data_builder = get_data_builder(SANSFacility.ISIS, file_information)
        data_builder.set_sample_scatter("SANS2D0005512")
        data_builder.set_sample_scatter_period(1)

        data_info = data_builder.build()

        # Get the rest of the state from the user file
        user_file_director = StateDirectorISIS(data_info, file_information)
        user_file_director.set_user_file("MASKSANS2Doptions.091A")
        # Set the reduction mode to LAB
        user_file_director.set_reduction_builder_reduction_mode(ISISReductionMode.LAB)

        user_file_director.set_slice_event_builder_start_time([1.0, 3.0])
        user_file_director.set_slice_event_builder_end_time([3.0, 5.0])

        state = user_file_director.construct()

        start = [1.0, 1.0]
        end = [3.0, 2.0]
        state.wavelength.wavelength_low = start
        state.wavelength.wavelength_high = end

        state.adjustment.normalize_to_monitor.wavelength_low = start
        state.adjustment.normalize_to_monitor.wavelength_high = end

        state.adjustment.calculate_transmission.wavelength_low = start
        state.adjustment.calculate_transmission.wavelength_high = end

        state.adjustment.wavelength_and_pixel_adjustment.wavelength_low = start
        state.adjustment.wavelength_and_pixel_adjustment.wavelength_high = end

        # Act
        states = [state]
        self._run_batch_reduction(states, use_optimizations=False)

        # Assert
        # We only assert that the expected workspaces exist on the ADS
        expected_workspaces = ["5512p1rear_1D_1.0_2.0Phi-45.0_45.0_t1.00_T3.00", "5512p1rear_1D_1.0_2.0Phi-45.0_45.0_t3.00_T5.00",
                               "5512p1rear_1D_1.0_3.0Phi-45.0_45.0_t1.00_T3.00", "5512p1rear_1D_1.0_3.0Phi-45.0_45.0_t3.00_T5.00"
                               ]
        for element in expected_workspaces:
            self.assertTrue(AnalysisDataService.doesExist(element))

        # Clean up
        for element in expected_workspaces:
            AnalysisDataService.remove(element)
Example #23
def _removeWorkspace(workspace_name):
    """Remove the workspace with the given name, including any child workspaces if it
    is a group. If a corresponding monitors workspace exists, remove that too."""
    if AnalysisDataService.doesExist(workspace_name):
        workspace = AnalysisDataService.retrieve(workspace_name)
        if isinstance(workspace, WorkspaceGroup):
            # Remove child workspaces first
            while workspace.getNumberOfEntries():
                _removeWorkspace(workspace[0].name())
        AnalysisDataService.remove(workspace_name)
    # If a corresponding monitors workspace also exists, remove that too
    if AnalysisDataService.doesExist(_monitorWorkspace(workspace_name)):
        _removeWorkspace(_monitorWorkspace(workspace_name))
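_removeWorkspace relies on a _monitorWorkspace helper that is not shown; a minimal sketch under the common Mantid convention that loaders name a monitor workspace with a "_monitors" suffix (the exact convention in the original codebase is an assumption):

def _monitorWorkspace(workspace_name):
    """Return the name of the monitors workspace assumed to pair with the given one."""
    return workspace_name + '_monitors'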
Example #24
    def test_key_operator_does_same_as_retrieve(self):
        wsname = 'ADSTest_test_key_operator_does_same_as_retrieve'
        self._run_createws(wsname)
        ws_from_op = AnalysisDataService[wsname]
        ws_from_method = AnalysisDataService.retrieve(wsname)
        
        self.do_check_for_matrix_workspace_type(ws_from_op)
        self.do_check_for_matrix_workspace_type(ws_from_method)
        
        self.assertEqual(ws_from_op.name(), ws_from_method.name())
        self.assertEqual(ws_from_op.getMemorySize(), ws_from_method.getMemorySize())

        # Remove to clean the test up
        AnalysisDataService.remove(wsname)
Example #25
    def test_exportFileMissingLog(self):
        """ Test to export logs where a requested log is missing
        """
        # Generate the matrix workspace with some logs
        ws = self.createTestWorkspace()
        AnalysisDataService.addOrReplace("TestMatrixWS", ws)

        # Test algorithm
        alg_test = run_algorithm(
            "ExportSampleLogsToCSVFile",
            InputWorkspace="TestMatrixWS",
            OutputFilename="furnace20335.txt",
            SampleLogNames=["SensorA", "SensorB", "SensorX", "SensorC"],
            WriteHeaderFile=False,
        )

        # Validate
        self.assertTrue(alg_test.isExecuted())

        # Locate file
        outfilename = alg_test.getProperty("OutputFilename").value
        try:
            ifile = open(outfilename)
            lines = ifile.readlines()
            ifile.close()
        except IOError:
            self.fail("Unable to open file %s." % outfilename)
            return

        # Count lines in the file
        goodlines = 0
        for line in lines:
            line = line.strip()
            if len(line) > 0:
                goodlines += 1
        self.assertEqual(goodlines, 25)

        # Check values
        line0 = lines[0]
        terms = line0.split()
        self.assertEqual(len(terms), 6)
        value2 = float(terms[4])
        self.assertEqual(value2, 0.0)

        # Clean
        os.remove(outfilename)
        AnalysisDataService.remove("TestMatrixWS")

        return
Example #26
 def _mask_to_roi(self, ws_mask):
     # invert mask and then extract "masked" detectors in order to get ROI
     # BUG in Mantid forces us to use AnalysisDataService
     alg = AlgorithmManager.create("InvertMask")
     alg.initialize()
     alg.setProperty("InputWorkspace", ws_mask)
     alg.setPropertyValue("OutputWorkspace", "_ws")
     alg.execute()
     ws_tranmskinv = AnalysisDataService.retrieve("_ws")
     alg = self.createChildAlgorithm("ExtractMask")
     alg.setProperty("InputWorkspace", ws_tranmskinv)
     alg.execute()
     AnalysisDataService.remove("_ws")
     return alg.getProperty("DetectorList").value
Example #27
    def test_exportFileAndHeader(self):
        """ Test to export logs without header file
        """
        import os
        import os.path
        # Generate the matrix workspace with some logs
        ws = self.createTestWorkspace()
        AnalysisDataService.addOrReplace("TestMatrixWS", ws)

        # Test algorithm
        alg_test = run_algorithm(
            "ExportSampleLogsToCSVFile",
            InputWorkspace="TestMatrixWS",
            OutputFilename="furnace20339.txt",
            SampleLogNames=["SensorA", "SensorB", "SensorC"],
            WriteHeaderFile=True,
            Header="SensorA[K]\t SensorB[K]\t SensorC[K]")

        # Validate
        self.assertTrue(alg_test.isExecuted())

        # Locate file
        outfilename = alg_test.getProperty("OutputFilename").value
        filepath = os.path.dirname(outfilename)
        basename = os.path.basename(outfilename)
        baseheadername = basename.split(".")[0] + "_header.txt"
        headerfilename = os.path.join(filepath, baseheadername)
        try:
            ifile = open(headerfilename)
            lines = ifile.readlines()
            ifile.close()
        except IOError:
            self.fail("Unable to open header file %s." % headerfilename)
            return

        # Count lines in the file
        goodlines = 0
        for line in lines:
            line = line.strip()
            if len(line) > 0:
                goodlines += 1
        self.assertEqual(goodlines, 3)

        # Clean
        os.remove(outfilename)
        os.remove(headerfilename)
        AnalysisDataService.remove("TestMatrixWS")

        return
Example #28
    def test_LoadMultipleGSSTest(self):
        # Set up
        alg_test = run_algorithm("LoadMultipleGSS",
                                 FilePrefix = "PG3",
                                 RunNumbers = [11485,11486],
                                 Directory  = "")
        # Execute
        self.assertTrue(alg_test.isExecuted())

        # just make sure there are output workspaces then delete them
        for name in ["PG3_11485", "PG3_11486"]:
            wksp = AnalysisDataService.retrieve(name)
            self.assertTrue(wksp is not None)
            AnalysisDataService.remove(name)
Example #29
    def test_that_a_histogram_workspace_is_returned_as_a_MatrixWorkspace_from_a_property(self):
        wsname = "MatrixWorkspaceTest_Property"
        AnalysisDataService.add(wsname, self._test_ws)

        alg = create_algorithm("Rebin", InputWorkspace=wsname)
        propValue = alg.getProperty("InputWorkspace").value
        # Is Workspace in the hierarchy of the value
        self.assertTrue(isinstance(propValue, Workspace))
        # Have got a MatrixWorkspace back and not just the generic interface
        self.assertTrue(isinstance(propValue, MatrixWorkspace))
        mem = propValue.getMemorySize()
        self.assertTrue(mem > 0)

        AnalysisDataService.remove(wsname)
Example #30
    def test_that_a_histogram_workspace_is_returned_as_a_MatrixWorkspace_from_a_property(self):
        wsname = "MatrixWorkspaceTest_Property"
        AnalysisDataService.add(wsname, self._test_ws)

        alg = create_algorithm("Rebin", InputWorkspace=wsname)
        propValue = alg.getProperty("InputWorkspace").value
        # Is Workspace in the hierarchy of the value
        self.assertTrue(isinstance(propValue, Workspace))
        # Have got a MatrixWorkspace back and not just the generic interface
        self.assertTrue(isinstance(propValue, MatrixWorkspace))
        mem = propValue.getMemorySize()
        self.assertGreater(mem, 0)

        AnalysisDataService.remove(wsname)
Example #31
    def test_nomad_no_mins(self):
        api.LoadNexusProcessed(Filename='NOM_91796_banks.nxs', OutputWorkspace='NOM_91796_banks')
        alg_test = run_algorithm('CropWorkspaceRagged',
                                 InputWorkspace='NOM_91796_banks', OutputWorkspace='NOM_91796_banks',
                                 XMax=[10.20, 20.8, np_inf, math_nan, np_nan, 9.35])

        self.assertTrue(alg_test.isExecuted())

        # Verify ....
        outputws = AnalysisDataService.retrieve('NOM_91796_banks')
        for i, Xlen in enumerate([511,1041,2001,2001,2001,468]): # larger than in test_nomad_inplace
            self.assertEqual(len(outputws.readX(i)), Xlen)

        AnalysisDataService.remove('NOM_91796_banks')
Example #32
    def test_batch_reduction_on_period_time_sliced_wavelength_range_data(self):
        # Arrange
        # Build the data information
        file_information_factory = SANSFileInformationFactory()
        file_information = file_information_factory.create_sans_file_information(
            "SANS2D0005512")

        data_builder = get_data_builder(SANSFacility.ISIS, file_information)
        data_builder.set_sample_scatter("SANS2D0005512")
        data_builder.set_sample_scatter_period(1)

        data_info = data_builder.build()

        # Get the rest of the state from the user file
        user_filename = "MASKSANS2Doptions.091A"
        user_file_director = UserFileReaderAdapter(
            file_information=file_information, user_file_name=user_filename)
        state = user_file_director.get_all_states(
            file_information=file_information)
        # Set the reduction mode to LAB
        state.reduction.reduction_mode = ReductionMode.LAB
        state.data = data_info

        state.slice.start_time = [1.0, 3.0]
        state.slice.end_time = [3.0, 5.0]

        selected_range = [(1.0, 2.0), (1.0, 3.0)]
        state.wavelength.wavelength_interval.selected_ranges = selected_range
        state.adjustment.calculate_transmission.wavelength_interval.selected_ranges = selected_range
        state.adjustment.wavelength_and_pixel_adjustment.wavelength_interval.selected_ranges = selected_range

        # Act
        states = [state]
        self._run_batch_reduction(states, use_optimizations=False)

        # Assert
        # We only assert that the expected workspaces exist on the ADS
        expected_workspaces = [
            "5512_p1rear_1D_1.0_2.0Phi-45.0_45.0_t1.00_T3.00",
            "5512_p1rear_1D_1.0_2.0Phi-45.0_45.0_t3.00_T5.00",
            "5512_p1rear_1D_1.0_3.0Phi-45.0_45.0_t1.00_T3.00",
            "5512_p1rear_1D_1.0_3.0Phi-45.0_45.0_t3.00_T5.00"
        ]
        for element in expected_workspaces:
            self.assertTrue(AnalysisDataService.doesExist(element))

        # Clean up
        for element in expected_workspaces:
            AnalysisDataService.remove(element)
Example #33
    def test_exportFileMissingLog(self):
        """ Test to export logs without header file
        """
        # Generate the matrix workspace with some logs
        ws = self.createTestWorkspace()
        AnalysisDataService.addOrReplace("TestMatrixWS", ws)

        # Test algorithm
        alg_test = run_algorithm(
            "ExportSampleLogsToCSVFile",
            InputWorkspace="TestMatrixWS",
            OutputFilename="furnace20335.txt",
            SampleLogNames=["SensorA", "SensorB", "SensorX", "SensorC"],
            WriteHeaderFile=False)

        # Validate
        self.assertTrue(alg_test.isExecuted())

        # Locate file
        outfilename = alg_test.getProperty("OutputFilename").value
        try:
            ifile = open(outfilename)
            lines = ifile.readlines()
            ifile.close()
        except IOError:
            self.fail("Unable to open file {0}.".format(outfilename))
            return

        # Count lines in the file
        goodlines = 0
        for line in lines:
            line = line.strip()
            if len(line) > 0:
                goodlines += 1
        self.assertEqual(goodlines, 25)

        # Check values
        line0 = lines[0]
        terms = line0.split()
        self.assertEqual(len(terms), 6)
        value2 = float(terms[4])
        self.assertEqual(value2, 0.0)

        # Clean
        os.remove(outfilename)
        AnalysisDataService.remove("TestMatrixWS")

        return
Example #34
    def test_nomad_inplace(self):
        api.LoadNexusProcessed(Filename='NOM_91796_banks.nxs', OutputWorkspace='NOM_91796_banks')
        alg_test = run_algorithm('CropWorkspaceRagged',
                                 InputWorkspace='NOM_91796_banks', OutputWorkspace='NOM_91796_banks',
                                 XMin=[0.67, 1.20, 2.42, 3.70, 4.12, 0.39],
                                 XMax=[10.20, 20.8, np_nan, math_nan, np_nan, 9.35])

        self.assertTrue(alg_test.isExecuted())

        # Verify ....
        outputws = AnalysisDataService.retrieve('NOM_91796_banks')
        for i, Xlen in enumerate([477,981,1880,1816,1795,448]):
            self.assertEqual(len(outputws.readX(i)), Xlen)

        AnalysisDataService.remove('NOM_91796_banks')
Example #35
    def test_key_operator_does_same_as_retrieve(self):
        wsname = 'ADSTest_test_key_operator_does_same_as_retrieve'
        self._run_createws(wsname)
        ws_from_op = AnalysisDataService[wsname]
        ws_from_method = AnalysisDataService.retrieve(wsname)

        self.do_check_for_matrix_workspace_type(ws_from_op)
        self.do_check_for_matrix_workspace_type(ws_from_method)

        self.assertEqual(ws_from_op.name(), ws_from_method.name())
        self.assertEqual(ws_from_op.getMemorySize(),
                         ws_from_method.getMemorySize())

        # Remove to clean the test up
        AnalysisDataService.remove(wsname)
Example #36
 def test_setTitleAndComment(self):
     run_algorithm('CreateWorkspace',
                   OutputWorkspace='ws1',
                   DataX=[1., 2., 3.],
                   DataY=[2., 3.],
                   DataE=[2., 3.],
                   UnitX='TOF')
     ws1 = AnalysisDataService['ws1']
     title = 'test_title'
     ws1.setTitle(title)
     self.assertEqual(title, ws1.getTitle())
     comment = 'Some comment on this workspace.'
     ws1.setComment(comment)
     self.assertEqual(comment, ws1.getComment())
     AnalysisDataService.remove(ws1.name())
Example #37
 def test_addOrReplace_replaces_workspace_with_existing_name(self):
     data = [1.0, 2.0, 3.0]
     alg = run_algorithm('CreateWorkspace',
                         DataX=data,
                         DataY=data,
                         NSpec=1,
                         UnitX='Wavelength',
                         child=True)
     name = "testws"
     ws = alg.getProperty("OutputWorkspace").value
     AnalysisDataService.add(name, ws)
     len_before = len(AnalysisDataService)
     AnalysisDataService.addOrReplace(name, ws)
     len_after = len(AnalysisDataService)
     self.assertEqual(len_after, len_before)
     AnalysisDataService.remove(name)
Example #38
 def test_removing_item_invalidates_extracted_handles(self):
     # If a reference to a DataItem has been extracted from the ADS and the
     # item is then removed, the extracted handle should no longer be able
     # to access the DataItem
     wsname = 'ADSTest_test_removing_item_invalidates_extracted_handles'
     self._run_createws(wsname)
     ws_handle = AnalysisDataService[wsname]
     succeeded = False
     try:
         ws_handle.id() # Should be okay
         succeeded = True
     except RuntimeError:
         pass
     self.assertTrue(succeeded, "DataItem handle should be valid and allow function calls")
     AnalysisDataService.remove(wsname)
     self.assertRaises(RuntimeError, ws_handle.id)
Example #39
    def test_batch_reduction_on_multiperiod_file(self):
        # Arrange
        # Build the data information
        file_information_factory = SANSFileInformationFactory()
        file_information = file_information_factory.create_sans_file_information(
            "SANS2D0005512")

        data_builder = get_data_builder(SANSFacility.ISIS, file_information)
        data_builder.set_sample_scatter("SANS2D0005512")

        data_info = data_builder.build()

        # Get the rest of the state from the user file
        user_file_director = StateDirectorISIS(data_info, file_information)
        user_file_director.set_user_file("MASKSANS2Doptions.091A")
        # Set the reduction mode to LAB
        user_file_director.set_reduction_builder_reduction_mode(
            ISISReductionMode.LAB)
        state = user_file_director.construct()

        # Act
        states = [state]
        self._run_batch_reduction(states, use_optimizations=False)

        # Assert
        # We only assert that the expected workspaces exist on the ADS
        expected_workspaces = [
            "5512p1rear_1D_2.0_14.0Phi-45.0_45.0",
            "5512p2rear_1D_2.0_14.0Phi-45.0_45.0",
            "5512p3rear_1D_2.0_14.0Phi-45.0_45.0",
            "5512p4rear_1D_2.0_14.0Phi-45.0_45.0",
            "5512p5rear_1D_2.0_14.0Phi-45.0_45.0",
            "5512p6rear_1D_2.0_14.0Phi-45.0_45.0",
            "5512p7rear_1D_2.0_14.0Phi-45.0_45.0",
            "5512p8rear_1D_2.0_14.0Phi-45.0_45.0",
            "5512p9rear_1D_2.0_14.0Phi-45.0_45.0",
            "5512p10rear_1D_2.0_14.0Phi-45.0_45.0",
            "5512p11rear_1D_2.0_14.0Phi-45.0_45.0",
            "5512p12rear_1D_2.0_14.0Phi-45.0_45.0",
            "5512p13rear_1D_2.0_14.0Phi-45.0_45.0"
        ]
        for element in expected_workspaces:
            self.assertTrue(AnalysisDataService.doesExist(element))

        # Clean up
        for element in expected_workspaces:
            AnalysisDataService.remove(element)
Example #40
    def test_that_batch_reduction_evaluates_LAB(self):
        # Arrange
        # Build the data information
        file_information_factory = SANSFileInformationFactory()
        file_information = file_information_factory.create_sans_file_information(
            "SANS2D00034484")

        data_builder = get_data_builder(SANSFacility.ISIS, file_information)
        data_builder.set_sample_scatter("SANS2D00034484")
        data_builder.set_sample_transmission("SANS2D00034505")
        data_builder.set_sample_direct("SANS2D00034461")
        data_builder.set_can_scatter("SANS2D00034481")
        data_builder.set_can_transmission("SANS2D00034502")
        data_builder.set_can_direct("SANS2D00034461")

        data_builder.set_calibration("TUBE_SANS2D_BOTH_31681_25Sept15.nxs")

        data_info = data_builder.build()

        user_filename = "USER_SANS2D_154E_2p4_4m_M3_Xpress_8mm_SampleChanger.txt"

        user_file_director = StateBuilder.new_instance(
            file_information=file_information,
            data_information=data_info,
            user_filename=user_filename)

        # Get the rest of the state from the user file
        state = user_file_director.get_all_states()

        # Set the reduction mode to LAB
        state.reduction.reduction_mode = ReductionMode.LAB
        # Since we are dealing with event based data but we want to compare it with histogram data from the
        # old reduction system we need to enable the compatibility mode
        state.compatibility.use_compatibility_mode = True  # COMPATIBILITY BEGIN -- Remove when appropriate

        # Act
        states = [state]
        self._run_batch_reduction(states, use_optimizations=False)
        workspace_name = "34484_rear_1D_1.75_16.5"
        output_workspace = AnalysisDataService.retrieve(workspace_name)

        # Evaluate it up to a defined point
        reference_file_name = "SANS2D_ws_D20_reference_LAB_1D.nxs"
        self._compare_workspace(output_workspace, reference_file_name)

        if AnalysisDataService.doesExist(workspace_name):
            AnalysisDataService.remove(workspace_name)
Example #41
    def calculate(self, reducer, wave_wksps=None):
        """
            Multiplies all the wavelength scalings into one workspace and all the detector
            dependent scalings into another workspace that can be used by ConvertToQ. It is important
            that the wavelength correction workspaces have a known distribution/non-distribution state
            @param reducer: settings used for this reduction
            @param wave_wksps: additional wavelength dependent correction workspaces to include
        """
        if wave_wksps is None:
            wave_wksps = []
        for step in self._wave_steps:
            if step.output_wksp:
                wave_wksps.append(step.output_wksp)

        wave_adj = None
        for wksp in wave_wksps:
            # before the workspaces can be combined they all need to match
            api.RebinToWorkspace(WorkspaceToRebin=wksp,
                                 WorkspaceToMatch=reducer.output_wksp,
                                 OutputWorkspace=self.TMP_WORKSPACE_NAME)

            if not wave_adj:
                # first time around this loop
                wave_adj = self.WAVE_CORR_NAME
                api.RenameWorkspace(InputWorkspace=self.TMP_WORKSPACE_NAME,
                                    OutputWorkspace=wave_adj)
            else:
                api.Multiply(LHSWorkspace=self.TMP_WORKSPACE_NAME,
                             RHSWorkspace=wave_adj,
                             OutputWorkspace=wave_adj)

        # read pixel correction file
        # note: the python code below is an attempt to emulate function overloading.
        # If a derived class overrides self._load and self._load_params then
        # a custom, specific loading can be achieved
        pixel_adj = ''
        if self._pixel_file:
            pixel_adj = self.PIXEL_CORR_NAME
            load_com = self._load + '(Filename="' + self._pixel_file + '",OutputWorkspace="' + pixel_adj + '"'
            if self._load_params:
                load_com += ',' + self._load_params
            load_com += ')'
            eval(load_com)

        if AnalysisDataService.doesExist(self.TMP_WORKSPACE_NAME):
            AnalysisDataService.remove(self.TMP_WORKSPACE_NAME)

        return wave_adj, pixel_adj
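The eval() of a hand-built command string above can be avoided by resolving the loader by name on mantid.simpleapi. A minimal sketch, assuming (as the string concatenation implies) that self._load names a simpleapi algorithm such as "LoadRKH" and self._load_params is a "Key=Value,Key2=Value2" string:

    def _load_pixel_correction(self):
        # Hypothetical refactoring of the eval(load_com) block above
        import mantid.simpleapi as simpleapi
        loader = getattr(simpleapi, self._load)
        kwargs = {}
        if self._load_params:
            for pair in self._load_params.split(','):
                key, value = pair.split('=', 1)
                kwargs[key.strip()] = value.strip()
        loader(Filename=self._pixel_file, OutputWorkspace=self.PIXEL_CORR_NAME, **kwargs)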
Example #42
    def test_genHKLList(self):
        """ Test to load a .hkl file
        """
        # Set up
        alg_test = run_algorithm("CreateLeBailFitInput",
                                 ReflectionsFile="",
                                 MaxHKL="12,12,12",
                                 FullprofParameterFile="2011B_HR60b2.irf",
                                 Bank=2,
                                 LatticeConstant=4.66,
                                 GenerateBraggReflections=True,
                                 InstrumentParameterWorkspace="PG3_Bank2_Foo2",
                                 BraggPeakParameterWorkspace="Arb_Peaks")

        # Execute
        self.assertTrue(alg_test.isExecuted())

        # Verify some values
        # Profile parameter workspace
        paramws = AnalysisDataService.retrieve("PG3_Bank2_Foo2")

        paramname0 = paramws.cell(0, 0)

        if paramname0.lower() == "bank":
            numrowgood = 28
        else:
            numrowgood = 27
        # print("Parameter name of first line =", paramname0)

        # self.assertEqual(numrowgood, paramws.rowCount())

        paramnames = []
        for i in range(paramws.rowCount()):
            paramname = paramws.cell(i, 0)
            paramnames.append(paramname)
        self.assertEqual(paramnames.count("LatticeConstant"), 1)

        # Bragg peak list
        braggws = AnalysisDataService.retrieve("Arb_Peaks")
        self.assertTrue(braggws.rowCount() > 20)

        # 4. Clean up the workspaces
        AnalysisDataService.remove("PG3_Bank2_Foo2")
        AnalysisDataService.remove("Arb_Peaks")

        return
Example #43
    def test_retrieve_workspaces_uses_weak_ptrs(self):
        ws_names = ["test_retrieve_workspaces_1", "test_retrieve_workspaces_2"]
        for name in ws_names:
            self._run_createws(name)
        workspaces = AnalysisDataService.retrieveWorkspaces(ws_names)
        self.assertEqual(len(workspaces), 2)

        AnalysisDataService.remove(ws_names[0])
        # even though workspace has been deleted this should not affect workspaces size
        self.assertEqual(len(workspaces), 2)

        # check that the second workspace pointer in workspaces exists and can be used
        str(workspaces[1])

        # if a weak pointer has been used we expect a RuntimeError. Any other pointer will result in a different error
        with self.assertRaises(RuntimeError):
            str(workspaces[0])
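retrieveWorkspaces also takes an unrollGroups flag that expands any WorkspaceGroup entries into their members; a minimal sketch (the group name is illustrative, not from the original test):

        # Hypothetical companion check: a group is returned as its child workspaces
        members = AnalysisDataService.retrieveWorkspaces(["my_group"], unrollGroups=True)
        # 'members' then holds the group's children rather than the group itself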
Example #44
    def test_exportFile2(self):
        """ Get a partial of real load frame log values, and set them to
        different logs
        """
        # Generate the matrix workspace with some logs
        ws = self.createTestWorkspace2()
        AnalysisDataService.addOrReplace("TestMatrixWS2", ws)

        # Test algorithm
        alg_test = run_algorithm(
            "ExportSampleLogsToCSVFile",
            InputWorkspace="TestMatrixWS2",
            OutputFilename="furnace20334.txt",
            SampleLogNames=["SensorA", "SensorB", "SensorC", "SensorD"],
            WriteHeaderFile=False,
            TimeTolerance=1.0)

        # Validate
        self.assertTrue(alg_test.isExecuted())

        # Locate file
        outfilename = alg_test.getProperty("OutputFilename").value
        try:
            ifile = open(outfilename)
            lines = ifile.readlines()
            ifile.close()
        except IOError as err:
            print("Unable to open file {0}.".format(outfilename))
            self.assertTrue(False)
            return

        # Count lines in the file
        goodlines = 0
        for line in lines:
            line = line.strip()
            if len(line) > 0 and len(
                    line.split()) == 6 and line.startswith('76130'):
                goodlines += 1
        self.assertEqual(goodlines, 64)

        # Remove generated files
        os.remove(outfilename)
        AnalysisDataService.remove("TestMatrixWS2")

        return
Example #45
    def test_nomad_inplace(self):
        api.LoadNexusProcessed(Filename='NOM_91796_banks.nxs',
                               OutputWorkspace='NOM_91796_banks')
        alg_test = run_algorithm(
            'CropWorkspaceRagged',
            InputWorkspace='NOM_91796_banks',
            OutputWorkspace='NOM_91796_banks',
            XMin=[0.67, 1.20, 2.42, 3.70, 4.12, 0.39],
            XMax=[10.20, 20.8, np_nan, math_nan, np_nan, 9.35])

        self.assertTrue(alg_test.isExecuted())

        # Verify ....
        outputws = AnalysisDataService.retrieve('NOM_91796_banks')
        for i, Xlen in enumerate([477, 981, 1880, 1816, 1795, 448]):
            self.assertEqual(len(outputws.readX(i)), Xlen)

        AnalysisDataService.remove('NOM_91796_banks')
Example #46
    def test_nomad_no_mins(self):
        api.LoadNexusProcessed(Filename='NOM_91796_banks.nxs',
                               OutputWorkspace='NOM_91796_banks')
        alg_test = run_algorithm(
            'CropWorkspaceRagged',
            InputWorkspace='NOM_91796_banks',
            OutputWorkspace='NOM_91796_banks',
            XMax=[10.20, 20.8, np_inf, math_nan, np_nan, 9.35])

        self.assertTrue(alg_test.isExecuted())

        # Verify ....
        outputws = AnalysisDataService.retrieve('NOM_91796_banks')
        for i, Xlen in enumerate([511, 1041, 2001, 2001, 2001,
                                  468]):  # larger than in test_nomad_inplace
            self.assertEqual(len(outputws.readX(i)), Xlen)

        AnalysisDataService.remove('NOM_91796_banks')
Example #47
    def test_create_with_2D_numpy_array(self):
        x = np.array([1., 2., 3., 4.])
        y = np.array([[1., 2., 3.], [4., 5., 6.]])
        e = np.sqrt(y)

        wksp = CreateWorkspace(DataX=x, DataY=y, DataE=e, NSpec=2, UnitX='TOF')
        self.assertTrue(isinstance(wksp, MatrixWorkspace))
        self.assertEqual(wksp.getNumberHistograms(), 2)

        for i in [0, 1]:
            for j in range(len(y[0])):
                self.assertEqual(wksp.readY(i)[j], y[i][j])
                self.assertEqual(wksp.readE(i)[j], e[i][j])
                self.assertEqual(wksp.readX(i)[j], x[j])
            # Last X value
            self.assertEqual(wksp.readX(i)[len(x) - 1], x[len(x) - 1])

        AnalysisDataService.remove("wksp")
Example #48
    def test_exportFileOnly(self):
        """ Test to export logs without header file
        """
        # Generate the matrix workspace with some logs
        ws = self.createTestWorkspace()
        AnalysisDataService.addOrReplace("TestMatrixWS", ws)

        # Test algorithm
        alg_test = run_algorithm(
            "ExportSampleLogsToCSVFile",
            InputWorkspace="TestMatrixWS",
            OutputFilename="furnace20333.txt",
            SampleLogNames=["SensorA", "SensorB", "SensorC"],
            WriteHeaderFile=False)

        # Validate
        self.assertTrue(alg_test.isExecuted())

        # Locate file
        outfilename = alg_test.getProperty("OutputFilename").value
        try:
            ifile = open(outfilename)
            lines = ifile.readlines()
            ifile.close()
        except IOError as err:
            print("Unable to open file {0}.".format(outfilename))
            self.fail()
            return

        # Count lines in the file
        goodlines = 0
        for line in lines:
            line = line.strip()
            if len(line) > 0:
                goodlines += 1
            # ENDIF
        # ENDFOR
        self.assertEqual(goodlines, 25)

        # Remove generated files
        os.remove(outfilename)
        AnalysisDataService.remove("TestMatrixWS")

        return
Example #49
    def test_nomad_no_mins(self):
        api.LoadNexusProcessed(Filename="NOM_91796_banks.nxs",
                               OutputWorkspace="NOM_91796_banks")
        alg_test = run_algorithm(
            "RebinRagged",
            InputWorkspace="NOM_91796_banks",
            OutputWorkspace="NOM_91796_banks",
            Delta=0.04,  # double original data bin size
            XMax=[10.20, 20.8, np_inf, math_nan, np_nan, 9.35])

        self.assertTrue(alg_test.isExecuted())

        # Verify ....
        outputws = AnalysisDataService.retrieve("NOM_91796_banks")
        for i, Xlen in enumerate([256, 521, 1001, 1001, 1001,
                                  235]):  # larger than in test_nomad_inplace
            self.assertEqual(len(outputws.readX(i)), Xlen)

        AnalysisDataService.remove("NOM_91796_banks")
Example #50
 def _collapse_workspace_groups(self, workspaces):
     """Given a list of workspaces, which themselves could be groups of workspaces,
     return a new list of workspaces which are TOF"""
     ungrouped_workspaces = set([])
     delete_ws_group_flag = True
     for ws_name in workspaces:
         ws = AnalysisDataService.retrieve(ws_name)
         if isinstance(ws, WorkspaceGroup):
             ungrouped_workspaces = ungrouped_workspaces.union(
                 self._collapse_workspace_groups(ws.getNames()))
             if delete_ws_group_flag is True:
                 AnalysisDataService.remove(ws_name)
         else:
             if (ws.getAxis(0).getUnit().unitID()) == 'TOF':
                 ungrouped_workspaces.add(ws_name)
             else:
                 # Do not remove the workspace group from the ADS if a non-TOF workspace exists
                 delete_ws_group_flag = False
     return ungrouped_workspaces
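A hypothetical usage of the helper above (the workspace names are illustrative entries assumed to already exist in the ADS):

     # Groups are flattened recursively; only TOF workspaces survive
     tof_names = self._collapse_workspace_groups(["tof_ws", "mixed_group"])
     for name in sorted(tof_names):
         print(name)  # each name refers to a workspace whose X unit is TOF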
Example #51
    def test_nomad_inplace(self):
        api.LoadNexusProcessed(Filename="NOM_91796_banks.nxs",
                               OutputWorkspace="NOM_91796_banks")
        alg_test = run_algorithm(
            "RebinRagged",
            InputWorkspace="NOM_91796_banks",
            OutputWorkspace="NOM_91796_banks",
            XMin=[0.67, 1.20, 2.42, 3.70, 4.12, 0.39],
            Delta=0.02,  # original data bin size
            XMax=[10.20, 20.8, np_nan, math_nan, np_nan, 9.35])

        self.assertTrue(alg_test.isExecuted())

        # Verify ....
        outputws = AnalysisDataService.retrieve("NOM_91796_banks")
        for i, Xlen in enumerate([478, 981, 1880, 1816, 1795, 449]):
            self.assertEqual(len(outputws.readX(i)), Xlen)

        AnalysisDataService.remove("NOM_91796_banks")
Example #52
    def test_success(self):
        OutputWorkspaceName = "output_ws"
        Inputws = "%s, %s" % (self._input_ws_base.name(), self._input_good.name())

        alg_test = run_algorithm("TOFTOFMergeRuns",
                                 InputWorkspaces=Inputws,
                                 OutputWorkspace=OutputWorkspaceName)
        self.assertTrue(alg_test.isExecuted())

        wsoutput = AnalysisDataService.retrieve(OutputWorkspaceName)

        run_out = wsoutput.getRun()
        run_in = self._input_ws_base.getRun()
        # Scalar sample logs must carry over unchanged to the merged workspace
        for log_name in ('wavelength', 'chopper_speed', 'chopper_ratio',
                         'channel_width', 'Ei', 'EPP', 'proposal_number',
                         'proposal_title', 'mode', 'experiment_team'):
            self.assertEqual(run_out.getLogData(log_name).value,
                             run_in.getLogData(log_name).value)

        run_in_good = self._input_good.getRun()
        self.assertEqual(run_out.getLogData('run_number').value,
                         str([run_in.getLogData('run_number').value, run_in_good.getLogData('run_number').value]))

        self.assertEqual(run_out.getLogData('temperature').value, float(run_in.getLogData('temperature').value))
        self.assertEqual(run_out.getLogData('duration').value,
                         float(run_in.getLogData('duration').value) + float(run_in_good.getLogData('duration').value))
        self.assertEqual(run_out.getLogData('run_start').value, run_in.getLogData('run_start').value)
        self.assertEqual(run_out.getLogData('run_end').value, run_in.getLogData('run_end').value)
        self.assertEqual(run_out.getLogData('full_channels').value, run_in.getLogData('full_channels').value)
        self.assertEqual(run_out.getLogData('monitor_counts').value, 2*int(run_in.getLogData('monitor_counts').value))
        # Dimension output workspace
        self.assertEqual(wsoutput.getNumberHistograms(), self._input_ws_base.getNumberHistograms())
        self.assertEqual(wsoutput.blocksize(), self._input_ws_base.blocksize())
        # check instrument
        self.assertEqual(wsoutput.getInstrument().getName(), "TOFTOF")

        AnalysisDataService.remove("output_ws")
Example No. 53
    def test_history_access(self):
        run_algorithm('CreateWorkspace',
                      OutputWorkspace='raw',
                      DataX=[1., 2., 3.],
                      DataY=[2., 3.],
                      DataE=[2., 3.],
                      UnitX='TOF')
        run_algorithm('Rebin',
                      InputWorkspace='raw',
                      Params=[1., 0.5, 3.],
                      OutputWorkspace='raw')
        raw = AnalysisDataService['raw']
        history = raw.getHistory()
        last = history.lastAlgorithm()
        self.assertEqual(last.name(), "Rebin")
        self.assertEqual(last.getPropertyValue("InputWorkspace"), "raw")
        first = history[0]
        self.assertEqual(first.name(), "CreateWorkspace")
        self.assertEqual(first.getPropertyValue("OutputWorkspace"), "raw")
        AnalysisDataService.remove('raw')
Example No. 54
    def test_create_with_1D_numpy_array(self):
        x = np.array([1., 2., 3., 4.])
        y = np.array([1., 2., 3.])
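        # counting-statistics errors: e_i = sqrt(y_i)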
        e = np.sqrt(np.array([1., 2., 3.]))

        wksp = CreateWorkspace(DataX=x, DataY=y, DataE=e, NSpec=1, UnitX='TOF')
        self.assertTrue(isinstance(wksp, MatrixWorkspace))
        self.assertEqual(wksp.getNumberHistograms(), 1)

        self.assertEqual(len(wksp.readY(0)), len(y))
        self.assertEqual(len(wksp.readX(0)), len(x))
        self.assertEqual(len(wksp.readE(0)), len(e))

        for index in range(len(y)):
            self.assertEqual(wksp.readY(0)[index], y[index])
            self.assertEqual(wksp.readE(0)[index], e[index])
            self.assertEqual(wksp.readX(0)[index], x[index])
        # Last X value
        self.assertEqual(wksp.readX(0)[len(x) - 1], x[len(x) - 1])
        AnalysisDataService.remove("wksp")
Example No. 55
    def test_failed(self):
        """
        Failed tests because of missing keys or different values
        """
        OutputWorkspaceName = "output_ws"
        Inputws_badvalue = "%s, %s" % (self._input_ws_base.name(), self._input_bad_value.name())
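        # rethrow=True makes run_algorithm re-raise the algorithm's failure,
        # so assertRaises can observe the RuntimeError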
        self.assertRaises(RuntimeError,
                          run_algorithm, 'TOFTOFMergeRuns',
                          InputWorkspaces=Inputws_badvalue,
                          OutputWorkspace=OutputWorkspaceName,
                          rethrow=True)

        Inputws_badentry = "%s, %s" % (self._input_ws_base.name(), self._input_bad_entry.name())
        self.assertRaises(RuntimeError,
                          run_algorithm, 'TOFTOFMergeRuns',
                          InputWorkspaces=Inputws_badentry,
                          OutputWorkspace=OutputWorkspaceName,
                          rethrow=True)

        if "output_ws" is not None:
            AnalysisDataService.remove("output_ws")
Example No. 56
    def _clean_up(self, base_name, number_of_workspaces):
        for index in range(1, number_of_workspaces + 1):
            workspace_name = base_name + str(index)
            monitor_name = workspace_name + "_monitors"
            AnalysisDataService.remove(workspace_name)
            AnalysisDataService.remove(monitor_name)
        AnalysisDataService.remove("80tubeCalibration_18-04-2016_r9330-9335")
Example No. 57
    def test_batch_reduction_on_multiperiod_file(self):
        # Arrange
        # Build the data information
        file_information_factory = SANSFileInformationFactory()
        file_information = file_information_factory.create_sans_file_information(
            "SANS2D0005512")
        data_builder = get_data_builder(SANSFacility.ISIS, file_information)
        data_builder.set_sample_scatter("SANS2D0005512")

        data_info = data_builder.build()

        # Get the rest of the state from the user file
        user_filename = "MASKSANS2Doptions.091A"
        user_file_parser = StateBuilder.new_instance(
            file_information=file_information,
            data_information=data_info,
            user_filename=user_filename)
        state = user_file_parser.get_all_states()
        # Set the reduction mode to LAB
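        # (LAB = the low-angle, i.e. rear, detector bank of SANS2D)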
        state.reduction.reduction_mode = ReductionMode.LAB

        # Act
        states = [state]
        self._run_batch_reduction(states, use_optimizations=False)

        # Assert
        # We only assert that the expected workspaces exist on the ADS
        expected_workspaces = [
            "5512_p{0}rear_1D_2.0_14.0Phi-45.0_45.0".format(i)
            for i in range(1, 14)
        ]
        for element in expected_workspaces:
            does_exist = AnalysisDataService.doesExist(element)
            self.assertTrue(does_exist,
                            msg="{0} was not found".format(element))

        # Clean up
        for element in expected_workspaces:
            AnalysisDataService.remove(element)
Example No. 58
    def runTest(self):
        UseCompatibilityMode()
        LARMOR()
        Set1D()
        Detector("DetectorBench")
        MaskFile('USER_LARMOR_151B_LarmorTeam_80tubes_BenchRot1p4_M4_r3699.txt')
        Gravity(True)
        AddRuns(('13065', '13065'), 'LARMOR', 'nxs', lowMem=True)

        AssignSample('13065-add.nxs')
        WavRangeReduction(2, 4, DefaultTrans)

        # Clean up
        for element in AnalysisDataService.getObjectNames():
            if AnalysisDataService.doesExist(element) and element != "13065p1rear_1D_2.0_4.0":
                AnalysisDataService.remove(element)

        paths = [os.path.join(config['defaultsave.directory'], 'LARMOR00013065-add.nxs'),
                 os.path.join(config['defaultsave.directory'], 'SANS2D00013065.log')]  # noqa
        for path in paths:
            if os.path.exists(path):
                os.remove(path)
Example No. 59
    def cleanup(self, nf):
        # Remove the simulated workspaces sim0 .. sim{nf-1}
        for iif in range(nf):
            AnalysisDataService.remove('sim{0}'.format(iif))
        AnalysisDataService.remove('targetW')
        # Remove the fitting results, if present
        for suffix in 'NormalisedCovarianceMatrix Parameters Workspace'.split():
            if mtd.doesExist('targetW_{0}'.format(suffix)):
                AnalysisDataService.remove('targetW_{0}'.format(suffix))
Example No. 60
    def test_pickle_table_workspace(self):
        from mantid.kernel import V3D
        import pickle

        table = TableWorkspace()
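        # one column per type under test: plain int, string, and a V3D vector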
        table.addColumn(type="int", name="index")
        table.addColumn(type="str", name="value")
        table.addColumn(type="V3D", name="position")

        values = (1, '10', V3D(0, 0, 1))
        table.addRow(values)
        values = (2, '100', V3D(1, 0, 0))
        table.addRow(values)

        p = pickle.dumps(table)
        table2 = pickle.loads(p)
        self.assertEqual(table.toDict(), table2.toDict())

        # Can we add it to the ADS
        name = "test_pickle_table_workspace"
        AnalysisDataService.add(name, table2)
        self.assertTrue(name in AnalysisDataService)
        AnalysisDataService.remove(name)