Example #1
    def _create_peaks_workspace(self):
        """Create a dummy peaks workspace"""
        path = FileFinder.getFullPath("IDFs_for_UNIT_TESTING/MINITOPAZ_Definition.xml")
        inst = LoadEmptyInstrument(Filename=path)
        ws = CreatePeaksWorkspace(inst, 0)
        DeleteWorkspace(inst)
        SetUB(ws, 1, 1, 1, 90, 90, 90)

        # Add a bunch of random peaks that happen to fall on the
        # detector bank defined in the IDF
        center_q = np.array([-5.1302, 2.5651, 3.71809])
        qs = []
        for i in np.arange(0, 1, 0.1):
            for j in np.arange(-0.5, 0, 0.1):
                q = center_q.copy()
                q[1] += j
                q[2] += i
                qs.append(q)

        # Add the peaks to the PeaksWorkspace with dummy values for intensity,
        # Sigma, and HKL
        for q in qs:
            peak = ws.createPeak(q)
            peak.setIntensity(100)
            peak.setSigmaIntensity(10)
            peak.setHKL(1, 1, 1)
            ws.addPeak(peak)

        return ws
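
A hedged aside on the pattern in this example: createPeak returns a detached IPeak whose intensity, sigma and HKL can be set before addPeak copies it into the table. A minimal sketch (generic instrument via CreateSampleWorkspace; the names here are illustrative) for inspecting the result:

from mantid.simpleapi import CreateSampleWorkspace, CreatePeaksWorkspace

inst = CreateSampleWorkspace()
peaks = CreatePeaksWorkspace(InstrumentWorkspace=inst, NumberOfPeaks=3)
for i in range(peaks.getNumberPeaks()):
    pk = peaks.getPeak(i)
    # Q-lab vector and the placeholder intensity assigned at creation
    print(pk.getQLabFrame(), pk.getIntensity())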
Example #2
 def test_basic_access(self):
     sampleWs = CreateSampleWorkspace()
     ws = CreatePeaksWorkspace(InstrumentWorkspace=sampleWs, NumberOfPeaks=1)
     peak = ws.getPeak(0)
     peak_shape = peak.getPeakShape()
     self.assertTrue(isinstance(peak_shape, PeakShape))
     self.assertEqual(peak_shape.shapeName(), "none")
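
A hedged note on PeakShape: a freshly created peak reports shapeName() == "none", and integration algorithms attach richer shapes (assuming a spherical-radius IntegratePeaksMD run, for instance, integrated peaks would carry a "spherical" shape). A short sketch of the accessors:

from mantid.simpleapi import CreateSampleWorkspace, CreatePeaksWorkspace

sampleWs = CreateSampleWorkspace()
ws = CreatePeaksWorkspace(InstrumentWorkspace=sampleWs, NumberOfPeaks=1)
shape = ws.getPeak(0).getPeakShape()
print(shape.shapeName())  # "none" until an integrator attaches a shape
print(shape.toJSON())     # serialised shape metadata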
Example #3
    def setUp(self):
        # IPeak cannot currently be instantiated directly, so this is a quick
        # way of getting a handle to a peak object
        ws = CreateSimulationWorkspace("SXD", BinParams="1,1,10")
        peaks = CreatePeaksWorkspace(ws, 1)
        self._peak = peaks.getPeak(0)

        # tolerance for differences in q vectors that are recomputed
        # on every call.
        self._tolerance = 1e-2
Example #4
    def test_lattice_accessors(self):
        instrument_ws = CreateSampleWorkspace()
        peaks = CreatePeaksWorkspace(instrument_ws, 0)
        SetUB(peaks, 1, 1, 1, 90, 90, 90)
        sample = peaks.sample()

        self.assertTrue(sample.hasOrientedLattice())
        self.assertTrue(
            isinstance(sample.getOrientedLattice(), OrientedLattice))
        sample.clearOrientedLattice()
        self.assertFalse(sample.hasOrientedLattice())
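
A hedged sketch of the lattice accessors exercised above: SetUB attaches an OrientedLattice to the sample, and the lattice constants can then be read back directly.

from mantid.simpleapi import CreateSampleWorkspace, CreatePeaksWorkspace, SetUB

ws = CreateSampleWorkspace()
peaks = CreatePeaksWorkspace(InstrumentWorkspace=ws, NumberOfPeaks=0)
SetUB(peaks, 2, 3, 4, 90, 90, 90)
lattice = peaks.sample().getOrientedLattice()
print(lattice.a(), lattice.b(), lattice.c())  # 2.0 3.0 4.0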
Example #5
    def test_get_hkls(self):
        ws = CreateSimulationWorkspace("IRIS", BinParams="1,5,10")
        peaks = CreatePeaksWorkspace(ws, 2)
        reference = np.array([
            [1, 1, 2],
            [2, 1, 4],
        ])

        peak = peaks.getPeak(0)
        peak.setHKL(1, 1, 2)
        peak = peaks.getPeak(1)
        peak.setHKL(2, 1, 4)

        hkl = indexing.get_hkls(peaks)
        npt.assert_array_equal(hkl, reference)
Example #6
    def test_handles_inaccurate_goniometer(self):
        peaks1 = CreatePeaksWorkspace(InstrumentWorkspace=self.ws,
                                      NumberOfPeaks=0,
                                      OutputWorkspace="SXD_peaks3")
        peaks2 = CloneWorkspace(InputWorkspace=peaks1,
                                OutputWorkspace="SXD_peaks4")
        # set different gonio on each run
        rot = 5
        SetGoniometer(Workspace=peaks1, Axis0=f'{-rot},0,1,0,1')
        SetGoniometer(Workspace=peaks2, Axis0=f'{rot},0,1,0,1')
        # Add peaks at QLab corresponding to slightly different gonio rotations
        UB = np.diag([0.25, 0.25, 0.1])  # alatt = [4,4,10]
        for h in range(0, 3):
            for k in range(0, 3):
                hkl = np.array([h, k, 4])
                qlab = 2 * np.pi * np.matmul(
                    np.matmul(getR(-(rot + 1), [0, 1, 0]), UB), hkl)
                pk = peaks1.createPeak(qlab)
                peaks1.addPeak(pk)
                qlab = 2 * np.pi * np.matmul(
                    np.matmul(getR(rot + 1, [0, 1, 0]), UB), hkl)
                pk = peaks2.createPeak(qlab)
                peaks2.addPeak(pk)

        FindGlobalBMatrix(PeakWorkspaces=[peaks1, peaks2],
                          a=4.15,
                          b=3.95,
                          c=10,
                          alpha=88,
                          beta=88,
                          gamma=89,
                          Tolerance=0.15)

        # check lattice - shouldn't be affected by the error in the goniometer
        self.assert_lattice([peaks1, peaks2],
                            4.0,
                            4.0,
                            10.0,
                            90.0,
                            90.0,
                            90.0,
                            delta_latt=2e-2,
                            delta_angle=2.5e-1)
        self.assert_matrix([peaks1],
                           getBMatrix(peaks2),
                           getBMatrix,
                           delta=1e-10)  # should have same B matrix
        self.assert_matrix([peaks1, peaks2], np.eye(3), getUMatrix, delta=5e-2)
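
A hedged numeric sketch of the convention used above: the test builds q_lab = 2*pi * R * UB * hkl, where R is the goniometer rotation (getR) and UB maps integer hkl to sample-frame Q. With no rotation:

import numpy as np

UB = np.diag([0.25, 0.25, 0.1])  # alatt = [4, 4, 10], 90-degree angles
hkl = np.array([1, 2, 4])
q_sample = 2 * np.pi * UB @ hkl
print(q_sample)  # approximately [1.571, 3.142, 2.513]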
Example #7
 def runTest(self):
     # Load raw data (bank 1)
     wsMD = LoadMD(
         "WISH38237_MD.nxs")  # default so doesn't get overwrite van
     # For each mod vec, predict and integrate peaks and combine
     qs = [(0.15, 0, 0.3), (-0.15, 0, 0.3)]
     all_pks = CreatePeaksWorkspace(InstrumentWorkspace=wsMD,
                                    NumberOfPeaks=0,
                                    OutputWorkspace="all_pks")
     LoadIsawUB(InputWorkspace=all_pks,
                Filename='Wish_Diffuse_Scattering_ISAW_UB.mat')
     # PredictPeaks
     parent = PredictPeaks(InputWorkspace=all_pks,
                           WavelengthMin=0.8,
                           WavelengthMax=9.3,
                           MinDSpacing=0.5,
                           ReflectionCondition="Primitive")
     self._pfps = []
     self._saved_files = []
     for iq, q in enumerate(qs):
         wsname = f'pfp_{iq}'
         PredictFractionalPeaks(Peaks=parent,
                                IncludeAllPeaksInRange=True,
                                Hmin=0,
                                Hmax=0,
                                Kmin=1,
                                Kmax=1,
                                Lmin=0,
                                Lmax=1,
                                ReflectionCondition='Primitive',
                                MaxOrder=1,
                                ModVector1=",".join([str(qi) for qi in q]),
                                FracPeaks=wsname)
         FilterPeaks(InputWorkspace=wsname,
                     OutputWorkspace=wsname,
                     FilterVariable='Wavelength',
                     FilterValue=9.3,
                     Operator='<')  # should get rid of one peak in q1 table
         FilterPeaks(InputWorkspace=wsname,
                     OutputWorkspace=wsname,
                     FilterVariable='Wavelength',
                     FilterValue=0.8,
                     Operator='>')
         IntegratePeaksMD(InputWorkspace=wsMD,
                          PeakRadius='0.1',
                          BackgroundInnerRadius='0.1',
                          BackgroundOuterRadius='0.15',
                          PeaksWorkspace=wsname,
                          OutputWorkspace=wsname,
                          IntegrateIfOnEdge=False,
                          UseOnePercentBackgroundCorrection=False)
         all_pks = CombinePeaksWorkspaces(LHSWorkspace=all_pks,
                                          RHSWorkspace=wsname)
         self._pfps.append(ADS.retrieve(wsname))
     self._filepath = os.path.join(config['defaultsave.directory'],
                                   'WISH_IntegratedSatellite.int')
     SaveReflections(InputWorkspace=all_pks,
                     Filename=self._filepath,
                     Format='Jana')
     self._all_pks = all_pks
Example #8
    def test_HFIRCalculateGoniometer_HB3A_phi(self):
        omega = np.deg2rad(42)
        chi = np.deg2rad(-3)
        phi = np.deg2rad(23)
        R1 = np.array([
            [np.cos(omega), 0, -np.sin(omega)],  # omega 0,1,0,-1
            [0, 1, 0],
            [np.sin(omega), 0, np.cos(omega)]
        ])
        R2 = np.array([
            [np.cos(chi), np.sin(chi), 0],  # chi 0,0,1,-1
            [-np.sin(chi), np.cos(chi), 0],
            [0, 0, 1]
        ])
        R3 = np.array([
            [np.cos(phi), 0, -np.sin(phi)],  # phi 0,1,0,-1
            [0, 1, 0],
            [np.sin(phi), 0, np.cos(phi)]
        ])
        R = np.dot(np.dot(R1, R2), R3)

        wl = 1.54
        k = 2 * np.pi / wl
        theta = np.deg2rad(47)
        phi = np.deg2rad(13)

        q_lab = np.array([
            -np.sin(theta) * np.cos(phi), -np.sin(theta) * np.sin(phi),
            1 - np.cos(theta)
        ]) * k

        q_sample = np.dot(np.linalg.inv(R), q_lab)

        peaks = CreatePeaksWorkspace(OutputType="LeanElasticPeak",
                                     NumberOfPeaks=0)
        AddSampleLog(peaks, "Wavelength", str(wl), "Number")
        SetGoniometer(peaks, Axis0='42,0,1,0,-1',
                      Axis1='-3,0,0,1,-1')  # don't set phi

        p = peaks.createPeakQSample(q_sample)
        peaks.addPeak(p)

        HFIRCalculateGoniometer(peaks,
                                OverrideProperty=True,
                                InnerGoniometer=True)

        g = Goniometer()
        g.setR(peaks.getPeak(0).getGoniometerMatrix())
        YZY = g.getEulerAngles('YZY')
        self.assertAlmostEqual(YZY[0], -42, delta=1e-10)  # omega
        self.assertAlmostEqual(YZY[1], 3, delta=1e-10)  # chi
        self.assertAlmostEqual(YZY[2], -23, delta=1e-1)  # phi

        self.assertAlmostEqual(peaks.getPeak(0).getWavelength(),
                               1.54,
                               delta=1e-10)
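
A hedged consistency check on the rotations above: each factor is a rotation about y (omega, phi) or z (chi) with the -1 sense, so R1*R2*R3 is a YZY Euler composition and the product must be a proper rotation.

import numpy as np

def rot_y(a):  # y rotation with the -1 handedness used above
    return np.array([[np.cos(a), 0, -np.sin(a)], [0, 1, 0],
                     [np.sin(a), 0, np.cos(a)]])

def rot_z(a):  # z rotation with the -1 handedness used above
    return np.array([[np.cos(a), np.sin(a), 0],
                     [-np.sin(a), np.cos(a), 0], [0, 0, 1]])

omega, chi, phi = np.deg2rad([42, -3, 23])
R = rot_y(omega) @ rot_z(chi) @ rot_y(phi)
print(np.allclose(R @ R.T, np.eye(3)), np.isclose(np.linalg.det(R), 1.0))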
Example #9
    def test_finds_average_lattice_parameter(self):
        # create two peak tables with UB corresponding to different lattice constant, a
        peaks1 = CreatePeaksWorkspace(InstrumentWorkspace=self.ws,
                                      NumberOfPeaks=0,
                                      OutputWorkspace="SXD_peaks1")
        UB = np.diag([1.0 / 3.9, 0.25, 0.1])  # alatt = [3.9, 4, 10]
        SetUB(peaks1, UB=UB)
        peaks2 = CreatePeaksWorkspace(InstrumentWorkspace=self.ws,
                                      NumberOfPeaks=0,
                                      OutputWorkspace="SXD_peaks2")
        UB = np.diag([1.0 / 4.1, 0.25, 0.1])  # alatt = [4.1, 4, 10]
        SetUB(peaks2, UB=UB)
        # Add some peaks
        add_peaksHKL([peaks1, peaks2], range(0, 3), range(0, 3), 4)

        FindGlobalBMatrix(PeakWorkspaces=[peaks1, peaks2],
                          a=4.1,
                          b=4.2,
                          c=10,
                          alpha=88,
                          beta=88,
                          gamma=89,
                          Tolerance=0.15)

        # check lattice  - should have average a=4.0
        self.assert_lattice([peaks1, peaks2],
                            4.0,
                            4.0,
                            10.0,
                            90.0,
                            90.0,
                            90.0,
                            delta_latt=5e-2,
                            delta_angle=2.5e-1)
        self.assert_matrix([peaks1],
                           getBMatrix(peaks2),
                           getBMatrix,
                           delta=1e-10)  # should have same B matrix
        self.assert_matrix([peaks1, peaks2], np.eye(3), getUMatrix, delta=5e-2)
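
A hedged note on the UB values chosen above: for a cell with 90-degree angles in the default orientation, UB reduces to B = diag(1/a, 1/b, 1/c), so diag([1/3.9, 0.25, 0.1]) encodes a = 3.9, b = 4, c = 10.

import numpy as np

a, b, c = 3.9, 4.0, 10.0
UB = np.diag([1.0 / a, 1.0 / b, 1.0 / c])
print(np.allclose(UB, np.diag([1.0 / 3.9, 0.25, 0.1])))  # True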
Example #10
    def test_performs_correct_transform_to_ensure_consistent_indexing(self):
        # create peaks tables
        peaks1 = CreatePeaksWorkspace(InstrumentWorkspace=self.ws,
                                      NumberOfPeaks=0,
                                      OutputWorkspace="SXD_peaks7")
        UB = np.diag([0.2, 0.25, 0.1])
        SetUB(peaks1, UB=UB)
        # Add some peaks
        add_peaksHKL([peaks1], range(0, 3), range(0, 3), 4)
        # Clone ws and transform
        peaks2 = CloneWorkspace(InputWorkspace=peaks1,
                                OutputWorkspace="SXD_peaks8")
        peaks2.removePeak(
            0)  # peaks1 will have most peaks indexed so will be used as the reference
        transform = np.array([[0, 1, 0], [1, 0, 0], [0, 0, -1]])
        TransformHKL(PeaksWorkspace=peaks2,
                     HKLTransform=transform,
                     FindError=False)

        FindGlobalBMatrix(PeakWorkspaces=[peaks1, peaks2],
                          a=4.15,
                          b=3.95,
                          c=10,
                          alpha=88,
                          beta=88,
                          gamma=89,
                          Tolerance=0.15)

        # check lattice - shouldn't be affected by the HKL transform
        self.assert_lattice([peaks1, peaks2],
                            5.0,
                            4.0,
                            10.0,
                            90.0,
                            90.0,
                            90.0,
                            delta_latt=5e-2,
                            delta_angle=2.5e-1)
        self.assert_matrix([peaks1],
                           getBMatrix(peaks2),
                           getBMatrix,
                           delta=1e-10)  # should have same B matrix
        self.assert_matrix([peaks1, peaks2], np.eye(3), getUMatrix, delta=5e-2)
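
A hedged illustration of the re-indexing above: the HKLTransform swaps h and k and negates l, so consistent indexing across the two workspaces requires FindGlobalBMatrix to undo exactly this mapping.

import numpy as np

transform = np.array([[0, 1, 0], [1, 0, 0], [0, 0, -1]])
print(transform @ np.array([1, 2, 4]))  # [ 2  1 -4]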
Example #11
    def test_requires_more_than_one_peak_workspace(self):
        peaks1 = CreatePeaksWorkspace(InstrumentWorkspace=self.ws,
                                      NumberOfPeaks=0,
                                      OutputWorkspace="SXD_peaks4")
        UB = np.diag([0.25, 0.25, 0.1])
        SetUB(peaks1, UB=UB)
        # Add some peaks
        add_peaksHKL([peaks1], range(0, 3), range(0, 3), 4)

        alg = create_algorithm('FindGlobalBMatrix',
                               PeakWorkspaces=[peaks1],
                               a=4.1,
                               b=4.2,
                               c=10,
                               alpha=88,
                               beta=88,
                               gamma=89,
                               Tolerance=0.15)

        with self.assertRaises(RuntimeError):
            alg.execute()
Example #12
    def test_peak_workspaces_need_at_least_six_peaks_each(self):
        peaks1 = CreatePeaksWorkspace(InstrumentWorkspace=self.ws,
                                      NumberOfPeaks=0,
                                      OutputWorkspace="SXD_peaks5")
        UB = np.diag([0.25, 0.25, 0.1])
        SetUB(peaks1, UB=UB)
        # Add 5 peaks
        add_peaksHKL([peaks1], range(0, 5), [0], 4)
        peaks2 = CloneWorkspace(InputWorkspace=peaks1,
                                OutputWorkspace="SXD_peaks6")

        alg = create_algorithm('FindGlobalBMatrix',
                               PeakWorkspaces=[peaks1, peaks2],
                               a=4.1,
                               b=4.2,
                               c=10,
                               alpha=88,
                               beta=88,
                               gamma=89,
                               Tolerance=0.15)

        with self.assertRaises(RuntimeError):
            alg.execute()
Example #13
    def test_HFIRCalculateGoniometer_HB2C_omega(self):
        omega = np.deg2rad(42)

        R = np.array([[np.cos(omega), 0, np.sin(omega)], [0, 1, 0],
                      [-np.sin(omega), 0, np.cos(omega)]])

        wl = 1.54
        k = 2 * np.pi / wl
        theta = np.deg2rad(47)
        phi = np.deg2rad(13)

        q_lab = np.array([
            -np.sin(theta) * np.cos(phi), -np.sin(theta) * np.sin(phi),
            1 - np.cos(theta)
        ]) * k

        q_sample = np.dot(np.linalg.inv(R), q_lab)

        peaks = CreatePeaksWorkspace(OutputType="LeanElasticPeak",
                                     NumberOfPeaks=0)

        p = peaks.createPeakQSample(q_sample)
        peaks.addPeak(p)

        HFIRCalculateGoniometer(peaks, wl)

        g = Goniometer()
        g.setR(peaks.getPeak(0).getGoniometerMatrix())
        YZY = g.getEulerAngles('YZY')
        self.assertAlmostEqual(YZY[0], 42, delta=1e-10)  # omega
        self.assertAlmostEqual(YZY[1], 0, delta=1e-10)  # chi
        self.assertAlmostEqual(YZY[2], 0, delta=1e-10)  # phi

        self.assertAlmostEqual(peaks.getPeak(0).getWavelength(),
                               1.54,
                               delta=1e-10)
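
A hedged sketch of the elastic kinematics used above: with |k| = 2*pi/wavelength, an incident beam along +z and Mantid's q = k_i - k_f sign convention, q_lab takes the (-sin(theta)cos(phi), -sin(theta)sin(phi), 1 - cos(theta)) * k form and its magnitude is 2k*sin(theta/2).

import numpy as np

wl = 1.54
k = 2 * np.pi / wl
theta, phi = np.deg2rad([47, 13])
q_lab = k * np.array([-np.sin(theta) * np.cos(phi),
                      -np.sin(theta) * np.sin(phi),
                      1 - np.cos(theta)])
print(np.isclose(np.linalg.norm(q_lab), 2 * k * np.sin(theta / 2)))  # True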
Example #14
 def runTest(self):
     # Load Empty Instrument
     ws = LoadEmptyInstrument(InstrumentName='WISH', OutputWorkspace='WISH')
     axis = ws.getAxis(0)
     axis.setUnit("TOF")  # need this to add peak to table
     # CreatePeaksWorkspace with peaks in specific detectors
     peaks = CreatePeaksWorkspace(InstrumentWorkspace=ws,
                                  NumberOfPeaks=0,
                                  OutputWorkspace='peaks')
     AddPeak(PeaksWorkspace=peaks,
             RunWorkspace=ws,
             TOF=20000,
             DetectorID=1707204,
             Height=521,
             BinCount=0)  # pixel in first tube in panel 1
     AddPeak(PeaksWorkspace=peaks,
             RunWorkspace=ws,
             TOF=20000,
             DetectorID=1400510,
             Height=1,
             BinCount=0)  # pixel at top of a central tube in panel 1
     AddPeak(PeaksWorkspace=peaks,
             RunWorkspace=ws,
             TOF=20000,
             DetectorID=1408202,
             Height=598,
             BinCount=0)  # pixel in middle of bank 1 (not near edge)
     AddPeak(PeaksWorkspace=peaks,
             RunWorkspace=ws,
             TOF=20000,
             DetectorID=1100173,
             Height=640,
             BinCount=0)  # pixel in last tube of panel 1 (next to panel 2)
     # create dummy MD workspace for integration (don't need data as checking peak shape)
     MD = CreateMDWorkspace(Dimensions='3',
                            Extents='-1,1,-1,1,-1,1',
                            Names='Q_lab_x,Q_lab_y,Q_lab_z',
                            Units='U,U,U',
                            Frames='QLab,QLab,QLab',
                            SplitInto='2',
                            SplitThreshold='50')
     # Integrate peaks masking all pixels at tube end (built into IntegratePeaksMD)
     self._peaks_pixels = IntegratePeaksMD(InputWorkspace=MD,
                                           PeakRadius='0.02',
                                           PeaksWorkspace=peaks,
                                           IntegrateIfOnEdge=False,
                                           OutputWorkspace='peaks_pixels',
                                           MaskEdgeTubes=False)
     # Apply masking to specific tubes next to beam in/out (subset of all edge tubes) and integrate again
     MaskBTP(Workspace='peaks', Bank='5-6', Tube='152')
     MaskBTP(Workspace='peaks', Bank='1,10', Tube='1')
     self._peaks_pixels_beamTubes = IntegratePeaksMD(
         InputWorkspace='MD',
         PeakRadius='0.02',
         PeaksWorkspace=peaks,
         IntegrateIfOnEdge=False,
         OutputWorkspace='peaks_pixels_beamTubes',
         MaskEdgeTubes=False)
     # Integrate masking all edge tubes
     self._peaks_pixels_edgeTubes = IntegratePeaksMD(
         InputWorkspace='MD',
         PeakRadius='0.02',
         PeaksWorkspace='peaks',
         IntegrateIfOnEdge=False,
         OutputWorkspace='peaks_pixels_edgeTubes',
         MaskEdgeTubes=True)
Example #15
    def PyExec(self):
        input_workspaces, peak_workspaces = self._expand_groups()
        output_workspace_name = self.getPropertyValue("OutputWorkspace")

        peak_radius = self.getProperty("PeakRadius").value
        inner_radius = self.getProperty("BackgroundInnerRadius").value
        outer_radius = self.getProperty("BackgroundOuterRadius").value

        remove_0_intensity = self.getProperty("RemoveZeroIntensity").value
        use_lorentz = self.getProperty("ApplyLorentz").value

        multi_ws = len(input_workspaces) > 1

        output_workspaces = []

        for input_ws, peak_ws in zip(input_workspaces, peak_workspaces):
            if multi_ws:
                peaks_ws_name = input_ws + '_' + output_workspace_name
                output_workspaces.append(peaks_ws_name)
            else:
                peaks_ws_name = output_workspace_name

            IntegratePeaksMD(InputWorkspace=input_ws,
                             PeakRadius=peak_radius,
                             BackgroundInnerRadius=inner_radius,
                             BackgroundOuterRadius=outer_radius,
                             PeaksWorkspace=peak_ws,
                             OutputWorkspace=peaks_ws_name)

        if multi_ws:
            peaks_ws_name = output_workspace_name
            CreatePeaksWorkspace(
                InstrumentWorkspace=input_workspaces[0],
                NumberOfPeaks=0,
                OutputWorkspace=peaks_ws_name,
                OutputType=mtd[peak_workspaces[0]].id().replace(
                    'sWorkspace', ''))
            CopySample(InputWorkspace=output_workspaces[0],
                       OutputWorkspace=peaks_ws_name,
                       CopyName=False,
                       CopyMaterial=False,
                       CopyEnvironment=False,
                       CopyShape=False,
                       CopyLattice=True)
            for peak_ws in output_workspaces:
                CombinePeaksWorkspaces(peaks_ws_name,
                                       peak_ws,
                                       OutputWorkspace=peaks_ws_name)
                DeleteWorkspace(peak_ws)

        if use_lorentz:
            # Apply Lorentz correction:
            peaks = AnalysisDataService[peaks_ws_name]
            for p in range(peaks.getNumberPeaks()):
                peak = peaks.getPeak(p)
                lorentz = abs(
                    np.sin(peak.getScattering() * np.cos(peak.getAzimuthal())))
                peak.setIntensity(peak.getIntensity() * lorentz)

        if remove_0_intensity:
            FilterPeaks(InputWorkspace=peaks_ws_name,
                        OutputWorkspace=peaks_ws_name,
                        FilterVariable='Intensity',
                        FilterValue=0,
                        Operator='>')

        # Write output only if a file path was provided
        if not self.getProperty("OutputFile").isDefault:
            out_format = self.getProperty("OutputFormat").value
            filename = self.getProperty("OutputFile").value

            if out_format == "SHELX":
                SaveHKL(InputWorkspace=peaks_ws_name,
                        Filename=filename,
                        DirectionCosines=True,
                        OutputWorkspace="__tmp")
                DeleteWorkspace("__tmp")
            elif out_format == "Fullprof":
                SaveReflections(InputWorkspace=peaks_ws_name,
                                Filename=filename,
                                Format="Fullprof")
            else:
                # This shouldn't happen
                RuntimeError("Invalid output format given")

        self.setProperty("OutputWorkspace", AnalysisDataService[peaks_ws_name])
Example #16
    def PyExec(self):
        input_workspaces = self._expand_groups()
        output_workspace_name = self.getPropertyValue("OutputWorkspace")

        cell_type = self.getProperty("CellType").value
        centering = self.getProperty("Centering").value

        npeaks = self.getProperty("MaxPeaks").value
        dist_thresh = self.getProperty("PeakDistanceThreshold").value
        density_thresh = self.getProperty("DensityThresholdFactor").value

        lattice = self.getProperty("UseLattice").value
        if lattice:
            a = self.getProperty("LatticeA").value
            b = self.getProperty("LatticeB").value
            c = self.getProperty("LatticeC").value
            alpha = self.getProperty("LatticeAlpha").value
            beta = self.getProperty("LatticeBeta").value
            gamma = self.getProperty("LatticeGamma").value

        # Whether to use the inner goniometer depending on omega and phi in sample logs
        use_inner = False

        # Initially set the back-up wavelength to use. This is overwritten if found in sample logs.
        wavelength = None
        if not self.getProperty("Wavelength").isDefault:
            wavelength = self.getProperty("Wavelength").value

        multi_ws = len(input_workspaces) > 1
        output_workspaces = []
        for input_ws in input_workspaces:
            if multi_ws:
                peaks_ws_name = input_ws + '_' + output_workspace_name
                output_workspaces.append(peaks_ws_name)
            else:
                peaks_ws_name = output_workspace_name
            ws = AnalysisDataService[input_ws]
            if ws.getNumExperimentInfo() == 0:
                # Warn if we couldn't extract a wavelength from the workspace
                raise RuntimeWarning("No experiment info was found in input '{}'".format(ws.getName()))
            else:
                exp_info = ws.getExperimentInfo(0)
                if exp_info.run().hasProperty("wavelength"):
                    wavelength = exp_info.run().getProperty("wavelength").value
                if exp_info.run().hasProperty("omega"):
                    if np.isclose(exp_info.run().getTimeAveragedStd("omega"), 0.0):
                        use_inner = True
                if exp_info.run().hasProperty("phi"):
                    if np.isclose(exp_info.run().getTimeAveragedStd("phi"), 0.0):
                        use_inner = False
                self.log().information("Using inner goniometer: {}".format(use_inner))

            FindPeaksMD(InputWorkspace=input_ws,
                        PeakDistanceThreshold=dist_thresh,
                        DensityThresholdFactor=density_thresh,
                        CalculateGoniometerForCW=True,
                        Wavelength=wavelength,
                        FlipX=True,
                        InnerGoniometer=use_inner,
                        MaxPeaks=npeaks,
                        OutputWorkspace=peaks_ws_name)

        if multi_ws:
            peaks_ws_name = '__tmp_peaks_ws'
            CreatePeaksWorkspace(InstrumentWorkspace=input_workspaces[0],
                                 NumberOfPeaks=0,
                                 OutputWorkspace=peaks_ws_name)
            for peak_ws in output_workspaces:
                CombinePeaksWorkspaces(peaks_ws_name, peak_ws, OutputWorkspace=peaks_ws_name)

        if lattice:
            FindUBUsingLatticeParameters(PeaksWorkspace=peaks_ws_name, a=a, b=b, c=c, alpha=alpha, beta=beta, gamma=gamma)
        else:
            FindUBUsingFFT(PeaksWorkspace=peaks_ws_name, MinD=3, MaxD=20)

        ShowPossibleCells(PeaksWorkspace=peaks_ws_name)
        SelectCellOfType(PeaksWorkspace=peaks_ws_name, CellType=cell_type, Centering=centering, Apply=True)
        OptimizeLatticeForCellType(PeaksWorkspace=peaks_ws_name, CellType=cell_type, Apply=True)

        if multi_ws:
            for out_ws in output_workspaces:
                CopySample(InputWorkspace=peaks_ws_name,
                           OutputWorkspace=out_ws,
                           CopyName=False,
                           CopyMaterial=False,
                           CopyEnvironment=False,
                           CopyShape=False,
                           CopyLattice=True)
                IndexPeaks(PeaksWorkspace=out_ws)

            GroupWorkspaces(output_workspaces, OutputWorkspace=output_workspace_name)
            DeleteWorkspace(peaks_ws_name)

        self.setProperty("OutputWorkspace", AnalysisDataService[output_workspace_name])
Example #17
    def test_HKL(self):
        md = CreateMDWorkspace(Dimensions=3,
                               Extents='0,10,0,10,0,10',
                               Names='H,K,L',
                               Units='r.l.u.,r.l.u.,r.l.u.',
                               Frames='HKL,HKL,HKL')

        pw_name = 'peaks_add_delete_test'
        CreatePeaksWorkspace(OutputType='LeanElasticPeak',
                             NumberOfPeaks=0,
                             OutputWorkspace=pw_name)
        SetUB(pw_name, 2 * np.pi, 2 * np.pi, 4 * np.pi, u='0,0,1', v='1,0,0')

        self.assertEqual(mtd[pw_name].getNumberPeaks(), 0)

        sliceViewer = SliceViewer(md)

        # select z=3.0 slice
        sliceViewer.view.dimensions.set_slicepoint((None, None, 3.0))

        # overlay_peaks_workspaces
        sliceViewer._create_peaks_presenter_if_necessary(
        ).overlay_peaksworkspaces([pw_name])

        # click the "Add Peaks" button
        sliceViewer.view.peaks_view.peak_actions_view.ui.add_peaks_button.click(
        )

        # click on 3 different points on the canvas
        sliceViewer.canvas_clicked(
            Mock(inaxes=True, xdata=1.0,
                 ydata=2.0))  # should add a peak at HKL=(1, 2, 3)
        sliceViewer.canvas_clicked(
            Mock(inaxes=True, xdata=2.0,
                 ydata=2.0))  # should add a peak at HKL=(2, 2, 3)
        sliceViewer.canvas_clicked(
            Mock(inaxes=True, xdata=1.5,
                 ydata=1.5))  # should add a peak at HKL=(1.5, 1.5, 3)

        # peaks should be added
        self.assertEqual(mtd[pw_name].getNumberPeaks(), 3)

        # (1, 2, 3)
        peak = mtd[pw_name].getPeak(0)
        q_sample = peak.getQSampleFrame()
        self.assertAlmostEqual(q_sample[0], 1.0, delta=1e-10)
        self.assertAlmostEqual(q_sample[1], 2.0, delta=1e-10)
        self.assertAlmostEqual(q_sample[2], 1.5, delta=1e-10)
        self.assertAlmostEqual(peak.getH(), 1, delta=1e-10)
        self.assertAlmostEqual(peak.getK(), 2, delta=1e-10)
        self.assertAlmostEqual(peak.getL(), 3, delta=1e-10)

        # (2, 2, 3)
        peak = mtd[pw_name].getPeak(1)
        q_sample = peak.getQSampleFrame()
        self.assertAlmostEqual(q_sample[0], 2.0, delta=1e-10)
        self.assertAlmostEqual(q_sample[1], 2.0, delta=1e-10)
        self.assertAlmostEqual(q_sample[2], 1.5, delta=1e-10)
        self.assertAlmostEqual(peak.getH(), 2, delta=1e-10)
        self.assertAlmostEqual(peak.getK(), 2, delta=1e-10)
        self.assertAlmostEqual(peak.getL(), 3, delta=1e-10)

        # (1.5, 1.5, 3)
        peak = mtd[pw_name].getPeak(2)
        q_sample = peak.getQSampleFrame()
        self.assertAlmostEqual(q_sample[0], 1.5, delta=1e-10)
        self.assertAlmostEqual(q_sample[1], 1.5, delta=1e-10)
        self.assertAlmostEqual(q_sample[2], 1.5, delta=1e-10)
        self.assertAlmostEqual(peak.getH(), 1.5, delta=1e-10)
        self.assertAlmostEqual(peak.getK(), 1.5, delta=1e-10)
        self.assertAlmostEqual(peak.getL(), 3, delta=1e-10)

        # click the "Remove Peaks" button
        sliceViewer.view.peaks_view.peak_actions_view.ui.remove_peaks_button.click(
        )

        sliceViewer.canvas_clicked(Mock(
            inaxes=True, xdata=2.0,
            ydata=1.9))  # should remove the peak closest to HKL=(2, 1.9, 3)

        self.assertEqual(mtd[pw_name].getNumberPeaks(), 2)

        # should have deleted the (2, 2, 3) peak, leaving (1, 2, 3) and (1.5, 1.5, 3)

        # (1, 2, 3)
        peak = mtd[pw_name].getPeak(0)
        q_sample = peak.getQSampleFrame()
        self.assertAlmostEqual(q_sample[0], 1.0, delta=1e-10)
        self.assertAlmostEqual(q_sample[1], 2.0, delta=1e-10)
        self.assertAlmostEqual(q_sample[2], 1.5, delta=1e-10)
        self.assertAlmostEqual(peak.getH(), 1, delta=1e-10)
        self.assertAlmostEqual(peak.getK(), 2, delta=1e-10)
        self.assertAlmostEqual(peak.getL(), 3, delta=1e-10)

        # (1.5, 1.5, 3)
        peak = mtd[pw_name].getPeak(1)
        q_sample = peak.getQSampleFrame()
        self.assertAlmostEqual(q_sample[0], 1.5, delta=1e-10)
        self.assertAlmostEqual(q_sample[1], 1.5, delta=1e-10)
        self.assertAlmostEqual(q_sample[2], 1.5, delta=1e-10)
        self.assertAlmostEqual(peak.getH(), 1.5, delta=1e-10)
        self.assertAlmostEqual(peak.getK(), 1.5, delta=1e-10)
        self.assertAlmostEqual(peak.getL(), 3, delta=1e-10)
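
A hedged arithmetic note on the SetUB call above: with a = b = 2*pi and c = 4*pi, Q_sample = 2*pi * UB * hkl maps H and K to Q components of the same value and L to half its value (up to the u, v orientation), which is why HKL = (1, 2, 3) lands at Q_sample = (1, 2, 1.5).

import numpy as np

a, b, c = 2 * np.pi, 2 * np.pi, 4 * np.pi
B = np.diag([1 / a, 1 / b, 1 / c])  # orthorhombic B matrix
print(2 * np.pi * B @ np.array([1, 2, 3]))  # [1.  2.  1.5]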
Example #18
omega = np.deg2rad(42)  # goniometer angle; matches the YZY[0] assert below

R = np.array([[np.cos(omega), 0, np.sin(omega)], [0, 1, 0],
              [-np.sin(omega), 0, np.cos(omega)]])

wl = 1.54
k = 2 * np.pi / wl
theta = np.deg2rad(47)
phi = np.deg2rad(13)

q_lab = np.array([
    -np.sin(theta) * np.cos(phi), -np.sin(theta) * np.sin(phi),
    1 - np.cos(theta)
]) * k

q_sample = np.dot(np.linalg.inv(R), q_lab)

peaks = CreatePeaksWorkspace(OutputType="LeanElasticPeak", NumberOfPeaks=0)

p = peaks.createPeakQSample(q_sample)
peaks.addPeak(p)

HFIRCalculateGoniometer(peaks, wl, OverrideProperty=True, InnerGoniometer=True)

g = Goniometer()
g.setR(peaks.getPeak(0).getGoniometerMatrix())
print(g.getEulerAngles('YZY'))
assert np.isclose(g.getEulerAngles('YZY')[0], 42)

chi = np.deg2rad(-3)
phi = np.deg2rad(23)

R1 = np.array([
    [np.cos(omega), 0, -np.sin(omega)],  # omega 0,1,0,-1
    [0, 1, 0],
    [np.sin(omega), 0, np.cos(omega)]
])
Example #19
    def test_Q_Sample(self):
        md = CreateMDWorkspace(Dimensions=3,
                               Extents='0,10,0,10,0,10',
                               Names='x,y,z',
                               Units='r.l.u.,r.l.u.,r.l.u.',
                               Frames='QSample,QSample,QSample')

        pw_name = 'peaks_add_delete_test'
        CreatePeaksWorkspace(OutputType='LeanElasticPeak',
                             NumberOfPeaks=0,
                             OutputWorkspace=pw_name)

        self.assertEqual(mtd[pw_name].getNumberPeaks(), 0)

        sliceViewer = SliceViewer(md)

        # select z=3.0 slice
        sliceViewer.view.dimensions.set_slicepoint((None, None, 3.0))

        event = Mock(inaxes=True, xdata=1.0, ydata=2.0)

        sliceViewer.canvas_clicked(event)

        # nothing should happen since peaksviewer isn't active
        self.assertEqual(mtd[pw_name].getNumberPeaks(), 0)

        # overlay_peaks_workspaces
        sliceViewer._create_peaks_presenter_if_necessary(
        ).overlay_peaksworkspaces([pw_name])

        sliceViewer.canvas_clicked(event)

        # nothing should happen since add/remove peak action not selected
        self.assertEqual(mtd[pw_name].getNumberPeaks(), 0)

        # click the "Add Peaks" button
        sliceViewer.view.peaks_view.peak_actions_view.ui.add_peaks_button.click(
        )

        sliceViewer.canvas_clicked(event)  # should add a peak at (1, 2, 3)

        # peak should now be added
        self.assertEqual(mtd[pw_name].getNumberPeaks(), 1)
        peak = mtd[pw_name].getPeak(0)
        q_sample = peak.getQSampleFrame()
        self.assertAlmostEqual(q_sample[0], 1.0, delta=1e-10)
        self.assertAlmostEqual(q_sample[1], 2.0, delta=1e-10)
        self.assertAlmostEqual(q_sample[2], 3.0, delta=1e-10)
        self.assertEqual(peak.getH(), 0)
        self.assertEqual(peak.getK(), 0)
        self.assertEqual(peak.getL(), 0)

        # change to x-z slice, y=4, check that the transform is working
        sliceViewer.view.dimensions.dims[2].y_clicked()
        sliceViewer.view.dimensions.set_slicepoint((None, 4.0, None))

        sliceViewer.canvas_clicked(event)  # should add a peak at (1, 4, 2)

        self.assertEqual(mtd[pw_name].getNumberPeaks(), 2)
        peak = mtd[pw_name].getPeak(1)
        q_sample = peak.getQSampleFrame()
        self.assertAlmostEqual(q_sample[0], 1.0, delta=1e-10)
        self.assertAlmostEqual(q_sample[1], 4.0, delta=1e-10)
        self.assertAlmostEqual(q_sample[2], 2.0, delta=1e-10)
        self.assertEqual(peak.getH(), 0)
        self.assertEqual(peak.getK(), 0)
        self.assertEqual(peak.getL(), 0)

        # click the "Remove Peaks" button
        sliceViewer.view.peaks_view.peak_actions_view.ui.remove_peaks_button.click(
        )

        sliceViewer.view.dimensions.set_slicepoint((None, 0.0, None))
        event = Mock(inaxes=True, xdata=1.0, ydata=1.0)
        sliceViewer.canvas_clicked(
            event)  # should remove the peak closest to (1, 0, 1)

        self.assertEqual(mtd[pw_name].getNumberPeaks(), 1)

        # should be left with the second peak that was added
        peak = mtd[pw_name].getPeak(0)
        q_sample = peak.getQSampleFrame()
        self.assertAlmostEqual(q_sample[0], 1.0, delta=1e-10)
        self.assertAlmostEqual(q_sample[1], 4.0, delta=1e-10)
        self.assertAlmostEqual(q_sample[2], 2.0, delta=1e-10)
        self.assertEqual(peak.getH(), 0)
        self.assertEqual(peak.getK(), 0)
        self.assertEqual(peak.getL(), 0)

        # remove another peak; none should remain
        sliceViewer.canvas_clicked(
            event)  # should remove the peak closest to (1, 0, 1)
        self.assertEqual(mtd[pw_name].getNumberPeaks(), 0)
Example #20
    def PyExec(self):
        # create peaks workspace to store linked peaks
        linked_peaks = CreatePeaksWorkspace(
            InstrumentWorkspace=self._workspace,
            NumberOfPeaks=0,
            StoreInADS=False)

        # create peaks table to store linked predicted peaks
        linked_peaks_predicted = CreatePeaksWorkspace(
            InstrumentWorkspace=self._workspace,
            NumberOfPeaks=0,
            StoreInADS=False)

        for m in range(0, self._iterations):
            if m == 0:
                predictor = self._predicted_peaks
            if m > 0:
                predictor = linked_peaks_predicted

            qtol_var = self._qtol * self._qdecrement**m
            num_peaks_var = self._num_peaks + self._peak_increment * m

            # add q_lab and dspacing values of found peaks to a list
            qlabs_observed = np.array(self._observed_peaks.column("QLab"))
            dspacings_observed = np.array(
                self._observed_peaks.column("DSpacing"))

            # sort the predicted peaks from largest to smallest dspacing
            qlabs_predicted = np.array(predictor.column("QLab"))
            dspacings_predicted = np.array(predictor.column("DSpacing"))

            # get the indexing list that sorts dspacing from largest to
            # smallest
            hkls = np.array([[p.getH(), p.getK(), p.getL()]
                             for p in predictor])
            idx = dspacings_predicted.argsort()[::-1]
            HKL_predicted = hkls[idx, :]

            # sort q, d and h, k, l by this indexing
            qlabs_predicted = qlabs_predicted[idx]
            dspacings_predicted = dspacings_predicted[idx]

            q_ordered = qlabs_predicted[:num_peaks_var]
            d_ordered = dspacings_predicted[:num_peaks_var]
            HKL_ordered = HKL_predicted[:num_peaks_var]

            # loop through the ordered found peaks and compare q and d to each
            # predicted peak; if the q and d values of a found peak match a
            # predicted peak within tolerance, the found peak inherits
            # the HKL of the predicted peak
            for i in range(len(qlabs_observed)):
                qx_obs, qy_obs, qz_obs = qlabs_observed[i]
                q_obs = V3D(qx_obs, qy_obs, qz_obs)
                p_obs = linked_peaks.createPeak(q_obs)
                d_obs = dspacings_observed[i]

                for j in range(len(q_ordered)):
                    qx_pred, qy_pred, qz_pred = q_ordered[j]
                    d_pred = d_ordered[j]

                    if (qx_pred - qtol_var <= qx_obs <= qx_pred + qtol_var and
                            qy_pred - qtol_var <= qy_obs <= qy_pred + qtol_var
                            and
                            qz_pred - qtol_var <= qz_obs <= qz_pred + qtol_var
                            and d_pred - self._dtol <= d_obs <=
                            d_pred + self._dtol):
                        h, k, l = HKL_ordered[j]
                        p_obs.setHKL(h, k, l)
                        linked_peaks.addPeak(p_obs)

            # Clean up peaks where H == K == L == 0
            linked_peaks = FilterPeaks(linked_peaks,
                                       FilterVariable="h^2+k^2+l^2",
                                       Operator="!=",
                                       FilterValue="0")

            # force UB on linked_peaks using known lattice parameters
            CalculateUMatrix(PeaksWorkspace=linked_peaks,
                             a=self._a,
                             b=self._b,
                             c=self._c,
                             alpha=self._alpha,
                             beta=self._beta,
                             gamma=self._gamma,
                             StoreInADS=False)

            # new linked predicted peaks
            linked_peaks_predicted = PredictPeaks(
                InputWorkspace=linked_peaks,
                WavelengthMin=self._wavelength_min,
                WavelengthMax=self._wavelength_max,
                MinDSpacing=self._min_dspacing,
                MaxDSpacing=self._max_dspacing,
                ReflectionCondition=self._reflection_condition,
                StoreInADS=False)

        # clean up
        self.setProperty("LinkedPeaks", linked_peaks)
        self.setProperty("LinkedPredictedPeaks", linked_peaks_predicted)
        if mtd.doesExist("linked_peaks"):
            DeleteWorkspace(linked_peaks)
        if mtd.doesExist("linked_peaks_predicted"):
            DeleteWorkspace(linked_peaks_predicted)
        if self._delete_ws:
            DeleteWorkspace(self._workspace)
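
A hedged sketch of the matching rule implemented above: an observed peak is linked to a predicted one when every Q component agrees within qtol and the d-spacing agrees within dtol, i.e. an axis-aligned box test rather than a Euclidean distance.

import numpy as np

def matches(q_obs, q_pred, d_obs, d_pred, qtol, dtol):
    return bool(np.all(np.abs(q_obs - q_pred) <= qtol)
                and abs(d_obs - d_pred) <= dtol)

print(matches(np.array([1.0, 2.0, 3.0]), np.array([1.05, 1.98, 3.0]),
              1.20, 1.22, qtol=0.1, dtol=0.05))  # True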
Example #21
    def runTest(self):
        S = np.random.random(32 * 240 * 100)

        ConvertWANDSCDtoQTest_data = CreateMDHistoWorkspace(
            Dimensionality=3,
            Extents='0.5,32.5,0.5,240.5,0.5,100.5',
            SignalInput=S.ravel('F'),
            ErrorInput=np.sqrt(S.ravel('F')),
            NumberOfBins='32,240,100',
            Names='y,x,scanIndex',
            Units='bin,bin,number')

        ConvertWANDSCDtoQTest_dummy = CreateSingleValuedWorkspace()
        LoadInstrument(ConvertWANDSCDtoQTest_dummy,
                       InstrumentName='WAND',
                       RewriteSpectraMap=False)

        ConvertWANDSCDtoQTest_data.addExperimentInfo(
            ConvertWANDSCDtoQTest_dummy)

        log = FloatTimeSeriesProperty('s1')
        for t, v in zip(range(100), np.arange(0, 50, 0.5)):
            log.addValue(t, v)
        ConvertWANDSCDtoQTest_data.getExperimentInfo(0).run()['s1'] = log
        ConvertWANDSCDtoQTest_data.getExperimentInfo(0).run().addProperty(
            'duration', [60.] * 100, True)
        ConvertWANDSCDtoQTest_data.getExperimentInfo(0).run().addProperty(
            'monitor_count', [120000.] * 100, True)
        ConvertWANDSCDtoQTest_data.getExperimentInfo(0).run().addProperty(
            'twotheta', list(np.linspace(np.pi * 2 / 3, 0, 240).repeat(32)),
            True)
        ConvertWANDSCDtoQTest_data.getExperimentInfo(0).run().addProperty(
            'azimuthal', list(np.tile(np.linspace(-0.15, 0.15, 32), 240)),
            True)

        peaks = CreatePeaksWorkspace(NumberOfPeaks=0,
                                     OutputType='LeanElasticPeak')

        SetUB(ConvertWANDSCDtoQTest_data,
              5,
              5,
              7,
              90,
              90,
              120,
              u=[-1, 0, 1],
              v=[1, 0, 1])
        SetGoniometer(ConvertWANDSCDtoQTest_data,
                      Axis0='s1,0,1,0,1',
                      Average=False)

        CopySample(InputWorkspace=ConvertWANDSCDtoQTest_data,
                   OutputWorkspace=peaks,
                   CopyName=False,
                   CopyMaterial=False,
                   CopyEnvironment=False,
                   CopyShape=False,
                   CopyLattice=True)

        Q = ConvertWANDSCDtoQ(InputWorkspace=ConvertWANDSCDtoQTest_data,
                              UBWorkspace=peaks,
                              Wavelength=1.486,
                              Frame='HKL',
                              Uproj='1,1,0',
                              Vproj='-1,1,0',
                              BinningDim0='-6.04,6.04,151',
                              BinningDim1='-6.04,6.04,151',
                              BinningDim2='-6.04,6.04,151')

        data_norm = ConvertHFIRSCDtoMDE(ConvertWANDSCDtoQTest_data,
                                        Wavelength=1.486,
                                        MinValues='-6.04,-6.04,-6.04',
                                        MaxValues='6.04,6.04,6.04')

        HKL = ConvertQtoHKLMDHisto(data_norm,
                                   PeaksWorkspace=peaks,
                                   Uproj='1,1,0',
                                   Vproj='-1,1,0',
                                   Extents='-6.04,6.04,-6.04,6.04,-6.04,6.04',
                                   Bins='151,151,151')

        for i in range(HKL.getNumDims()):
            print(HKL.getDimension(i).getUnits(), Q.getDimension(i).getUnits())
            np.testing.assert_equal(
                HKL.getDimension(i).getUnits(),
                Q.getDimension(i).getUnits())

        hkl_data = mtd["HKL"].getSignalArray()
        Q_data = mtd["Q"].getSignalArray()

        print(np.isnan(Q_data).sum())
        print(np.isclose(hkl_data, 0).sum())

        xaxis = mtd["HKL"].getXDimension()
        yaxis = mtd["HKL"].getYDimension()
        zaxis = mtd["HKL"].getZDimension()

        x, y, z = np.meshgrid(
            np.linspace(xaxis.getMinimum(), xaxis.getMaximum(),
                        xaxis.getNBins()),
            np.linspace(yaxis.getMinimum(), yaxis.getMaximum(),
                        yaxis.getNBins()),
            np.linspace(zaxis.getMinimum(), zaxis.getMaximum(),
                        zaxis.getNBins()),
            indexing="ij",
            copy=False,
        )

        print(
            x[~np.isnan(Q_data)].mean(),
            y[~np.isnan(Q_data)].mean(),
            z[~np.isnan(Q_data)].mean(),
        )
        print(
            x[~np.isclose(hkl_data, 0)].mean(),
            y[~np.isclose(hkl_data, 0)].mean(),
            z[~np.isclose(hkl_data, 0)].mean(),
        )
        np.testing.assert_almost_equal(x[~np.isnan(Q_data)].mean(),
                                       x[~np.isclose(hkl_data, 0)].mean(),
                                       decimal=2)
        np.testing.assert_almost_equal(y[~np.isnan(Q_data)].mean(),
                                       y[~np.isclose(hkl_data, 0)].mean(),
                                       decimal=2)
        np.testing.assert_almost_equal(z[~np.isnan(Q_data)].mean(),
                                       z[~np.isclose(hkl_data, 0)].mean(),
                                       decimal=1)
Example #22
    def PyExec(self):
        # create peaks workspace to store linked peaks
        linked_peaks = CreatePeaksWorkspace(
            InstrumentWorkspace=self._workspace,
            NumberOfPeaks=0,
            StoreInADS=False)

        # create peaks table to store linked predicted peaks
        linked_peaks_predicted = CreatePeaksWorkspace(
            InstrumentWorkspace=self._workspace,
            NumberOfPeaks=0,
            StoreInADS=False)

        for m in range(0, self._iterations):
            if m == 0:
                predictor = self._predicted_peaks
            if m > 0:
                predictor = linked_peaks_predicted

            qtol_var = self._qtol * self._qdecrement**m
            num_peaks_var = self._num_peaks + self._peak_increment * m

            # add q_lab and dspacing values of found peaks to a list
            qlabs_observed = np.array(self._observed_peaks.column(15))
            dspacings_observed = np.array(self._observed_peaks.column(8))

            # sort the predicted peaks from largest to smallest dspacing
            qlabs_predicted = np.array(predictor.column(15))
            dspacings_predicted = np.array(predictor.column(8))

            # get the indexing list that sorts dspacing from largest to
            # smallest
            hkls = np.array([[p['h'], p['k'], p['l']] for p in predictor])
            idx = dspacings_predicted.argsort()[::-1]
            HKL_predicted = hkls[idx, :]

            # sort q, d and h, k, l by this indexing
            qlabs_predicted = qlabs_predicted[idx]
            dspacings_predicted = dspacings_predicted[idx]

            q_ordered = qlabs_predicted[:num_peaks_var]
            d_ordered = dspacings_predicted[:num_peaks_var]
            HKL_ordered = HKL_predicted[:num_peaks_var]

            # loop through the ordered found peaks and compare q and d to each
            # predicted peak; if the q and d values of a found peak match a
            # predicted peak within tolerance, the found peak inherits
            # the HKL of the predicted peak
            for i in range(len(qlabs_observed)):
                qx_obs, qy_obs, qz_obs = qlabs_observed[i]
                q_obs = V3D(qx_obs, qy_obs, qz_obs)
                p_obs = linked_peaks.createPeak(q_obs)
                d_obs = dspacings_observed[i]

                for j in range(len(q_ordered)):
                    qx_pred, qy_pred, qz_pred = q_ordered[j]
                    d_pred = d_ordered[j]

                    if (qx_pred - qtol_var <= qx_obs <= qx_pred +
                        qtol_var and qy_pred - qtol_var <= qy_obs <= qy_pred +
                        qtol_var and qz_pred - qtol_var <= qz_obs <= qz_pred +
                        qtol_var and d_pred - self._dtol <= d_obs <= d_pred +
                            self._dtol):
                        h, k, l = HKL_ordered[j]
                        p_obs.setHKL(h, k, l)
                        linked_peaks.addPeak(p_obs)

            # Clean up peaks where H == K == L == 0
            linked_peaks = FilterPeaks(linked_peaks,
                                       FilterVariable="h^2+k^2+l^2",
                                       Operator="!=",
                                       FilterValue="0")

            # force UB on linked_peaks using known lattice parameters
            CalculateUMatrix(PeaksWorkspace=linked_peaks,
                             a=self._a,
                             b=self._b,
                             c=self._c,
                             alpha=self._alpha,
                             beta=self._beta,
                             gamma=self._gamma,
                             StoreInADS=False)

            # new linked predicted peaks
            linked_peaks_predicted = PredictPeaks(
                InputWorkspace=linked_peaks,
                WavelengthMin=self._wavelength_min,
                WavelengthMax=self._wavelength_max,
                MinDSpacing=self._min_dspacing,
                MaxDSpacing=self._max_dspacing,
                ReflectionCondition=self._reflection_condition,
                StoreInADS=False)

        # clean up
        self.setProperty("LinkedPeaks", linked_peaks)
        self.setProperty("LinkedPredictedPeaks", linked_peaks_predicted)
        if mtd.doesExist("linked_peaks"):
            DeleteWorkspace(linked_peaks)
        if mtd.doesExist("linked_peaks_predicted"):
            DeleteWorkspace(linked_peaks_predicted)
        if self._delete_ws:
            DeleteWorkspace(self._workspace)
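
A hedged cross-reference: this is an older variant of Example #20. The numeric column(15) and column(8) lookups appear to address the same QLab and DSpacing columns that the newer code fetches by name, and p['h'], p['k'], p['l'] read the row values accessed there through getH(), getK() and getL().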
Example #23
    def PyExec(self):
        input_workspaces = self._expand_groups()
        outWS = self.getPropertyValue("OutputWorkspace")
        CreatePeaksWorkspace(OutputType='LeanElasticPeak',
                             InstrumentWorkspace=input_workspaces[0],
                             NumberOfPeaks=0,
                             OutputWorkspace=outWS,
                             EnableLogging=False)

        method = self.getProperty("Method").value
        n_bkgr_pts = self.getProperty("NumBackgroundPts").value
        n_fwhm = self.getProperty("WidthScale").value
        scale = self.getProperty("ScaleFactor").value
        chisqmax = self.getProperty("ChiSqMax").value
        signalNoiseMin = self.getProperty("SignalNoiseMin").value
        ll = self.getProperty("LowerLeft").value
        ur = self.getProperty("UpperRight").value
        startX = self.getProperty('StartX').value
        endX = self.getProperty('EndX').value
        use_lorentz = self.getProperty("ApplyLorentz").value
        optimize_q = self.getProperty("OptimizeQVector").value
        output_fit = self.getProperty("OutputFitResults").value

        if output_fit and method != "Counts":
            fit_results = WorkspaceGroup()
            AnalysisDataService.addOrReplace(outWS + "_fit_results",
                                             fit_results)

        for inWS in input_workspaces:
            tmp_inWS = '__tmp_' + inWS
            IntegrateMDHistoWorkspace(InputWorkspace=inWS,
                                      P1Bin=f'{ll[1]},{ur[1]}',
                                      P2Bin=f'{ll[0]},{ur[0]}',
                                      OutputWorkspace=tmp_inWS,
                                      EnableLogging=False)
            ConvertMDHistoToMatrixWorkspace(tmp_inWS,
                                            OutputWorkspace=tmp_inWS,
                                            EnableLogging=False)
            data = ConvertToPointData(tmp_inWS,
                                      OutputWorkspace=tmp_inWS,
                                      EnableLogging=False)

            run = mtd[inWS].getExperimentInfo(0).run()
            scan_log = 'omega' if np.isclose(run.getTimeAveragedStd('phi'),
                                             0.0) else 'phi'
            scan_axis = run[scan_log].value
            scan_step = (scan_axis[-1] - scan_axis[0]) / (scan_axis.size - 1)
            data.setX(0, scan_axis)

            y = data.extractY().flatten()
            x = data.extractX().flatten()

            __tmp_pw = CreatePeaksWorkspace(OutputType='LeanElasticPeak',
                                            InstrumentWorkspace=inWS,
                                            NumberOfPeaks=0,
                                            EnableLogging=False)

            if method != "Counts":
                # fit against gaussian with flat background for both the Fitted and CountsWithFitting methods
                fit_result = self._fit_gaussian(inWS, data, x, y, startX, endX,
                                                output_fit)

                if fit_result and fit_result.OutputStatus == 'success' and fit_result.OutputChi2overDoF < chisqmax:
                    B, A, peak_centre, sigma, _ = fit_result.OutputParameters.toDict(
                    )['Value']
                    _, errA, _, errs, _ = fit_result.OutputParameters.toDict(
                    )['Error']

                    if method == "Fitted":
                        integrated_intensity = A * sigma * np.sqrt(2 * np.pi)

                        # Convert correlation back into covariance
                        cor_As = (
                            fit_result.OutputNormalisedCovarianceMatrix.cell(
                                1, 4) / 100 *
                            fit_result.OutputParameters.cell(1, 2) *
                            fit_result.OutputParameters.cell(3, 2))
                        # σ^2 = 2π (A^2 σ_s^2 + σ_A^2 s^2 + 2 A s σ_As)
                        integrated_intensity_error = np.sqrt(
                            2 * np.pi * (A**2 * errs**2 + sigma**2 * errA**2 +
                                         2 * A * sigma * cor_As))
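                        # Fit's OutputNormalisedCovarianceMatrix stores
                        # 100*cov(i, j)/(err_i*err_j); cell(1, 4) is the
                        # Height-Sigma entry and OutputParameters.cell(n, 2) is
                        # the error column, so the product above recovers the
                        # covariance cov(A, s) needed by the propagation formula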

                    elif method == "CountsWithFitting":
                        half_width = 2.3548 * sigma * n_fwhm / 2
                        y = y[np.searchsorted(x, peak_centre - half_width):
                              np.searchsorted(x, peak_centre + half_width)]
                        # subtract out the fitted flat background
                        integrated_intensity = (y.sum() -
                                                B * y.size) * scan_step
                        integrated_intensity_error = np.sum(
                            np.sqrt(y)) * scan_step
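                        # note: this sums sqrt(y) point by point, a conservative
                        # upper bound; strict Poisson counting statistics would
                        # give sqrt(y.sum()) instead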

                    # update the goniometer position based on the fitted peak center
                    if scan_log == 'omega':
                        SetGoniometer(Workspace=__tmp_pw,
                                      Axis0=f'{peak_centre},0,1,0,-1',
                                      Axis1='chi,0,0,1,-1',
                                      Axis2='phi,0,1,0,-1',
                                      EnableLogging=False)
                    else:
                        SetGoniometer(Workspace=__tmp_pw,
                                      Axis0='omega,0,1,0,-1',
                                      Axis1='chi,0,0,1,-1',
                                      Axis2=f'{peak_centre},0,1,0,-1',
                                      EnableLogging=False)
                else:
                    # _fit_gaussian may return nothing on failure, so guard
                    # before reading the fit output
                    if fit_result:
                        self.log().warning(
                            "Failed to fit workspace {}: Output Status={}, ChiSq={}"
                            .format(inWS, fit_result.OutputStatus,
                                    fit_result.OutputChi2overDoF))
                    self._delete_tmp_workspaces(str(__tmp_pw), tmp_inWS)
                    continue
            else:
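                # _counts_integration is not shown in this excerpt; from its
                # arguments it presumably sums the scan counts and subtracts a
                # flat background estimated from NumBackgroundPts points at the
                # ends of the scan (an assumption about the helper)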
                integrated_intensity, integrated_intensity_error = self._counts_integration(
                    data, n_bkgr_pts, scan_step)

                # set the goniometer position to use the average of the scan
                SetGoniometer(Workspace=__tmp_pw,
                              Axis0='omega,0,1,0,-1',
                              Axis1='chi,0,0,1,-1',
                              Axis2='phi,0,1,0,-1',
                              EnableLogging=False)

            integrated_intensity *= scale
            integrated_intensity_error *= scale

            peak = __tmp_pw.createPeakHKL([
                run['h'].getStatistics().median,
                run['k'].getStatistics().median,
                run['l'].getStatistics().median
            ])
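            # (medians of the h, k, l sample logs are used since the logs can
            # vary over the scan; they give the nominal HKL for this reflection)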
            peak.setWavelength(float(run['wavelength'].value))
            peak.setIntensity(integrated_intensity)
            peak.setSigmaIntensity(integrated_intensity_error)

            if integrated_intensity / integrated_intensity_error > signalNoiseMin:
                __tmp_pw.addPeak(peak)

                # correct q-vector using CentroidPeaksMD
                if optimize_q:
                    __tmp_q_ws = HB3AAdjustSampleNorm(InputWorkspaces=inWS,
                                                      NormaliseBy='None',
                                                      EnableLogging=False)
                    __tmp_pw = CentroidPeaksMD(__tmp_q_ws,
                                               __tmp_pw,
                                               EnableLogging=False)
                    DeleteWorkspace(__tmp_q_ws, EnableLogging=False)

                if use_lorentz:
                    # ILL Neutron Data Booklet, Second Edition, Section 2.9, Part 4.1, Equation 7
                    peak = __tmp_pw.getPeak(0)
                    lorentz = abs(
                        np.sin(peak.getScattering() *
                               np.cos(peak.getAzimuthal())))
                    peak.setIntensity(peak.getIntensity() * lorentz)
                    peak.setSigmaIntensity(peak.getSigmaIntensity() * lorentz)

                CombinePeaksWorkspaces(outWS,
                                       __tmp_pw,
                                       OutputWorkspace=outWS,
                                       EnableLogging=False)

                if output_fit and method != "Counts":
                    fit_results.addWorkspace(
                        RenameWorkspace(tmp_inWS + '_Workspace',
                                        outWS + "_" + inWS + '_Workspace',
                                        EnableLogging=False))
                    fit_results.addWorkspace(
                        RenameWorkspace(tmp_inWS + '_Parameters',
                                        outWS + "_" + inWS + '_Parameters',
                                        EnableLogging=False))
                    fit_results.addWorkspace(
                        RenameWorkspace(
                            tmp_inWS + '_NormalisedCovarianceMatrix',
                            outWS + "_" + inWS + '_NormalisedCovarianceMatrix',
                            EnableLogging=False))
                    fit_results.addWorkspace(
                        IntegrateMDHistoWorkspace(
                            InputWorkspace=inWS,
                            P1Bin=f'{ll[1]},0,{ur[1]}',
                            P2Bin=f'{ll[0]},0,{ur[0]}',
                            P3Bin='0,{}'.format(
                                mtd[inWS].getDimension(2).getNBins()),
                            OutputWorkspace=outWS + "_" + inWS + "_ROI",
                            EnableLogging=False))
            else:
                self.log().warning(
                    "Skipping peak from {} because Signal/Noise={:.3f} is below the minimum of {}"
                    .format(inWS,
                            integrated_intensity / integrated_intensity_error,
                            signalNoiseMin))

            self._delete_tmp_workspaces(str(__tmp_pw), tmp_inWS)

        self.setProperty("OutputWorkspace", mtd[outWS])
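
Because PyExec reads every input from algorithm properties, a short driver script shows how an algorithm built around it would be called once registered with Mantid. The sketch below is illustrative only: the registration name HB3AIntegrateDetectorPeaks and the workspace name scan_0001_MD are assumptions, not taken from the example.

# minimal usage sketch -- algorithm and workspace names are assumptions
from mantid.simpleapi import HB3AIntegrateDetectorPeaks  # assumed registration name

peaks = HB3AIntegrateDetectorPeaks(
    InputWorkspace='scan_0001_MD',   # hypothetical detector-space MDHisto workspace
    Method='Fitted',                 # or 'Counts' / 'CountsWithFitting'
    ChiSqMax=10.0,                   # discard fits with chi^2/DoF above this
    SignalNoiseMin=1.0,              # discard peaks with I/sigma(I) below this
    ApplyLorentz=True,
    OutputFitResults=True,
    OutputWorkspace='integrated_peaks')
print(peaks.getNumberPeaks())

The next snippet is an earlier variant of the same PyExec: it always fits a Gaussian (there is no Method/NumBackgroundPts option) and performs the fit inline rather than through a helper.
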
    def PyExec(self):
        input_workspaces = self._expand_groups()
        outWS = self.getPropertyValue("OutputWorkspace")
        CreatePeaksWorkspace(OutputType='LeanElasticPeak',
                             InstrumentWorkspace=input_workspaces[0],
                             NumberOfPeaks=0,
                             OutputWorkspace=outWS,
                             EnableLogging=False)

        scale = self.getProperty("ScaleFactor").value
        chisqmax = self.getProperty("ChiSqMax").value
        signalNoiseMin = self.getProperty("SignalNoiseMin").value
        ll = self.getProperty("LowerLeft").value
        ur = self.getProperty("UpperRight").value
        startX = self.getProperty('StartX').value
        endX = self.getProperty('EndX').value
        use_lorentz = self.getProperty("ApplyLorentz").value
        optimize_q = self.getProperty("OptimizeQVector").value
        output_fit = self.getProperty("OutputFitResults").value

        if output_fit:
            fit_results = WorkspaceGroup()
            AnalysisDataService.addOrReplace(outWS + "_fit_results",
                                             fit_results)

        for inWS in input_workspaces:
            tmp_inWS = '__tmp_' + inWS
            IntegrateMDHistoWorkspace(InputWorkspace=inWS,
                                      P1Bin=f'{ll[1]},{ur[1]}',
                                      P2Bin=f'{ll[0]},{ur[0]}',
                                      OutputWorkspace=tmp_inWS,
                                      EnableLogging=False)
            ConvertMDHistoToMatrixWorkspace(tmp_inWS,
                                            OutputWorkspace=tmp_inWS,
                                            EnableLogging=False)
            data = ConvertToPointData(tmp_inWS,
                                      OutputWorkspace=tmp_inWS,
                                      EnableLogging=False)

            run = mtd[inWS].getExperimentInfo(0).run()
            scan_log = 'omega' if np.isclose(run.getTimeAveragedStd('phi'),
                                             0.0) else 'phi'
            scan_axis = run[scan_log].value
            data.setX(0, scan_axis)

            y = data.extractY().flatten()
            x = data.extractX().flatten()
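            # seed the fit from the data itself: flat background at the minimum
            # count, Gaussian centred on the highest point with height max-min;
            # the constraints keep both amplitudes positive and the centre
            # inside the scanned range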
            function = f"name=FlatBackground, A0={np.nanmin(y)};" \
                f"name=Gaussian, PeakCentre={x[np.nanargmax(y)]}, Height={np.nanmax(y)-np.nanmin(y)}, Sigma=0.25"
            constraints = f"f0.A0 > 0, f1.Height > 0, {x.min()} < f1.PeakCentre < {x.max()}"
            try:
                fit_result = Fit(function,
                                 data,
                                 Output=str(data),
                                 IgnoreInvalidData=True,
                                 OutputParametersOnly=not output_fit,
                                 Constraints=constraints,
                                 StartX=startX,
                                 EndX=endX,
                                 EnableLogging=False)
            except RuntimeError as e:
                self.log().warning("Failed to fit workspace {}: {}".format(
                    inWS, e))
                continue

            if fit_result.OutputStatus == 'success' and fit_result.OutputChi2overDoF < chisqmax:
                __tmp_pw = CreatePeaksWorkspace(OutputType='LeanElasticPeak',
                                                InstrumentWorkspace=inWS,
                                                NumberOfPeaks=0,
                                                EnableLogging=False)

                _, A, peak_centre, sigma, _ = fit_result.OutputParameters.toDict()['Value']
                _, errA, _, errs, _ = fit_result.OutputParameters.toDict()['Error']

                # update the goniometer position based on the fitted peak centre
                if scan_log == 'omega':
                    SetGoniometer(Workspace=__tmp_pw,
                                  Axis0=f'{peak_centre},0,1,0,-1',
                                  Axis1='chi,0,0,1,-1',
                                  Axis2='phi,0,1,0,-1',
                                  EnableLogging=False)
                else:
                    SetGoniometer(Workspace=__tmp_pw,
                                  Axis0='omega,0,1,0,-1',
                                  Axis1='chi,0,0,1,-1',
                                  Axis2=f'{peak_centre},0,1,0,-1',
                                  EnableLogging=False)

                peak = __tmp_pw.createPeakHKL([
                    run['h'].getStatistics().median,
                    run['k'].getStatistics().median,
                    run['l'].getStatistics().median
                ])
                peak.setWavelength(float(run['wavelength'].value))

                integrated_intensity = A * sigma * np.sqrt(2 * np.pi) * scale
                peak.setIntensity(integrated_intensity)

                # Convert correlation back into covariance
                cor_As = (
                    fit_result.OutputNormalisedCovarianceMatrix.cell(1, 4) /
                    100 * fit_result.OutputParameters.cell(1, 2) *
                    fit_result.OutputParameters.cell(3, 2))
                # σ^2 = 2π (A^2 σ_s^2 + σ_A^2 s^2 + 2 A s σ_As)
                integrated_intensity_error = np.sqrt(
                    2 * np.pi * (A**2 * errs**2 + sigma**2 * errA**2 +
                                 2 * A * sigma * cor_As)) * scale
                peak.setSigmaIntensity(integrated_intensity_error)

                if integrated_intensity / integrated_intensity_error > signalNoiseMin:
                    __tmp_pw.addPeak(peak)

                    # correct q-vector using CentroidPeaksMD
                    if optimize_q:
                        __tmp_q_ws = HB3AAdjustSampleNorm(InputWorkspaces=inWS,
                                                          NormaliseBy='None',
                                                          EnableLogging=False)
                        __tmp_pw = CentroidPeaksMD(__tmp_q_ws,
                                                   __tmp_pw,
                                                   EnableLogging=False)
                        DeleteWorkspace(__tmp_q_ws, EnableLogging=False)

                    if use_lorentz:
                        # ILL Neutron Data Booklet, Second Edition, Section 2.9, Part 4.1, Equation 7
                        peak = __tmp_pw.getPeak(0)
                        lorentz = abs(
                            np.sin(peak.getScattering() *
                                   np.cos(peak.getAzimuthal())))
                        peak.setIntensity(peak.getIntensity() * lorentz)
                        peak.setSigmaIntensity(peak.getSigmaIntensity() *
                                               lorentz)

                    CombinePeaksWorkspaces(outWS,
                                           __tmp_pw,
                                           OutputWorkspace=outWS,
                                           EnableLogging=False)
                    DeleteWorkspace(__tmp_pw, EnableLogging=False)

                    if output_fit:
                        fit_results.addWorkspace(
                            RenameWorkspace(tmp_inWS + '_Workspace',
                                            outWS + "_" + inWS + '_Workspace',
                                            EnableLogging=False))
                        fit_results.addWorkspace(
                            RenameWorkspace(tmp_inWS + '_Parameters',
                                            outWS + "_" + inWS + '_Parameters',
                                            EnableLogging=False))
                        fit_results.addWorkspace(
                            RenameWorkspace(tmp_inWS +
                                            '_NormalisedCovarianceMatrix',
                                            outWS + "_" + inWS +
                                            '_NormalisedCovarianceMatrix',
                                            EnableLogging=False))
                        fit_results.addWorkspace(
                            IntegrateMDHistoWorkspace(
                                InputWorkspace=inWS,
                                P1Bin=f'{ll[1]},0,{ur[1]}',
                                P2Bin=f'{ll[0]},0,{ur[0]}',
                                P3Bin='0,{}'.format(
                                    mtd[inWS].getDimension(2).getNBins()),
                                OutputWorkspace=outWS + "_" + inWS + "_ROI",
                                EnableLogging=False))
                else:
                    self.log().warning(
                        "Skipping peak from {} because Signal/Noise={:.3f} is below the minimum of {}"
                        .format(
                            inWS,
                            integrated_intensity / integrated_intensity_error,
                            signalNoiseMin))
                    # the temporary peaks workspace was never merged, so clean
                    # it up here rather than leaking it into the ADS
                    DeleteWorkspace(__tmp_pw, EnableLogging=False)
            else:
                self.log().warning(
                    "Failed to fit workspace {}: Output Status={}, ChiSq={}".
                    format(inWS, fit_result.OutputStatus,
                           fit_result.OutputChi2overDoF))

            for tmp_ws in (tmp_inWS, tmp_inWS + '_Workspace',
                           tmp_inWS + '_Parameters',
                           tmp_inWS + '_NormalisedCovarianceMatrix'):
                if mtd.doesExist(tmp_ws):
                    DeleteWorkspace(tmp_ws, EnableLogging=False)

        self.setProperty("OutputWorkspace", mtd[outWS])