def convertToHKL(ws,
                 OutputWorkspace='__md_hkl',
                 norm=None,
                 UB=None,
                 Extents=[-10, 10, -10, 10, -10, 10],
                 Bins=[101, 101, 101],
                 Append=False):
    """Output MDHistoWorkspace binned in HKL (ConvertToMD followed by BinMD)"""
    SetUB(ws, UB=UB)
    ConvertToMD(ws,
                QDimensions='Q3D',
                QConversionScales='HKL',
                dEAnalysisMode='Elastic',
                Q3DFrames='HKL',
                OutputWorkspace='__temp')
    AlignedDim0 = "{},{},{},{}".format(mtd['__temp'].getDimension(0).name,
                                       Extents[0], Extents[1], int(Bins[0]))
    AlignedDim1 = "{},{},{},{}".format(mtd['__temp'].getDimension(1).name,
                                       Extents[2], Extents[3], int(Bins[1]))
    AlignedDim2 = "{},{},{},{}".format(mtd['__temp'].getDimension(2).name,
                                       Extents[4], Extents[5], int(Bins[2]))
    BinMD(InputWorkspace='__temp',
          TemporaryDataWorkspace=OutputWorkspace
          if Append and mtd.doesExist(OutputWorkspace) else None,
          OutputWorkspace=OutputWorkspace,
          AlignedDim0=AlignedDim0,
          AlignedDim1=AlignedDim1,
          AlignedDim2=AlignedDim2)
    DeleteWorkspace('__temp')
    if norm is not None:
        SetUB(norm, UB=UB)
        ConvertToMD(norm,
                    QDimensions='Q3D',
                    QConversionScales='HKL',
                    dEAnalysisMode='Elastic',
                    Q3DFrames='HKL',
                    OutputWorkspace='__temp_norm')
        BinMD(InputWorkspace='__temp_norm',
              TemporaryDataWorkspace=str(OutputWorkspace) + '_norm'
              if Append and mtd.doesExist(str(OutputWorkspace) + '_norm')
              else None,
              OutputWorkspace=str(OutputWorkspace) + '_norm',
              AlignedDim0=AlignedDim0,
              AlignedDim1=AlignedDim1,
              AlignedDim2=AlignedDim2)
        DeleteWorkspace('__temp_norm')
    return OutputWorkspace
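# --- Usage sketch (illustrative, not from the original source). Assumes
# `from mantid.simpleapi import *` and `import numpy as np`, as elsewhere in
# this file; the filenames and UB below are hypothetical placeholders.
def _example_convertToHKL_usage():
    data = Load('hypothetical_run_1.nxs')
    van = Load('hypothetical_vanadium.nxs')
    ub = np.diag([1.0 / 4.0, 1.0 / 4.0, 1.0 / 10.0])  # orthorhombic a=b=4, c=10
    # First call creates 'md_hkl' and, because norm is given, 'md_hkl_norm'
    convertToHKL(data, OutputWorkspace='md_hkl', norm=van, UB=ub,
                 Extents=[-5, 5, -5, 5, -5, 5], Bins=[51, 51, 51])
    # A second call with Append=True accumulates into the same histograms
    data2 = Load('hypothetical_run_2.nxs')
    convertToHKL(data2, OutputWorkspace='md_hkl', norm=van, UB=ub,
                 Extents=[-5, 5, -5, 5, -5, 5], Bins=[51, 51, 51], Append=True)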
def _create_peaks_workspace(self):
    """Create a dummy peaks workspace"""
    path = FileFinder.getFullPath(
        "IDFs_for_UNIT_TESTING/MINITOPAZ_Definition.xml")
    inst = LoadEmptyInstrument(Filename=path)
    ws = CreatePeaksWorkspace(inst, 0)
    DeleteWorkspace(inst)
    SetUB(ws, 1, 1, 1, 90, 90, 90)

    # Add a bunch of random peaks that happen to fall on the
    # detector bank defined in the IDF
    center_q = np.array([-5.1302, 2.5651, 3.71809])
    qs = []
    for i in np.arange(0, 1, 0.1):
        for j in np.arange(-0.5, 0, 0.1):
            q = center_q.copy()
            q[1] += j
            q[2] += i
            qs.append(q)
    # Add the peaks to the PeaksWorkspace with dummy values for intensity,
    # Sigma, and HKL
    for q in qs:
        peak = ws.createPeak(q)
        peak.setIntensity(100)
        peak.setSigmaIntensity(10)
        peak.setHKL(1, 1, 1)
        ws.addPeak(peak)
    return ws
def runTest(self):
    HelperTestingClass.__init__(self)
    limits = (-10.0, 10.0, -9.0, 9.0)
    ws_nonortho = CreateMDWorkspace(
        Dimensions=3,
        Extents=','.join([str(lim) for lim in limits]) + ',-8,8',
        Names='A,B,C',
        Units='r.l.u.,r.l.u.,r.l.u.',
        Frames='HKL,HKL,HKL')
    expt_info_nonortho = CreateSampleWorkspace()
    ws_nonortho.addExperimentInfo(expt_info_nonortho)
    SetUB(ws_nonortho, 1, 1, 2, 90, 90, 120)
    pres = SliceViewer(ws_nonortho)
    # assert limits of the orthogonal view
    limits_orthog = pres.view.data_view.get_axes_limits()
    self.assertEqual(limits_orthog[0], limits[0:2])
    self.assertEqual(limits_orthog[1], limits[2:])
    # set nonorthogonal view and retrieve new limits
    pres.nonorthogonal_axes(True)
    limits_nonorthog = pres.view.data_view.get_axes_limits()
    self.assertAlmostEqual(limits_nonorthog[0][0], -19, delta=1e-5)
    self.assertAlmostEqual(limits_nonorthog[0][1], 19, delta=1e-5)
    self.assertEqual(limits_nonorthog[1], limits[2:])
    pres.view.close()
def example_plots():
    # create slices and cuts
    w = Load(Filename='/SNS/HYS/IPTS-14189/shared/autoreduce/4pixel/HYS_102102_4pixel_spe.nxs')
    SetUB(w, 4.53, 4.53, 11.2, 90, 90, 90, "1,0,0", "0,0,1")
    mde = ConvertToMD(w, QDimensions='Q3D')
    sl1d = CutMD(InputWorkspace=mde,
                 P1Bin='-5,5',
                 P2Bin='-5,5',
                 P3Bin='2,4',
                 P4Bin='-10,0.5,15',
                 NoPix=True)
    sl2d = CutMD(InputWorkspace=mde,
                 P1Bin='-5,5',
                 P2Bin='-5,5',
                 P3Bin='-5,0.1,5',
                 P4Bin='-10,1,15',
                 NoPix=True)
    # 2 subplots per page
    fig, ax = plt.subplots(2, 1)
    Plot1DMD(ax[0], sl1d, NumEvNorm=True, fmt='ro')
    ax[0].set_ylabel("Int(a.u.)")
    pcm = Plot2DMD(ax[1], sl2d, NumEvNorm=True)
    fig.colorbar(pcm, ax=ax[1])
    # save to png
    plt.tight_layout(pad=1.08)
    fig.savefig('/tmp/test.png')
def setUpClass(cls):
    def gaussian(x, y, z, x0, y0, z0, ox, oy, oz, A):
        return A * np.exp(-(x - x0)**2 / (2 * ox**2)
                          - (y - y0)**2 / (2 * oy**2)
                          - (z - z0)**2 / (2 * oz**2))

    def peaks(i, j, k):
        return (gaussian(i, j, k, 16, 100, 50, 2, 2, 2, 20)
                + gaussian(i, j, k, 16, 150, 50, 1, 1, 1, 10))

    S = np.fromfunction(peaks, (32, 240, 100))
    ConvertWANDSCDtoQTest_data = CreateMDHistoWorkspace(
        Dimensionality=3,
        Extents='0.5,32.5,0.5,240.5,0.5,100.5',
        SignalInput=S.ravel('F'),
        ErrorInput=np.sqrt(S.ravel('F')),
        NumberOfBins='32,240,100',
        Names='y,x,scanIndex',
        Units='bin,bin,number')
    ConvertWANDSCDtoQTest_dummy = CreateSingleValuedWorkspace()
    ConvertWANDSCDtoQTest_data.addExperimentInfo(ConvertWANDSCDtoQTest_dummy)
    ConvertWANDSCDtoQTest_data.getExperimentInfo(0).run().addProperty(
        's1', list(np.arange(0, 50, 0.5)), True)
    ConvertWANDSCDtoQTest_data.getExperimentInfo(0).run().addProperty(
        'duration', [60.] * 100, True)
    ConvertWANDSCDtoQTest_data.getExperimentInfo(0).run().addProperty(
        'monitor_count', [120000.] * 100, True)
    ConvertWANDSCDtoQTest_data.getExperimentInfo(0).run().addProperty(
        'twotheta', list(np.linspace(np.pi * 2 / 3, 0, 240).repeat(32)), True)
    ConvertWANDSCDtoQTest_data.getExperimentInfo(0).run().addProperty(
        'azimuthal', list(np.tile(np.linspace(-0.15, 0.15, 32), 240)), True)
    SetUB(ConvertWANDSCDtoQTest_data, 5, 5, 7, 90, 90, 120,
          u=[-1, 0, 1], v=[1, 0, 1])
    # Create Normalisation workspace
    S = np.ones((32, 240, 1))
    ConvertWANDSCDtoQTest_norm = CreateMDHistoWorkspace(
        Dimensionality=3,
        Extents='0.5,32.5,0.5,240.5,0.5,1.5',
        SignalInput=S,
        ErrorInput=S,
        NumberOfBins='32,240,1',
        Names='y,x,scanIndex',
        Units='bin,bin,number')
    ConvertWANDSCDtoQTest_dummy2 = CreateSingleValuedWorkspace()
    ConvertWANDSCDtoQTest_norm.addExperimentInfo(ConvertWANDSCDtoQTest_dummy2)
    ConvertWANDSCDtoQTest_norm.getExperimentInfo(0).run().addProperty(
        'monitor_count', [100000.], True)
def setUpClass(cls): cls.histo_ws = CreateMDHistoWorkspace( Dimensionality=3, Extents='-3,3,-10,10,-1,1', SignalInput=range(100), ErrorInput=range(100), NumberOfBins='5,5,4', Names='Dim1,Dim2,Dim3', Units='MomentumTransfer,EnergyTransfer,Angstrom', OutputWorkspace='ws_MD_2d') cls.histo_ws_positive = CreateMDHistoWorkspace( Dimensionality=3, Extents='-3,3,-10,10,-1,1', SignalInput=range(1, 101), ErrorInput=range(100), NumberOfBins='5,5,4', Names='Dim1,Dim2,Dim3', Units='MomentumTransfer,EnergyTransfer,Angstrom', OutputWorkspace='ws_MD_2d_pos') cls.hkl_ws = CreateMDWorkspace(Dimensions=3, Extents='-10,10,-9,9,-8,8', Names='A,B,C', Units='r.l.u.,r.l.u.,r.l.u.', Frames='HKL,HKL,HKL', OutputWorkspace='hkl_ws') expt_info = CreateSampleWorkspace() cls.hkl_ws.addExperimentInfo(expt_info) SetUB('hkl_ws', 1, 1, 1, 90, 90, 90)
def findConsistentUB(self, ubFiles, zeroUB, matUB, omega, dOmega, omegaHand,
                     phiRef, dPhiRef, phiTol, phiHand, chiRef, chiTol,
                     gonioTable):
    # calculate the rotation matrix that maps the UB of the first run onto
    # subsequent UBs
    # get save directory
    saveDir = config['defaultsave.directory']
    tmpWS = CreateSampleWorkspace()
    for irun in range(1, len(omega)):
        chi, phi, u = self.getGonioAngles(matUB[irun], zeroUB, omega[irun])
        # phi relative to first run in RH/LH convention of user
        # check if phi and chi are not within tolerance of expected
        if abs(chi - chiRef) > chiTol and abs(phi - dPhiRef[irun]) > phiTol:
            # generate predicted UB to find axes permutation
            self.log().information(
                "The following UB\n{}\nis not consistent with the reference, "
                "attempting to find an axes swap/inversion that makes it "
                "consistent.".format(matUB[irun]))
            # nominal goniometer axis
            gonio = getR(omegaHand * dOmega, [0, 0, 1]) @ getR(
                -chiRef, [1, 0, 0]) @ [0, 0, 1]
            predictedUB = getR(omega[irun], [0, 0, 1]) @ getR(
                dPhiRef[irun], gonio) @ zeroUB
            # try a permutation of the UB axes (as in TransformHKL)
            # UB' = UB M^-1
            # HKL' = M HKL
            minv = np.linalg.inv(matUB[irun]) @ predictedUB
            minv = getSignMaxAbsValInCol(minv)
            # redo angle calculation on permuted UB
            matUB[irun] = matUB[irun] @ minv
            chi, phi, u = self.getGonioAngles(matUB[irun], zeroUB,
                                              omega[irun])
        if abs(chi - chiRef) <= chiTol and abs(phi - dPhiRef[irun]) <= phiTol:
            # save the consistent UB to the default save directory
            _, nameUB = path.split(ubFiles[irun])
            newUBPath = path.join(
                saveDir, nameUB[:-4] + '_consistent' + nameUB[-4:])
            # set as UB (converting back to non-IPNS convention)
            SetUB(tmpWS, UB=matUB[irun][[1, 2, 0], :])
            SaveIsawUB(tmpWS, newUBPath)
            # populate row of table
            phi2print = phiHand * (phi + phiRef[0])
            phi2print = phi2print + np.ceil(-phi2print / 360) * 360
            nextRow = {
                'Run': nameUB[:-4],
                'Chi': chi,
                'Phi': phi2print,
                'GonioAxis': V3D(u[0], u[1], u[2])
            }
            gonioTable.addRow(nextRow)
        else:
            warnings.warn(
                "The UB {0} cannot be made consistent with the reference UB. "
                "Check the goniometer angles and handedness supplied, and "
                "the accuracy of the reference UB.".format(ubFiles[irun]))
    DeleteWorkspace(tmpWS)
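# Minimal numpy sketch (added for illustration; not part of the algorithm
# above) of the identity behind the axes-permutation step: if
# UB' = UB @ inv(M) and HKL' = M @ HKL, then UB' @ HKL' = UB @ HKL, so Q is
# unchanged while the indexing convention is permuted.
def _example_axes_permutation():
    UB = np.diag([0.25, 0.2, 0.1])
    M = np.array([[0, 1, 0], [1, 0, 0], [0, 0, -1]])  # swap h,k; invert l
    hkl = np.array([1.0, 2.0, 3.0])
    UB_prime = UB @ np.linalg.inv(M)
    np.testing.assert_allclose(UB_prime @ (M @ hkl), UB @ hkl)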
def create_hkl_ws(): hkl_ws = CreateMDWorkspace(Dimensions=3, Extents='-10,10,-9,9,-8,8', Names='A,B,C', Units='r.l.u.,r.l.u.,r.l.u.', Frames='HKL,HKL,HKL', OutputWorkspace='hkl_ws') expt_info = CreateSampleWorkspace() hkl_ws.addExperimentInfo(expt_info) SetUB(hkl_ws, 1, 1, 1, 90, 90, 90) return hkl_ws
def test_lattice_accessors(self): instrument_ws = CreateSampleWorkspace() peaks = CreatePeaksWorkspace(instrument_ws, 0) SetUB(peaks, 1, 1, 1, 90, 90, 90) sample = peaks.sample() self.assertTrue(sample.hasOrientedLattice()) self.assertTrue( isinstance(sample.getOrientedLattice(), OrientedLattice)) sample.clearOrientedLattice() self.assertFalse(sample.hasOrientedLattice())
def test_finds_average_lattice_parameter(self):
    # create two peak tables with UBs corresponding to different values of
    # the lattice constant a
    peaks1 = CreatePeaksWorkspace(InstrumentWorkspace=self.ws,
                                  NumberOfPeaks=0,
                                  OutputWorkspace="SXD_peaks1")
    UB = np.diag([1.0 / 3.9, 0.25, 0.1])  # alatt = [3.9, 4, 10]
    SetUB(peaks1, UB=UB)
    peaks2 = CreatePeaksWorkspace(InstrumentWorkspace=self.ws,
                                  NumberOfPeaks=0,
                                  OutputWorkspace="SXD_peaks2")
    UB = np.diag([1.0 / 4.1, 0.25, 0.1])  # alatt = [4.1, 4, 10]
    SetUB(peaks2, UB=UB)
    # Add some peaks
    add_peaksHKL([peaks1, peaks2], range(0, 3), range(0, 3), 4)

    FindGlobalBMatrix(PeakWorkspaces=[peaks1, peaks2],
                      a=4.1, b=4.2, c=10, alpha=88, beta=88, gamma=89,
                      Tolerance=0.15)

    # check lattice - should have average a=4.0
    self.assert_lattice([peaks1, peaks2],
                        4.0, 4.0, 10.0, 90.0, 90.0, 90.0,
                        delta_latt=5e-2,
                        delta_angle=2.5e-1)
    self.assert_matrix([peaks1],
                       getBMatrix(peaks2),
                       getBMatrix,
                       delta=1e-10)  # should have same B matrix
    self.assert_matrix([peaks1, peaks2], np.eye(3), getUMatrix, delta=5e-2)
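# Aside (illustrative sketch, assuming Mantid's convention that B maps HKL
# to Q in units of 1/d): with no sample rotation (U = I) and an orthogonal
# cell, UB is diagonal with entries 1/a, 1/b, 1/c, which is why
# np.diag([1.0 / 3.9, 0.25, 0.1]) above encodes alatt = [3.9, 4, 10].
def _example_diagonal_ub():
    a, b, c = 3.9, 4.0, 10.0
    UB = np.diag([1.0 / a, 1.0 / b, 1.0 / c])
    d_100 = 1.0 / np.linalg.norm(UB @ np.array([1, 0, 0]))
    assert abs(d_100 - a) < 1e-12  # d-spacing of (100) recovers a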
def runTest(self): HelperTestingClass.__init__(self) ws_4D = CreateMDWorkspace(Dimensions=4, Extents=[-1, 1, -1, 1, -1, 1, -1, 1], Names="E,H,K,L", Frames='General Frame,HKL,HKL,HKL', Units='meV,r.l.u.,r.l.u.,r.l.u.') expt_info_4D = CreateSampleWorkspace() ws_4D.addExperimentInfo(expt_info_4D) SetUB(ws_4D, 1, 1, 2, 90, 90, 120) pres = SliceViewer(ws_4D) self._qapp.sendPostedEvents() non_ortho_action = toolbar_actions(pres, [ToolItemText.NONORTHOGONAL_AXES])[0] self.assertFalse(non_ortho_action.isEnabled()) pres.view.close()
def runTest(self): ws = LoadRaw(Filename='WISH00038237.raw', OutputWorkspace='38237') ws = ConvertUnits(ws, 'dSpacing', OutputWorkspace='38237') UB = np.array([[-0.00601763, 0.07397297, 0.05865706], [ 0.05373321, 0.050198, -0.05651455], [-0.07822144, 0.0295911, -0.04489172]]) SetUB(ws, UB=UB) self._peaks = PredictPeaks(ws, WavelengthMin=0.1, WavelengthMax=100, OutputWorkspace='peaks') # We specifically want to check peak -5 -1 -7 exists, so filter for it self._filtered = FilterPeaks(self._peaks, "h^2+k^2+l^2", 75, '=', OutputWorkspace='filtered') SaveIsawPeaks(self._peaks, Filename='WISHSXReductionPeaksTest.peaks')
def runTest(self): HelperTestingClass.__init__(self) ws_non_ortho = CreateMDWorkspace(Dimensions='3', Extents='-6,6,-4,4,-0.5,0.5', Names='H,K,L', Units='r.l.u.,r.l.u.,r.l.u.', Frames='HKL,HKL,HKL', SplitInto='2', SplitThreshold='50') expt_info_nonortho = CreateSampleWorkspace() ws_non_ortho.addExperimentInfo(expt_info_nonortho) SetUB(ws_non_ortho, 1, 1, 2, 90, 90, 120) pres = SliceViewer(ws_non_ortho) ClearUB(ws_non_ortho) self._qapp.sendPostedEvents() self.assert_no_toplevel_widgets() self.assertEqual(pres.ads_observer, None)
def test_performs_correct_transform_to_ensure_consistent_indexing(self):
    # create peaks tables
    peaks1 = CreatePeaksWorkspace(InstrumentWorkspace=self.ws,
                                  NumberOfPeaks=0,
                                  OutputWorkspace="SXD_peaks7")
    UB = np.diag([0.2, 0.25, 0.1])
    SetUB(peaks1, UB=UB)
    # Add some peaks
    add_peaksHKL([peaks1], range(0, 3), range(0, 3), 4)
    # Clone ws and transform
    peaks2 = CloneWorkspace(InputWorkspace=peaks1,
                            OutputWorkspace="SXD_peaks8")
    # peaks1 will have the most peaks indexed, so it will be used as the
    # reference
    peaks2.removePeak(0)
    transform = np.array([[0, 1, 0], [1, 0, 0], [0, 0, -1]])
    TransformHKL(PeaksWorkspace=peaks2,
                 HKLTransform=transform,
                 FindError=False)

    FindGlobalBMatrix(PeakWorkspaces=[peaks1, peaks2],
                      a=4.15, b=3.95, c=10, alpha=88, beta=88, gamma=89,
                      Tolerance=0.15)

    # check lattice - shouldn't be affected by error in goniometer
    self.assert_lattice([peaks1, peaks2],
                        5.0, 4.0, 10.0, 90.0, 90.0, 90.0,
                        delta_latt=5e-2,
                        delta_angle=2.5e-1)
    self.assert_matrix([peaks1],
                       getBMatrix(peaks2),
                       getBMatrix,
                       delta=1e-10)  # should have same B matrix
    self.assert_matrix([peaks1, peaks2], np.eye(3), getUMatrix, delta=5e-2)
def setUp(self):
    # load empty instrument so we can create a peak table
    self.ws = LoadEmptyInstrument(InstrumentName='SXD',
                                  OutputWorkspace='sxd')
    ub = np.array([[-0.00601763, 0.07397297, 0.05865706],
                   [0.05373321, 0.050198, -0.05651455],
                   [-0.07822144, 0.0295911, -0.04489172]])
    SetUB(self.ws, UB=ub)
    PredictPeaks(self.ws,
                 WavelengthMin=1,
                 WavelengthMax=1.1,
                 MinDSpacing=1,
                 MaxDSpacing=1.1,
                 OutputWorkspace='test')  # 8 peaks
    PredictSatellitePeaks(Peaks='test',
                          SatellitePeaks='test_sat',
                          ModVector1='0,0,0.33',
                          MaxOrder=1)
    self.peaks = CombinePeaksWorkspaces(LHSWorkspace='test_sat',
                                        RHSWorkspace='test',
                                        OutputWorkspace='test')
def test_requires_more_than_one_peak_workspace(self): peaks1 = CreatePeaksWorkspace(InstrumentWorkspace=self.ws, NumberOfPeaks=0, OutputWorkspace="SXD_peaks4") UB = np.diag([0.25, 0.25, 0.1]) SetUB(peaks1, UB=UB) # Add some peaks add_peaksHKL([peaks1], range(0, 3), range(0, 3), 4) alg = create_algorithm('FindGlobalBMatrix', PeakWorkspaces=[peaks1], a=4.1, b=4.2, c=10, alpha=88, beta=88, gamma=89, Tolerance=0.15) with self.assertRaises(RuntimeError): alg.execute()
def example_pdf(): #create slices and cuts w = Load( Filename= '/SNS/HYS/IPTS-14189/shared/autoreduce/4pixel/HYS_102102_4pixel_spe.nxs' ) SetUB(w, 4.53, 4.53, 11.2, 90, 90, 90, "1,0,0", "0,0,1") mde = ConvertToMD(w, QDimensions='Q3D') with PdfPages('/tmp/multipage_pdf.pdf') as pdf: for i in range(4): llims = str(i - 0.5) + ',' + str(i + 0.5) sl1d = CutMD(InputWorkspace=mde, P1Bin='-5,5', P2Bin='-5,5', P3Bin=llims, P4Bin='-10,0.5,15', NoPix=True) fig, ax = plt.subplots() Plot1DMD(ax, sl1d, NumEvNorm=True, fmt='ko') ax.set_title("L=[" + llims + "]") pdf.savefig(fig) plt.close('all')
def convertToHKL(ws, OutputWorkspace='__md_hkl', UB=None, Append=False, scale=None, BinningDim0='-10.05,10.05,201', BinningDim1='-10.05,10.05,201', BinningDim2='-10.05,10.05,201', Uproj=(1, 0, 0), Vproj=(0, 1, 0), Wproj=(0, 0, 1)): """Output MDHistoWorkspace in HKL """ SetUB(ws, UB=UB) ConvertToMD(ws, QDimensions='Q3D', QConversionScales='HKL', dEAnalysisMode='Elastic', Q3DFrames='HKL', OutputWorkspace='__temp', Uproj=Uproj, Vproj=Vproj, Wproj=Wproj) if scale is not None: mtd['__temp'] *= scale BinMD(InputWorkspace='__temp', TemporaryDataWorkspace=OutputWorkspace if Append and mtd.doesExist(OutputWorkspace) else None, OutputWorkspace=OutputWorkspace, AlignedDim0=mtd['__temp'].getDimension(0).name + ',' + BinningDim0, AlignedDim1=mtd['__temp'].getDimension(1).name + ',' + BinningDim1, AlignedDim2=mtd['__temp'].getDimension(2).name + ',' + BinningDim2) DeleteWorkspace('__temp') return OutputWorkspace
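# --- Usage sketch (illustrative; the filename is a hypothetical
# placeholder). This variant takes string binning parameters of the form
# 'min,max,nbins' and supports non-axis-aligned projections, e.g. binning
# along [110]/[-110]:
def _example_convertToHKL_histo_usage():
    data = Load('hypothetical_run.nxs')
    ub = np.diag([0.25, 0.25, 0.1])  # tetragonal a=b=4, c=10
    convertToHKL(data, OutputWorkspace='md_hkl_110', UB=ub,
                 Uproj=(1, 1, 0), Vproj=(-1, 1, 0), Wproj=(0, 0, 1),
                 BinningDim0='-5.05,5.05,101',
                 BinningDim1='-5.05,5.05,101',
                 BinningDim2='-5.05,5.05,101')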
def test_peak_workspaces_need_at_least_six_peaks_each(self): peaks1 = CreatePeaksWorkspace(InstrumentWorkspace=self.ws, NumberOfPeaks=0, OutputWorkspace="SXD_peaks5") UB = np.diag([0.25, 0.25, 0.1]) SetUB(peaks1, UB=UB) # Add 5 peaks add_peaksHKL([peaks1], range(0, 5), [0], 4) peaks2 = CloneWorkspace(InputWorkspace=peaks1, OutputWorkspace="SXD_peaks6") alg = create_algorithm('FindGlobalBMatrix', PeakWorkspaces=[peaks1, peaks2], a=4.1, b=4.2, c=10, alpha=88, beta=88, gamma=89, Tolerance=0.15) with self.assertRaises(RuntimeError): alg.execute()
def test_HKL(self):
    md = CreateMDWorkspace(Dimensions=3,
                           Extents='0,10,0,10,0,10',
                           Names='H,K,L',
                           Units='r.l.u.,r.l.u.,r.l.u.',
                           Frames='HKL,HKL,HKL')
    pw_name = 'peaks_add_delete_test'
    CreatePeaksWorkspace(OutputType='LeanElasticPeak',
                         NumberOfPeaks=0,
                         OutputWorkspace=pw_name)
    SetUB(pw_name, 2 * np.pi, 2 * np.pi, 4 * np.pi, u='0,0,1', v='1,0,0')
    self.assertEqual(mtd[pw_name].getNumberPeaks(), 0)

    sliceViewer = SliceViewer(md)
    # select z=3.0 slice
    sliceViewer.view.dimensions.set_slicepoint((None, None, 3.0))
    # overlay peaks workspaces
    sliceViewer._create_peaks_presenter_if_necessary().overlay_peaksworkspaces(
        [pw_name])
    # click the "Add Peaks" button
    sliceViewer.view.peaks_view.peak_actions_view.ui.add_peaks_button.click()
    # click on 3 different points on the canvas
    sliceViewer.canvas_clicked(
        Mock(inaxes=True, xdata=1.0, ydata=2.0))  # should add a peak at HKL=(1, 2, 3)
    sliceViewer.canvas_clicked(
        Mock(inaxes=True, xdata=2.0, ydata=2.0))  # should add a peak at HKL=(2, 2, 3)
    sliceViewer.canvas_clicked(
        Mock(inaxes=True, xdata=1.5, ydata=1.5))  # should add a peak at HKL=(1.5, 1.5, 3)
    # peaks should be added
    self.assertEqual(mtd[pw_name].getNumberPeaks(), 3)
    # (1, 2, 3)
    peak = mtd[pw_name].getPeak(0)
    q_sample = peak.getQSampleFrame()
    self.assertAlmostEqual(q_sample[0], 1.0, delta=1e-10)
    self.assertAlmostEqual(q_sample[1], 2.0, delta=1e-10)
    self.assertAlmostEqual(q_sample[2], 1.5, delta=1e-10)
    self.assertAlmostEqual(peak.getH(), 1, delta=1e-10)
    self.assertAlmostEqual(peak.getK(), 2, delta=1e-10)
    self.assertAlmostEqual(peak.getL(), 3, delta=1e-10)
    # (2, 2, 3)
    peak = mtd[pw_name].getPeak(1)
    q_sample = peak.getQSampleFrame()
    self.assertAlmostEqual(q_sample[0], 2.0, delta=1e-10)
    self.assertAlmostEqual(q_sample[1], 2.0, delta=1e-10)
    self.assertAlmostEqual(q_sample[2], 1.5, delta=1e-10)
    self.assertAlmostEqual(peak.getH(), 2, delta=1e-10)
    self.assertAlmostEqual(peak.getK(), 2, delta=1e-10)
    self.assertAlmostEqual(peak.getL(), 3, delta=1e-10)
    # (1.5, 1.5, 3)
    peak = mtd[pw_name].getPeak(2)
    q_sample = peak.getQSampleFrame()
    self.assertAlmostEqual(q_sample[0], 1.5, delta=1e-10)
    self.assertAlmostEqual(q_sample[1], 1.5, delta=1e-10)
    self.assertAlmostEqual(q_sample[2], 1.5, delta=1e-10)
    self.assertAlmostEqual(peak.getH(), 1.5, delta=1e-10)
    self.assertAlmostEqual(peak.getK(), 1.5, delta=1e-10)
    self.assertAlmostEqual(peak.getL(), 3, delta=1e-10)

    # click the "Remove Peaks" button
    sliceViewer.view.peaks_view.peak_actions_view.ui.remove_peaks_button.click()
    sliceViewer.canvas_clicked(
        Mock(inaxes=True, xdata=2.0, ydata=1.9))  # should remove the peak closest to HKL=(2, 1.9, 3)
    # should have deleted the (2, 2, 3) peak, leaving (1, 2, 3) and (1.5, 1.5, 3)
    self.assertEqual(mtd[pw_name].getNumberPeaks(), 2)
    # (1, 2, 3)
    peak = mtd[pw_name].getPeak(0)
    q_sample = peak.getQSampleFrame()
    self.assertAlmostEqual(q_sample[0], 1.0, delta=1e-10)
    self.assertAlmostEqual(q_sample[1], 2.0, delta=1e-10)
    self.assertAlmostEqual(q_sample[2], 1.5, delta=1e-10)
    self.assertAlmostEqual(peak.getH(), 1, delta=1e-10)
    self.assertAlmostEqual(peak.getK(), 2, delta=1e-10)
    self.assertAlmostEqual(peak.getL(), 3, delta=1e-10)
    # (1.5, 1.5, 3)
    peak = mtd[pw_name].getPeak(1)
    q_sample = peak.getQSampleFrame()
    self.assertAlmostEqual(q_sample[0], 1.5, delta=1e-10)
    self.assertAlmostEqual(q_sample[1], 1.5, delta=1e-10)
    self.assertAlmostEqual(q_sample[2], 1.5, delta=1e-10)
    self.assertAlmostEqual(peak.getH(), 1.5, delta=1e-10)
    self.assertAlmostEqual(peak.getK(), 1.5, delta=1e-10)
    self.assertAlmostEqual(peak.getL(), 3, delta=1e-10)
def runTest(self):
    # Na Mn Cl3
    # R -3 H (148)
    # 6.592 6.592 18.585177 90 90 120
    # UB/wavelength from /HFIR/HB3A/IPTS-25470/shared/autoreduce/HB3A_exp0769_scan0040.nxs
    ub = np.array([[1.20297e-01, 1.70416e-01, 1.43000e-04],
                   [8.16000e-04, -8.16000e-04, 5.38040e-02],
                   [1.27324e-01, -4.05110e-02, -4.81000e-04]])
    wavelength = 1.553

    # create fake MDEventWorkspace, similar to what is expected from exp769
    # after loading with HB3AAdjustSampleNorm
    MD_Q_sample = CreateMDWorkspace(Dimensions='3',
                                    Extents='-5,5,-5,5,-5,5',
                                    Names='Q_sample_x,Q_sample_y,Q_sample_z',
                                    Units='rlu,rlu,rlu',
                                    Frames='QSample,QSample,QSample')

    inst = LoadEmptyInstrument(InstrumentName='HB3A')
    AddTimeSeriesLog(inst, 'omega', '2010-01-01T00:00:00', 0.)
    AddTimeSeriesLog(inst, 'phi', '2010-01-01T00:00:00', 0.)
    AddTimeSeriesLog(inst, 'chi', '2010-01-01T00:00:00', 0.)
    MD_Q_sample.addExperimentInfo(inst)
    SetUB(MD_Q_sample, UB=ub)

    ol = OrientedLattice()
    ol.setUB(ub)

    sg = SpaceGroupFactory.createSpaceGroup("R -3")

    hkl = []
    sat_hkl = []

    for h in range(0, 6):
        for k in range(0, 6):
            for l in range(0, 11):
                if sg.isAllowedReflection([h, k, l]):
                    if h == k == l == 0:
                        continue
                    q = V3D(h, k, l)
                    q_sample = ol.qFromHKL(q)
                    if not np.any(np.array(q_sample) > 5):
                        hkl.append(q)
                        FakeMDEventData(
                            MD_Q_sample,
                            PeakParams='1000,{},{},{},0.05'.format(*q_sample))
                    # satellite peaks at 0,0,+1.5
                    q = V3D(h, k, l + 1.5)
                    q_sample = ol.qFromHKL(q)
                    if not np.any(np.array(q_sample) > 5):
                        sat_hkl.append(q)
                        FakeMDEventData(
                            MD_Q_sample,
                            PeakParams='100,{},{},{},0.02'.format(*q_sample))
                    # satellite peaks at 0,0,-1.5
                    q = V3D(h, k, l - 1.5)
                    q_sample = ol.qFromHKL(q)
                    if not np.any(np.array(q_sample) > 5):
                        sat_hkl.append(q)
                        FakeMDEventData(
                            MD_Q_sample,
                            PeakParams='100,{},{},{},0.02'.format(*q_sample))

    # Check that this fake workspace gives us the expected UB
    peaks = FindPeaksMD(MD_Q_sample,
                        PeakDistanceThreshold=1,
                        OutputType='LeanElasticPeak')
    FindUBUsingFFT(peaks, MinD=5, MaxD=20)
    ShowPossibleCells(peaks)
    SelectCellOfType(peaks, CellType='Rhombohedral', Centering='R',
                     Apply=True)
    OptimizeLatticeForCellType(peaks, CellType='Hexagonal', Apply=True)
    found_ol = peaks.sample().getOrientedLattice()
    self.assertAlmostEqual(found_ol.a(), 6.592, places=2)
    self.assertAlmostEqual(found_ol.b(), 6.592, places=2)
    self.assertAlmostEqual(found_ol.c(), 18.585177, places=2)
    self.assertAlmostEqual(found_ol.alpha(), 90)
    self.assertAlmostEqual(found_ol.beta(), 90)
    self.assertAlmostEqual(found_ol.gamma(), 120)

    # nuclear peaks
    predict = HB3APredictPeaks(
        MD_Q_sample,
        Wavelength=wavelength,
        ReflectionCondition='Rhombohedrally centred, obverse',
        SatellitePeaks=True,
        IncludeIntegerHKL=True)
    predict = HB3AIntegratePeaks(MD_Q_sample, predict, 0.25)

    self.assertEqual(predict.getNumberPeaks(), 66)
    # check that the found peaks are expected
    for n in range(predict.getNumberPeaks()):
        HKL = predict.getPeak(n).getHKL()
        self.assertTrue(HKL in hkl, msg=f"Peak {n} with HKL={HKL}")

    # magnetic peaks
    satellites = HB3APredictPeaks(
        MD_Q_sample,
        Wavelength=wavelength,
        ReflectionCondition='Rhombohedrally centred, obverse',
        SatellitePeaks=True,
        ModVector1='0,0,1.5',
        MaxOrder=1,
        IncludeIntegerHKL=False)
    satellites = HB3AIntegratePeaks(MD_Q_sample, satellites, 0.1)

    self.assertEqual(satellites.getNumberPeaks(), 80)
    # check that the found peaks are expected
    for n in range(satellites.getNumberPeaks()):
        HKL = satellites.getPeak(n).getHKL()
        self.assertTrue(HKL in sat_hkl, msg=f"Peak {n} with HKL={HKL}")
def _determine_single_crystal_diffraction(self): """ All work related to the determination of the diffraction pattern """ a, b, c = self.getProperty('LatticeSizes').value alpha, beta, gamma = self.getProperty('LatticeAngles').value u = self.getProperty('VectorU').value v = self.getProperty('VectorV').value uproj = self.getProperty('Uproj').value vproj = self.getProperty('Vproj').value wproj = self.getProperty('Wproj').value n_bins = self.getProperty('NBins').value self._n_bins = (n_bins, n_bins, 1) axis0 = '{},0,1,0,1'.format(self.getProperty('PsiAngleLog').value) axis1 = '{},0,1,0,1'.format(self.getProperty('PsiOffset').value) # Options for SetUB independent of run ub_args = dict(a=a, b=b, c=c, alpha=alpha, beta=beta, gamma=gamma, u=u, v=v) min_values = None # Options for algorithm ConvertToMD independent of run convert_to_md_kwargs = dict(QDimensions='Q3D', dEAnalysisMode='Elastic', Q3DFrames='HKL', QConversionScales='HKL', Uproj=uproj, Vproj=vproj, Wproj=wproj) md_norm_scd_kwargs = None # Options for algorithm MDNormSCD # Find solid angle and flux if self._vanadium_files: kwargs = dict(Filename='+'.join(self._vanadium_files), MaskFile=self.getProperty("MaskFile").value, MomentumMin=self._momentum_range[0], MomentumMax=self._momentum_range[1]) _t_solid_angle, _t_int_flux = \ MDNormSCDPreprocessIncoherent(**kwargs) else: _t_solid_angle = self.nominal_solid_angle('_t_solid_angle') _t_int_flux = self.nominal_integrated_flux('_t_int_flux') # Process a sample at a time run_numbers = self._getRuns(self.getProperty("RunNumbers").value, doIndiv=True) run_numbers = list(itertools.chain.from_iterable(run_numbers)) diffraction_reporter = Progress(self, start=0.0, end=1.0, nreports=len(run_numbers)) for i_run, run in enumerate(run_numbers): _t_sample = self._mask_t0_crop(run, '_t_sample') # Set Goniometer and UB matrix SetGoniometer(_t_sample, Axis0=axis0, Axis1=axis1) SetUB(_t_sample, **ub_args) if self._bkg: self._bkg.run().getGoniometer().\ setR(_t_sample.run().getGoniometer().getR()) SetUB(self._bkg, **ub_args) # Determine limits for momentum transfer in HKL space. Needs to be # done only once. We use the first run. if min_values is None: kwargs = dict(QDimensions='Q3D', dEAnalysisMode='Elastic', Q3DFrames='HKL') min_values, max_values = ConvertToMDMinMaxGlobal( _t_sample, **kwargs) convert_to_md_kwargs.update({ 'MinValues': min_values, 'MaxValues': max_values }) # Convert to MD _t_md = ConvertToMD(_t_sample, OutputWorkspace='_t_md', **convert_to_md_kwargs) if self._bkg: _t_bkg_md = ConvertToMD(self._bkg, OutputWorkspace='_t_bkg_md', **convert_to_md_kwargs) # Determine aligned dimensions. 
# Needs to be done only once; we use the first run.
if md_norm_scd_kwargs is None:
    aligned = list()
    for i_dim in range(3):
        kwargs = {
            'name': _t_md.getDimension(i_dim).name,
            'min': min_values[i_dim],
            'max': max_values[i_dim],
            'n_bins': self._n_bins[i_dim]
        }
        aligned.append('{name},{min},{max},{n_bins}'.format(**kwargs))
    md_norm_scd_kwargs = dict(AlignedDim0=aligned[0],
                              AlignedDim1=aligned[1],
                              AlignedDim2=aligned[2],
                              FluxWorkspace=_t_int_flux,
                              SolidAngleWorkspace=_t_solid_angle,
                              SkipSafetyCheck=True)
# Normalize sample by solid angle and integrated flux;
# accumulate runs into the temporary workspaces
MDNormSCD(_t_md,
          OutputWorkspace='_t_data',
          OutputNormalizationWorkspace='_t_norm',
          TemporaryDataWorkspace='_t_data'
          if mtd.doesExist('_t_data') else None,
          TemporaryNormalizationWorkspace='_t_norm'
          if mtd.doesExist('_t_norm') else None,
          **md_norm_scd_kwargs)
if self._bkg:
    MDNormSCD(_t_bkg_md,
              OutputWorkspace='_t_bkg_data',
              OutputNormalizationWorkspace='_t_bkg_norm',
              TemporaryDataWorkspace='_t_bkg_data'
              if mtd.doesExist('_t_bkg_data') else None,
              TemporaryNormalizationWorkspace='_t_bkg_norm'
              if mtd.doesExist('_t_bkg_norm') else None,
              **md_norm_scd_kwargs)
message = 'Processing sample {} of {}'.format(i_run + 1, len(run_numbers))
diffraction_reporter.report(message)
self._temps.workspaces.append('PreprocessedDetectorsWS')  # to remove
# Iteration over the sample runs is done.
# Divide by vanadium, subtract background, and rename workspaces
name = self.getPropertyValue("OutputWorkspace")
_t_data = DivideMD(LHSWorkspace='_t_data', RHSWorkspace='_t_norm')
if self._bkg:
    _t_bkg_data = DivideMD(LHSWorkspace='_t_bkg_data',
                           RHSWorkspace='_t_bkg_norm')
    _t_scale = CreateSingleValuedWorkspace(DataValue=self._bkg_scale)
    _t_bkg_data = MultiplyMD(_t_bkg_data, _t_scale)
    ws = MinusMD(_t_data, _t_bkg_data)
    RenameWorkspace(_t_data, OutputWorkspace=name + '_dat')
    RenameWorkspace(_t_bkg_data, OutputWorkspace=name + '_bkg')
else:
    ws = _t_data
RenameWorkspace(ws, OutputWorkspace=name)
self.setProperty("OutputWorkspace", ws)
diffraction_reporter.report(len(run_numbers), 'Done')
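# Note on the accumulation idiom above: passing the running '_t_data' /
# '_t_norm' workspaces back in as TemporaryDataWorkspace /
# TemporaryNormalizationWorkspace lets MDNormSCD add each run's contribution
# to the existing sums instead of re-normalizing from scratch; the same
# pattern is used with BinMD's TemporaryDataWorkspace in convertToHKL above.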
ws = LoadWAND(IPTS=21442, RunNumbers=run, Grouping='4x4')
ub = np.array(re.findall(
    r'-?\d+\.*\d*',
    ws.run().getProperty('HB2C:CS:CrystalAlign:UBMatrix').value[0]),
    dtype=float).reshape(3, 3)
sgl = np.deg2rad(ws.run().getProperty(
    'HB2C:Mot:sgl.RBV').value[0])  # 'HB2C:Mot:sgl.RBV,1,0,0,-1'
sgu = np.deg2rad(ws.run().getProperty(
    'HB2C:Mot:sgu.RBV').value[0])  # 'HB2C:Mot:sgu.RBV,0,0,1,-1'
sgl_a = np.array([[1, 0, 0],
                  [0, np.cos(sgl), np.sin(sgl)],
                  [0, -np.sin(sgl), np.cos(sgl)]])
sgu_a = np.array([[np.cos(sgu), np.sin(sgu), 0],
                  [-np.sin(sgu), np.cos(sgu), 0],
                  [0, 0, 1]])
UB = sgl_a.dot(sgu_a).dot(ub)  # Apply the goniometer tilts to the UB matrix
SetUB(ws, UB=UB)
md = ConvertToMD(ws,
                 QDimensions='Q3D',
                 dEAnalysisMode='Elastic',
                 Q3DFrames='HKL',
                 QConversionScales='HKL',
                 OtherDimensions='HB2C:SE:SampleTemp',
                 MinValues='-10,-10,-10,0',
                 MaxValues='10,10,10,30')
if 'data' in mtd:
    PlusMD(LHSWorkspace='data', RHSWorkspace='md', OutputWorkspace='data')
else:
    CloneMDWorkspace(InputWorkspace='md', OutputWorkspace='data')
BinMD(InputWorkspace='data', AlignedDim0='[H,0,0],-1,3,200',
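# Helper sketch (an illustrative refactoring, not from the original source):
# the same sgl/sgu tilt correction as above, factored into a function. The
# sign conventions follow the inline comments 'HB2C:Mot:sgl.RBV,1,0,0,-1'
# and 'HB2C:Mot:sgu.RBV,0,0,1,-1' (rotations about x and z with a -1 sense).
def tilt_corrected_ub(ub, sgl_deg, sgu_deg):
    sgl = np.deg2rad(sgl_deg)
    sgu = np.deg2rad(sgu_deg)
    sgl_a = np.array([[1, 0, 0],
                      [0, np.cos(sgl), np.sin(sgl)],
                      [0, -np.sin(sgl), np.cos(sgl)]])
    sgu_a = np.array([[np.cos(sgu), np.sin(sgu), 0],
                      [-np.sin(sgu), np.cos(sgu), 0],
                      [0, 0, 1]])
    return sgl_a.dot(sgu_a).dot(ub)  # goniometer-tilt-corrected UB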
def PyExec(self): # remove possible old temp workspaces [ DeleteWorkspace(ws) for ws in self.temp_workspace_list if mtd.doesExist(ws) ] _background = bool(self.getProperty("Background").value) _load_inst = bool(self.getProperty("LoadInstrument").value) _detcal = bool(self.getProperty("DetCal").value) _masking = bool(self.getProperty("MaskFile").value) _outWS_name = self.getPropertyValue("OutputWorkspace") UBList = self._generate_UBList() dim0_min, dim0_max, dim0_bins = self.getProperty('BinningDim0').value dim1_min, dim1_max, dim1_bins = self.getProperty('BinningDim1').value dim2_min, dim2_max, dim2_bins = self.getProperty('BinningDim2').value MinValues = "{},{},{}".format(dim0_min, dim1_min, dim2_min) MaxValues = "{},{},{}".format(dim0_max, dim1_max, dim2_max) AlignedDim0 = ",{},{},{}".format(dim0_min, dim0_max, int(dim0_bins)) AlignedDim1 = ",{},{},{}".format(dim1_min, dim1_max, int(dim1_bins)) AlignedDim2 = ",{},{},{}".format(dim2_min, dim2_max, int(dim2_bins)) LoadNexus(Filename=self.getProperty("SolidAngle").value, OutputWorkspace='__sa') LoadNexus(Filename=self.getProperty("Flux").value, OutputWorkspace='__flux') if _masking: LoadMask(Instrument=mtd['__sa'].getInstrument().getName(), InputFile=self.getProperty("MaskFile").value, OutputWorkspace='__mask') MaskDetectors(Workspace='__sa', MaskedWorkspace='__mask') DeleteWorkspace('__mask') XMin = mtd['__sa'].getXDimension().getMinimum() XMax = mtd['__sa'].getXDimension().getMaximum() if _background: Load(Filename=self.getProperty("Background").value, OutputWorkspace='__bkg', FilterByTofMin=self.getProperty("FilterByTofMin").value, FilterByTofMax=self.getProperty("FilterByTofMax").value) if _load_inst: LoadInstrument( Workspace='__bkg', Filename=self.getProperty("LoadInstrument").value, RewriteSpectraMap=False) if _detcal: LoadIsawDetCal(InputWorkspace='__bkg', Filename=self.getProperty("DetCal").value) MaskDetectors(Workspace='__bkg', MaskedWorkspace='__sa') ConvertUnits(InputWorkspace='__bkg', OutputWorkspace='__bkg', Target='Momentum') CropWorkspace(InputWorkspace='__bkg', OutputWorkspace='__bkg', XMin=XMin, XMax=XMax) progress = Progress( self, 0.0, 1.0, len(UBList) * len(self.getProperty("Filename").value)) for run in self.getProperty("Filename").value: logger.notice("Working on " + run) Load(Filename=run, OutputWorkspace='__run', FilterByTofMin=self.getProperty("FilterByTofMin").value, FilterByTofMax=self.getProperty("FilterByTofMax").value) if _load_inst: LoadInstrument( Workspace='__run', Filename=self.getProperty("LoadInstrument").value, RewriteSpectraMap=False) if _detcal: LoadIsawDetCal(InputWorkspace='__run', Filename=self.getProperty("DetCal").value) MaskDetectors(Workspace='__run', MaskedWorkspace='__sa') ConvertUnits(InputWorkspace='__run', OutputWorkspace='__run', Target='Momentum') CropWorkspace(InputWorkspace='__run', OutputWorkspace='__run', XMin=XMin, XMax=XMax) if self.getProperty('SetGoniometer').value: SetGoniometer( Workspace='__run', Goniometers=self.getProperty('Goniometers').value, Axis0=self.getProperty('Axis0').value, Axis1=self.getProperty('Axis1').value, Axis2=self.getProperty('Axis2').value) # Set background Goniometer to be the same as data if _background: mtd['__bkg'].run().getGoniometer().setR( mtd['__run'].run().getGoniometer().getR()) for ub in UBList: SetUB(Workspace='__run', UB=ub) ConvertToMD(InputWorkspace='__run', OutputWorkspace='__md', QDimensions='Q3D', dEAnalysisMode='Elastic', Q3DFrames='HKL', QConversionScales='HKL', Uproj=self.getProperty('Uproj').value, 
Vproj=self.getProperty('Vproj').value,
                        Wproj=self.getProperty('Wproj').value,
                        MinValues=MinValues,
                        MaxValues=MaxValues)
            MDNormSCD(InputWorkspace=mtd['__md'],
                      FluxWorkspace='__flux',
                      SolidAngleWorkspace='__sa',
                      OutputWorkspace='__data',
                      SkipSafetyCheck=True,
                      TemporaryDataWorkspace='__data'
                      if mtd.doesExist('__data') else None,
                      OutputNormalizationWorkspace='__norm',
                      TemporaryNormalizationWorkspace='__norm'
                      if mtd.doesExist('__norm') else None,
                      AlignedDim0=mtd['__md'].getDimension(0).name + AlignedDim0,
                      AlignedDim1=mtd['__md'].getDimension(1).name + AlignedDim1,
                      AlignedDim2=mtd['__md'].getDimension(2).name + AlignedDim2)
            DeleteWorkspace('__md')
            if _background:
                SetUB(Workspace='__bkg', UB=ub)
                ConvertToMD(InputWorkspace='__bkg',
                            OutputWorkspace='__bkg_md',
                            QDimensions='Q3D',
                            dEAnalysisMode='Elastic',
                            Q3DFrames='HKL',
                            QConversionScales='HKL',
                            Uproj=self.getProperty('Uproj').value,
                            Vproj=self.getProperty('Vproj').value,
                            Wproj=self.getProperty('Wproj').value,
                            MinValues=MinValues,
                            MaxValues=MaxValues)
                MDNormSCD(InputWorkspace='__bkg_md',
                          FluxWorkspace='__flux',
                          SolidAngleWorkspace='__sa',
                          SkipSafetyCheck=True,
                          OutputWorkspace='__bkg_data',
                          TemporaryDataWorkspace='__bkg_data'
                          if mtd.doesExist('__bkg_data') else None,
                          OutputNormalizationWorkspace='__bkg_norm',
                          TemporaryNormalizationWorkspace='__bkg_norm'
                          if mtd.doesExist('__bkg_norm') else None,
                          AlignedDim0=mtd['__bkg_md'].getDimension(0).name + AlignedDim0,
                          AlignedDim1=mtd['__bkg_md'].getDimension(1).name + AlignedDim1,
                          AlignedDim2=mtd['__bkg_md'].getDimension(2).name + AlignedDim2)
                DeleteWorkspace('__bkg_md')
        progress.report()
        DeleteWorkspace('__run')

    if _background:
        # outWS = data / norm - bkg_data / bkg_norm * BackgroundScale
        DivideMD(LHSWorkspace='__data',
                 RHSWorkspace='__norm',
                 OutputWorkspace=_outWS_name + '_normalizedData')
        DivideMD(LHSWorkspace='__bkg_data',
                 RHSWorkspace='__bkg_norm',
                 OutputWorkspace=_outWS_name + '_normalizedBackground')
        CreateSingleValuedWorkspace(
            OutputWorkspace='__scale',
            DataValue=self.getProperty('BackgroundScale').value)
        MultiplyMD(LHSWorkspace=_outWS_name + '_normalizedBackground',
                   RHSWorkspace='__scale',
                   OutputWorkspace='__scaled_background')
        DeleteWorkspace('__scale')
        MinusMD(LHSWorkspace=_outWS_name + '_normalizedData',
                RHSWorkspace='__scaled_background',
                OutputWorkspace=_outWS_name)
        if self.getProperty('KeepTemporaryWorkspaces').value:
            RenameWorkspaces(
                InputWorkspaces=['__data', '__norm', '__bkg_data', '__bkg_norm'],
                WorkspaceNames=[
                    _outWS_name + '_data',
                    _outWS_name + '_normalization',
                    _outWS_name + '_background_data',
                    _outWS_name + '_background_normalization'
                ])
    else:
        # outWS = data / norm
        DivideMD(LHSWorkspace='__data',
                 RHSWorkspace='__norm',
                 OutputWorkspace=_outWS_name)
        if self.getProperty('KeepTemporaryWorkspaces').value:
            RenameWorkspaces(InputWorkspaces=['__data', '__norm'],
                             WorkspaceNames=[
                                 _outWS_name + '_data',
                                 _outWS_name + '_normalization'
                             ])

    self.setProperty("OutputWorkspace", mtd[_outWS_name])

    # remove temp workspaces
    [
        DeleteWorkspace(ws) for ws in self.temp_workspace_list
        if mtd.doesExist(ws)
    ]
def load_and_group(self, runs: List[str]) -> IMDHistoWorkspace: """ Load the data with given grouping """ # grouping config grouping = self.getProperty("Grouping").value if grouping == 'None': grouping = 1 else: grouping = 2 if grouping == '2x2' else 4 number_of_runs = len(runs) x_dim = 480 * 8 // grouping y_dim = 512 // grouping data_array = np.empty((number_of_runs, x_dim, y_dim), dtype=np.float64) s1_array = [] duration_array = [] run_number_array = [] monitor_count_array = [] progress = Progress(self, 0.0, 1.0, number_of_runs + 3) for n, run in enumerate(runs): progress.report('Loading: ' + run) with h5py.File(run, 'r') as f: bc = np.zeros((512 * 480 * 8), dtype=np.int64) for b in range(8): bc += np.bincount(f['/entry/bank' + str(b + 1) + '_events/event_id'].value, minlength=512 * 480 * 8) bc = bc.reshape((480 * 8, 512)) if grouping == 2: bc = bc[::2, ::2] + bc[1::2, ::2] + bc[::2, 1::2] + bc[1::2, 1::2] elif grouping == 4: bc = bc[::4, ::4] + bc[1::4, ::4] + bc[2::4, ::4] + bc[3::4, ::4] + bc[::4, 1::4] + bc[1::4, 1::4] + bc[2::4, 1::4] + \ bc[3::4, 1::4] + bc[::4, 2::4] + bc[1::4, 2::4] + bc[2::4, 2::4] + bc[3::4, 2::4] + bc[::4, 3::4] + \ bc[1::4, 3::4] + bc[2::4, 3::4] + bc[3::4, 3::4] data_array[n] = bc s1_array.append( f['/entry/DASlogs/HB2C:Mot:s1.RBV/average_value'].value[0]) duration_array.append(float(f['/entry/duration'].value[0])) run_number_array.append(float(f['/entry/run_number'].value[0])) monitor_count_array.append( float(f['/entry/monitor1/total_counts'].value[0])) progress.report('Creating MDHistoWorkspace') createWS_alg = self.createChildAlgorithm("CreateMDHistoWorkspace", enableLogging=False) createWS_alg.setProperty("SignalInput", data_array) createWS_alg.setProperty("ErrorInput", np.sqrt(data_array)) createWS_alg.setProperty("Dimensionality", 3) createWS_alg.setProperty( "Extents", '0.5,{},0.5,{},0.5,{}'.format(y_dim + 0.5, x_dim + 0.5, number_of_runs + 0.5)) createWS_alg.setProperty( "NumberOfBins", '{},{},{}'.format(y_dim, x_dim, number_of_runs)) createWS_alg.setProperty("Names", 'y,x,scanIndex') createWS_alg.setProperty("Units", 'bin,bin,number') createWS_alg.execute() outWS = createWS_alg.getProperty("OutputWorkspace").value progress.report('Getting IDF') # Get the instrument and some logs from the first file; assume the rest are the same _tmp_ws = LoadEventNexus(runs[0], MetaDataOnly=True, EnableLogging=False) # The following logs should be the same for all runs RemoveLogs( _tmp_ws, KeepLogs= 'HB2C:Mot:detz,HB2C:Mot:detz.RBV,HB2C:Mot:s2,HB2C:Mot:s2.RBV,' 'HB2C:Mot:sgl,HB2C:Mot:sgl.RBV,HB2C:Mot:sgu,HB2C:Mot:sgu.RBV,' 'run_title,start_time,experiment_identifier,HB2C:CS:CrystalAlign:UBMatrix', EnableLogging=False) time_ns_array = _tmp_ws.run().startTime().totalNanoseconds( ) + np.append(0, np.cumsum(duration_array) * 1e9)[:-1] try: ub = np.array(re.findall( r'-?\d+\.*\d*', _tmp_ws.run().getProperty( 'HB2C:CS:CrystalAlign:UBMatrix').value[0]), dtype=float).reshape(3, 3) sgl = np.deg2rad(_tmp_ws.run().getProperty( 'HB2C:Mot:sgl.RBV').value[0]) # 'HB2C:Mot:sgl.RBV,1,0,0,-1' sgu = np.deg2rad(_tmp_ws.run().getProperty( 'HB2C:Mot:sgu.RBV').value[0]) # 'HB2C:Mot:sgu.RBV,0,0,1,-1' sgl_a = np.array([[1, 0, 0], [0, np.cos(sgl), np.sin(sgl)], [0, -np.sin(sgl), np.cos(sgl)]]) sgu_a = np.array([[np.cos(sgu), np.sin(sgu), 0], [-np.sin(sgu), np.cos(sgu), 0], [0, 0, 1]]) UB = sgl_a.dot(sgu_a).dot( ub) # Apply the Goniometer tilts to the UB matrix SetUB(_tmp_ws, UB=UB, EnableLogging=False) except (RuntimeError, ValueError): SetUB(_tmp_ws, EnableLogging=False) if grouping > 1: 
_tmp_group, _, _ = CreateGroupingWorkspace(InputWorkspace=_tmp_ws,
                                           EnableLogging=False)
group_number = 0
for x in range(0, 480 * 8, grouping):
    for y in range(0, 512, grouping):
        group_number += 1
        for j in range(grouping):
            for i in range(grouping):
                _tmp_group.dataY(y + i + (x + j) * 512)[0] = group_number
_tmp_ws = GroupDetectors(InputWorkspace=_tmp_ws,
                         CopyGroupingFromWorkspace=_tmp_group,
                         EnableLogging=False)
DeleteWorkspace(_tmp_group, EnableLogging=False)

progress.report('Adding logs')

# Hack: ConvertToMD is needed so that a deep copy of the ExperimentInfo can
# happen
# outWS.addExperimentInfo(_tmp_ws)  # This doesn't work but should; when you
# delete `ws`, `outWS` also loses its ExperimentInfo
_tmp_ws = Rebin(_tmp_ws, '0,1,2', EnableLogging=False)
_tmp_ws = ConvertToMD(_tmp_ws,
                      dEAnalysisMode='Elastic',
                      EnableLogging=False,
                      PreprocDetectorsWS='__PreprocessedDetectorsWS')
preprocWS = mtd['__PreprocessedDetectorsWS']
twotheta = preprocWS.column(2)
azimuthal = preprocWS.column(3)
outWS.copyExperimentInfos(_tmp_ws)
DeleteWorkspace(_tmp_ws, EnableLogging=False)
DeleteWorkspace('__PreprocessedDetectorsWS', EnableLogging=False)
# end Hack

add_time_series_property('s1', outWS.getExperimentInfo(0).run(),
                         time_ns_array, s1_array)
outWS.getExperimentInfo(0).run().getProperty('s1').units = 'deg'
add_time_series_property('duration', outWS.getExperimentInfo(0).run(),
                         time_ns_array, duration_array)
outWS.getExperimentInfo(0).run().getProperty('duration').units = 'second'
outWS.getExperimentInfo(0).run().addProperty('run_number',
                                             run_number_array, True)
add_time_series_property('monitor_count', outWS.getExperimentInfo(0).run(),
                         time_ns_array, monitor_count_array)
outWS.getExperimentInfo(0).run().addProperty('twotheta', twotheta, True)
outWS.getExperimentInfo(0).run().addProperty('azimuthal', azimuthal, True)

setGoniometer_alg = self.createChildAlgorithm("SetGoniometer",
                                              enableLogging=False)
setGoniometer_alg.setProperty("Workspace", outWS)
setGoniometer_alg.setProperty("Axis0", 's1,0,1,0,1')
setGoniometer_alg.setProperty("Average", False)
setGoniometer_alg.execute()

return outWS
def runTest(self): S = np.random.random(32 * 240 * 100) ConvertWANDSCDtoQTest_data = CreateMDHistoWorkspace( Dimensionality=3, Extents='0.5,32.5,0.5,240.5,0.5,100.5', SignalInput=S.ravel('F'), ErrorInput=np.sqrt(S.ravel('F')), NumberOfBins='32,240,100', Names='y,x,scanIndex', Units='bin,bin,number') ConvertWANDSCDtoQTest_dummy = CreateSingleValuedWorkspace() LoadInstrument(ConvertWANDSCDtoQTest_dummy, InstrumentName='WAND', RewriteSpectraMap=False) ConvertWANDSCDtoQTest_data.addExperimentInfo( ConvertWANDSCDtoQTest_dummy) log = FloatTimeSeriesProperty('s1') for t, v in zip(range(100), np.arange(0, 50, 0.5)): log.addValue(t, v) ConvertWANDSCDtoQTest_data.getExperimentInfo(0).run()['s1'] = log ConvertWANDSCDtoQTest_data.getExperimentInfo(0).run().addProperty( 'duration', [60.] * 100, True) ConvertWANDSCDtoQTest_data.getExperimentInfo(0).run().addProperty( 'monitor_count', [120000.] * 100, True) ConvertWANDSCDtoQTest_data.getExperimentInfo(0).run().addProperty( 'twotheta', list(np.linspace(np.pi * 2 / 3, 0, 240).repeat(32)), True) ConvertWANDSCDtoQTest_data.getExperimentInfo(0).run().addProperty( 'azimuthal', list(np.tile(np.linspace(-0.15, 0.15, 32), 240)), True) peaks = CreatePeaksWorkspace(NumberOfPeaks=0, OutputType='LeanElasticPeak') SetUB(ConvertWANDSCDtoQTest_data, 5, 5, 7, 90, 90, 120, u=[-1, 0, 1], v=[1, 0, 1]) SetGoniometer(ConvertWANDSCDtoQTest_data, Axis0='s1,0,1,0,1', Average=False) CopySample(InputWorkspace=ConvertWANDSCDtoQTest_data, OutputWorkspace=peaks, CopyName=False, CopyMaterial=False, CopyEnvironment=False, CopyShape=False, CopyLattice=True) Q = ConvertWANDSCDtoQ(InputWorkspace=ConvertWANDSCDtoQTest_data, UBWorkspace=peaks, Wavelength=1.486, Frame='HKL', Uproj='1,1,0', Vproj='-1,1,0', BinningDim0='-6.04,6.04,151', BinningDim1='-6.04,6.04,151', BinningDim2='-6.04,6.04,151') data_norm = ConvertHFIRSCDtoMDE(ConvertWANDSCDtoQTest_data, Wavelength=1.486, MinValues='-6.04,-6.04,-6.04', MaxValues='6.04,6.04,6.04') HKL = ConvertQtoHKLMDHisto(data_norm, PeaksWorkspace=peaks, Uproj='1,1,0', Vproj='-1,1,0', Extents='-6.04,6.04,-6.04,6.04,-6.04,6.04', Bins='151,151,151') for i in range(HKL.getNumDims()): print(HKL.getDimension(i).getUnits(), Q.getDimension(i).getUnits()) np.testing.assert_equal( HKL.getDimension(i).getUnits(), Q.getDimension(i).getUnits()) hkl_data = mtd["HKL"].getSignalArray() Q_data = mtd["Q"].getSignalArray() print(np.isnan(Q_data).sum()) print(np.isclose(hkl_data, 0).sum()) xaxis = mtd["HKL"].getXDimension() yaxis = mtd["HKL"].getYDimension() zaxis = mtd["HKL"].getZDimension() x, y, z = np.meshgrid( np.linspace(xaxis.getMinimum(), xaxis.getMaximum(), xaxis.getNBins()), np.linspace(yaxis.getMinimum(), yaxis.getMaximum(), yaxis.getNBins()), np.linspace(zaxis.getMinimum(), zaxis.getMaximum(), zaxis.getNBins()), indexing="ij", copy=False, ) print( x[~np.isnan(Q_data)].mean(), y[~np.isnan(Q_data)].mean(), z[~np.isnan(Q_data)].mean(), ) print( x[~np.isclose(hkl_data, 0)].mean(), y[~np.isclose(hkl_data, 0)].mean(), z[~np.isclose(hkl_data, 0)].mean(), ) np.testing.assert_almost_equal(x[~np.isnan(Q_data)].mean(), x[~np.isclose(hkl_data, 0)].mean(), decimal=2) np.testing.assert_almost_equal(y[~np.isnan(Q_data)].mean(), y[~np.isclose(hkl_data, 0)].mean(), decimal=2) np.testing.assert_almost_equal(z[~np.isnan(Q_data)].mean(), z[~np.isclose(hkl_data, 0)].mean(), decimal=1)
def prepare_md(input_ws_name, merged_md_name, min_log_value, max_log_value, log_step, prefix) -> str: """Load raw event Nexus file and reduce to MDEventWorkspace """ # Filter gef_kw_dict = dict() if log_step <= max_log_value - min_log_value: LogValueInterval = log_step gef_kw_dict['LogValueInterval'] = LogValueInterval GenerateEventsFilter(InputWorkspace=input_ws_name, OutputWorkspace='splboth', InformationWorkspace='info', UnitOfTime='Nanoseconds', LogName='s1', MinimumLogValue=min_log_value, MaximumLogValue=max_log_value, **gef_kw_dict) FilterEvents(InputWorkspace=input_ws_name, SplitterWorkspace='splboth', InformationWorkspace='info', FilterByPulseTime=True, GroupWorkspaces=True, OutputWorkspaceIndexedFrom1=True, OutputWorkspaceBaseName='split') # Clean memory DeleteWorkspace('splboth') DeleteWorkspace('info') reduced_ws_group = f'reduced_{prefix}' DgsReduction(SampleInputWorkspace='split', SampleInputMonitorWorkspace='split_1', IncidentEnergyGuess=50, SofPhiEIsDistribution=False, TimeIndepBackgroundSub=True, TibTofRangeStart=10400, TibTofRangeEnd=12400, OutputWorkspace=reduced_ws_group) # Clean memory DeleteWorkspace('split') SetUB(Workspace=reduced_ws_group, a=5.823, b=6.475, c=3.186, u='0,1,0', v='0,0,1') CropWorkspaceForMDNorm(InputWorkspace=reduced_ws_group, XMin=-25, XMax=49, OutputWorkspace=reduced_ws_group) ConvertToMD(InputWorkspace=reduced_ws_group, QDimensions='Q3D', Q3DFrames='Q_sample', OutputWorkspace='md', MinValues='-11,-11,-11,-25', MaxValues='11,11,11,49') if mtd['md'].getNumberOfEntries() == 1: RenameWorkspace(mtd['md'][0], merged_md_name) else: MergeMD(InputWorkspaces='md', OutputWorkspace=merged_md_name) return reduced_ws_group
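# --- Usage sketch (illustrative; the filename, log bounds and step are
# hypothetical placeholders). Assumes an event workspace carrying an 's1'
# goniometer log, which prepare_md splits in 1-degree steps and reduces to a
# single merged MDEventWorkspace named 'merged_md'.
def _example_prepare_md_usage():
    LoadEventNexus(Filename='hypothetical_HYS_run.nxs.h5',
                   OutputWorkspace='raw_events')
    reduced_group = prepare_md(input_ws_name='raw_events',
                               merged_md_name='merged_md',
                               min_log_value=0.,
                               max_log_value=90.,
                               log_step=1.,
                               prefix='example')
    # 'merged_md' now holds the combined Q_sample MD events;
    # reduced_group names the DgsReduction output group.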