def _create_peaks_workspace(self):
    """Build a small dummy PeaksWorkspace for testing.

    Loads the miniature TOPAZ test instrument, creates an empty peaks
    workspace with a unit-cube UB, then fills it with a grid of peaks
    (dummy intensity / sigma / HKL) positioned to land on the detector
    bank defined in the IDF.
    """
    idf_path = FileFinder.getFullPath(
        "IDFs_for_UNIT_TESTING/MINITOPAZ_Definition.xml")
    instrument_ws = LoadEmptyInstrument(Filename=idf_path)
    peaks = CreatePeaksWorkspace(instrument_ws, 0)
    DeleteWorkspace(instrument_ws)
    SetUB(peaks, 1, 1, 1, 90, 90, 90)

    # Grid of Q vectors around a point known to hit the detector bank
    # defined in the IDF.
    base_q = np.array([-5.1302, 2.5651, 3.71809])
    q_vectors = [base_q + np.array([0.0, dy, dx])
                 for dx in np.arange(0, 1, 0.1)
                 for dy in np.arange(-0.5, 0, 0.1)]

    # Add each peak with placeholder intensity, sigma and HKL values.
    for q_vec in q_vectors:
        pk = peaks.createPeak(q_vec)
        pk.setIntensity(100)
        pk.setSigmaIntensity(10)
        pk.setHKL(1, 1, 1)
        peaks.addPeak(pk)
    return peaks
def _create_peaks_workspace(self):
    """Create a dummy peaks workspace populated with test peaks."""
    # Load the miniature TOPAZ test instrument and build an empty
    # peaks workspace against it.
    path = FileFinder.getFullPath("IDFs_for_UNIT_TESTING/MINITOPAZ_Definition.xml")
    empty_inst = LoadEmptyInstrument(Filename=path)
    peaks_ws = CreatePeaksWorkspace(empty_inst, 0)
    DeleteWorkspace(empty_inst)
    SetUB(peaks_ws, 1, 1, 1, 90, 90, 90)

    # Offsets are chosen so that every Q vector falls on the detector
    # bank defined in the IDF.
    centre = np.array([-5.1302, 2.5651, 3.71809])
    for offset_z in np.arange(0, 1, 0.1):
        for offset_y in np.arange(-0.5, 0, 0.1):
            q = centre.copy()
            q[1] += offset_y
            q[2] += offset_z
            # Dummy values for intensity, sigma and HKL.
            pk = peaks_ws.createPeak(q)
            pk.setIntensity(100)
            pk.setSigmaIntensity(10)
            pk.setHKL(1, 1, 1)
            peaks_ws.addPeak(pk)
    return peaks_ws
def test_handles_inaccurate_goniometer(self):
    """FindGlobalBMatrix should recover the lattice even when the
    goniometer rotation stored on each run is slightly wrong."""
    peaks1 = CreatePeaksWorkspace(InstrumentWorkspace=self.ws,
                                  NumberOfPeaks=0,
                                  OutputWorkspace="SXD_peaks3")
    peaks2 = CloneWorkspace(InputWorkspace=peaks1,
                            OutputWorkspace="SXD_peaks4")
    # Store a different goniometer rotation on each run.
    rot = 5
    SetGoniometer(Workspace=peaks1, Axis0=f'{-rot},0,1,0,1')
    SetGoniometer(Workspace=peaks2, Axis0=f'{rot},0,1,0,1')

    # Add peaks at QLab corresponding to rotations 1 degree away from
    # the stored goniometer angles; UB is for alatt = [4,4,10].
    UB = np.diag([0.25, 0.25, 0.1])

    def _append_peak(ws, angle_deg, hkl_vec):
        # QLab = 2*pi * R(angle) * UB * hkl
        qlab = 2 * np.pi * np.matmul(
            np.matmul(getR(angle_deg, [0, 1, 0]), UB), hkl_vec)
        ws.addPeak(ws.createPeak(qlab))

    for h in range(0, 3):
        for k in range(0, 3):
            hkl = np.array([h, k, 4])
            _append_peak(peaks1, -(rot + 1), hkl)
            _append_peak(peaks2, rot + 1, hkl)

    FindGlobalBMatrix(PeakWorkspaces=[peaks1, peaks2], a=4.15, b=3.95,
                      c=10, alpha=88, beta=88, gamma=89, Tolerance=0.15)

    # Lattice parameters shouldn't be affected by the goniometer error.
    self.assert_lattice([peaks1, peaks2], 4.0, 4.0, 10.0, 90.0, 90.0,
                        90.0, delta_latt=2e-2, delta_angle=2.5e-1)
    # Both runs should share the same B matrix ...
    self.assert_matrix([peaks1], getBMatrix(peaks2), getBMatrix,
                       delta=1e-10)
    # ... and have a U matrix close to identity.
    self.assert_matrix([peaks1, peaks2], np.eye(3), getUMatrix,
                       delta=5e-2)
def PyExec(self):
    """Iteratively link observed peaks to predicted peaks.

    On each iteration, observed peaks whose QLab vector and d-spacing
    fall within (shrinking) tolerances of a predicted peak inherit that
    peak's HKL.  A UB is then forced onto the linked peaks from the
    known lattice parameters and used to predict a fresh set of peaks
    for the next iteration.  Results are returned through the
    ``LinkedPeaks`` and ``LinkedPredictedPeaks`` output properties.
    """
    # create peaks workspace to store linked peaks
    linked_peaks = CreatePeaksWorkspace(
        InstrumentWorkspace=self._workspace,
        NumberOfPeaks=0,
        StoreInADS=False)

    # create peaks table to store linked predicted peaks
    linked_peaks_predicted = CreatePeaksWorkspace(
        InstrumentWorkspace=self._workspace,
        NumberOfPeaks=0,
        StoreInADS=False)

    for m in range(0, self._iterations):
        # First pass uses the caller-supplied predictions; later passes
        # use the peaks re-predicted from the previous iteration's UB.
        if m == 0:
            predictor = self._predicted_peaks
        if m > 0:
            predictor = linked_peaks_predicted

        # Tighten the Q tolerance and widen the candidate pool as the
        # iterations progress.
        qtol_var = self._qtol * self._qdecrement**m
        num_peaks_var = self._num_peaks + self._peak_increment * m

        # add q_lab and dspacing values of found peaks to a list
        qlabs_observed = np.array(self._observed_peaks.column("QLab"))
        dspacings_observed = np.array(
            self._observed_peaks.column("DSpacing"))

        # sort the predicted peaks from largest to smallest dspacing
        qlabs_predicted = np.array(predictor.column("QLab"))
        dspacings_predicted = np.array(predictor.column("DSpacing"))

        # get the indexing list that sorts dspacing from largest to
        # smallest
        hkls = np.array([[p.getH(), p.getK(), p.getL()]
                         for p in predictor])
        idx = dspacings_predicted.argsort()[::-1]
        HKL_predicted = hkls[idx, :]

        # sort q, d and h, k, l by this indexing
        qlabs_predicted = qlabs_predicted[idx]
        dspacings_predicted = dspacings_predicted[idx]

        # keep only the strongest (largest-d) candidates this pass
        q_ordered = qlabs_predicted[:num_peaks_var]
        d_ordered = dspacings_predicted[:num_peaks_var]
        HKL_ordered = HKL_predicted[:num_peaks_var]

        # loop through the ordered find peaks, compare q and d to each
        # predicted peak if the q and d values of a found peak match a
        # predicted peak within tolerance, the found peak inherits
        # the HKL of the predicted peak
        for i in range(len(qlabs_observed)):
            qx_obs, qy_obs, qz_obs = qlabs_observed[i]
            q_obs = V3D(qx_obs, qy_obs, qz_obs)
            p_obs = linked_peaks.createPeak(q_obs)
            d_obs = dspacings_observed[i]
            for j in range(len(q_ordered)):
                qx_pred, qy_pred, qz_pred = q_ordered[j]
                d_pred = d_ordered[j]
                # NOTE(review): there is no break after a match, so an
                # observed peak matching several predicted peaks would
                # be added once per match — confirm this is intended.
                if (qx_pred - qtol_var <= qx_obs <=
                        qx_pred + qtol_var and
                        qy_pred - qtol_var <= qy_obs <=
                        qy_pred + qtol_var and
                        qz_pred - qtol_var <= qz_obs <=
                        qz_pred + qtol_var and
                        d_pred - self._dtol <= d_obs <=
                        d_pred + self._dtol):
                    h, k, l = HKL_ordered[j]
                    p_obs.setHKL(h, k, l)
                    linked_peaks.addPeak(p_obs)

        # Clean up peaks where H == K == L == 0
        linked_peaks = FilterPeaks(linked_peaks,
                                   FilterVariable="h^2+k^2+l^2",
                                   Operator="!=",
                                   FilterValue="0")

        # force UB on linked_peaks using known lattice parameters
        CalculateUMatrix(PeaksWorkspace=linked_peaks,
                         a=self._a,
                         b=self._b,
                         c=self._c,
                         alpha=self._alpha,
                         beta=self._beta,
                         gamma=self._gamma,
                         StoreInADS=False)

        # new linked predicted peaks
        linked_peaks_predicted = PredictPeaks(
            InputWorkspace=linked_peaks,
            WavelengthMin=self._wavelength_min,
            WavelengthMax=self._wavelength_max,
            MinDSpacing=self._min_dspacing,
            MaxDSpacing=self._max_dspacing,
            ReflectionCondition=self._reflection_condition,
            StoreInADS=False)

    # clean up: publish outputs and remove ADS leftovers
    self.setProperty("LinkedPeaks", linked_peaks)
    self.setProperty("LinkedPredictedPeaks", linked_peaks_predicted)
    if mtd.doesExist("linked_peaks"):
        DeleteWorkspace(linked_peaks)
    if mtd.doesExist("linked_peaks_predicted"):
        DeleteWorkspace(linked_peaks_predicted)
    if self._delete_ws:
        DeleteWorkspace(self._workspace)
def PyExec(self):
    """Iteratively link observed peaks to predicted peaks.

    On each iteration, observed peaks whose QLab vector and d-spacing
    fall within (shrinking) tolerances of a predicted peak inherit that
    peak's HKL.  A UB is then forced onto the linked peaks from the
    known lattice parameters and used to predict a fresh set of peaks
    for the next iteration.  Results are returned through the
    ``LinkedPeaks`` and ``LinkedPredictedPeaks`` output properties.

    Fixes: peaks-table columns are now read by NAME ("QLab",
    "DSpacing") instead of magic positional indices (15, 8), and HKL is
    read through the Peak accessors ``getH()/getK()/getL()`` instead of
    dict-style ``p['h']`` item access, matching the sibling
    implementation of this algorithm in this file.
    """
    # create peaks workspace to store linked peaks
    linked_peaks = CreatePeaksWorkspace(
        InstrumentWorkspace=self._workspace,
        NumberOfPeaks=0,
        StoreInADS=False)

    # create peaks table to store linked predicted peaks
    linked_peaks_predicted = CreatePeaksWorkspace(
        InstrumentWorkspace=self._workspace,
        NumberOfPeaks=0,
        StoreInADS=False)

    for m in range(0, self._iterations):
        # First pass uses the caller-supplied predictions; later passes
        # use the peaks re-predicted from the previous iteration's UB.
        if m == 0:
            predictor = self._predicted_peaks
        if m > 0:
            predictor = linked_peaks_predicted

        # Tighten the Q tolerance and widen the candidate pool as the
        # iterations progress.
        qtol_var = self._qtol * self._qdecrement**m
        num_peaks_var = self._num_peaks + self._peak_increment * m

        # add q_lab and dspacing values of found peaks to a list
        # (named column access instead of fragile positional indices)
        qlabs_observed = np.array(self._observed_peaks.column("QLab"))
        dspacings_observed = np.array(
            self._observed_peaks.column("DSpacing"))

        # sort the predicted peaks from largest to smallest dspacing
        qlabs_predicted = np.array(predictor.column("QLab"))
        dspacings_predicted = np.array(predictor.column("DSpacing"))

        # get the indexing list that sorts dspacing from largest to
        # smallest (HKL via the Peak accessor methods)
        hkls = np.array([[p.getH(), p.getK(), p.getL()]
                         for p in predictor])
        idx = dspacings_predicted.argsort()[::-1]
        HKL_predicted = hkls[idx, :]

        # sort q, d and h, k, l by this indexing
        qlabs_predicted = qlabs_predicted[idx]
        dspacings_predicted = dspacings_predicted[idx]

        # keep only the strongest (largest-d) candidates this pass
        q_ordered = qlabs_predicted[:num_peaks_var]
        d_ordered = dspacings_predicted[:num_peaks_var]
        HKL_ordered = HKL_predicted[:num_peaks_var]

        # loop through the ordered find peaks, compare q and d to each
        # predicted peak; if the q and d values of a found peak match a
        # predicted peak within tolerance, the found peak inherits the
        # HKL of the predicted peak
        for i in range(len(qlabs_observed)):
            qx_obs, qy_obs, qz_obs = qlabs_observed[i]
            q_obs = V3D(qx_obs, qy_obs, qz_obs)
            p_obs = linked_peaks.createPeak(q_obs)
            d_obs = dspacings_observed[i]
            for j in range(len(q_ordered)):
                qx_pred, qy_pred, qz_pred = q_ordered[j]
                d_pred = d_ordered[j]
                if (qx_pred - qtol_var <= qx_obs <=
                        qx_pred + qtol_var and
                        qy_pred - qtol_var <= qy_obs <=
                        qy_pred + qtol_var and
                        qz_pred - qtol_var <= qz_obs <=
                        qz_pred + qtol_var and
                        d_pred - self._dtol <= d_obs <=
                        d_pred + self._dtol):
                    h, k, l = HKL_ordered[j]
                    p_obs.setHKL(h, k, l)
                    linked_peaks.addPeak(p_obs)

        # Clean up peaks where H == K == L == 0
        linked_peaks = FilterPeaks(linked_peaks,
                                   FilterVariable="h^2+k^2+l^2",
                                   Operator="!=",
                                   FilterValue="0")

        # force UB on linked_peaks using known lattice parameters
        CalculateUMatrix(PeaksWorkspace=linked_peaks,
                         a=self._a,
                         b=self._b,
                         c=self._c,
                         alpha=self._alpha,
                         beta=self._beta,
                         gamma=self._gamma,
                         StoreInADS=False)

        # new linked predicted peaks
        linked_peaks_predicted = PredictPeaks(
            InputWorkspace=linked_peaks,
            WavelengthMin=self._wavelength_min,
            WavelengthMax=self._wavelength_max,
            MinDSpacing=self._min_dspacing,
            MaxDSpacing=self._max_dspacing,
            ReflectionCondition=self._reflection_condition,
            StoreInADS=False)

    # clean up: publish outputs and remove ADS leftovers
    self.setProperty("LinkedPeaks", linked_peaks)
    self.setProperty("LinkedPredictedPeaks", linked_peaks_predicted)
    if mtd.doesExist("linked_peaks"):
        DeleteWorkspace(linked_peaks)
    if mtd.doesExist("linked_peaks_predicted"):
        DeleteWorkspace(linked_peaks_predicted)
    if self._delete_ws:
        DeleteWorkspace(self._workspace)