def runTest(self):
    # Load raw data (bank 1)
    wsMD = LoadMD("WISH38237_MD.nxs")  # default name so the vanadium workspace isn't overwritten

    # For each modulation vector, predict and integrate peaks, then combine
    qs = [(0.15, 0, 0.3), (-0.15, 0, 0.3)]
    all_pks = CreatePeaksWorkspace(InstrumentWorkspace=wsMD, NumberOfPeaks=0,
                                   OutputWorkspace="all_pks")
    LoadIsawUB(InputWorkspace=all_pks, Filename='Wish_Diffuse_Scattering_ISAW_UB.mat')

    # Predict the parent (integer-HKL) peaks
    parent = PredictPeaks(InputWorkspace=all_pks, WavelengthMin=0.8, WavelengthMax=9.3,
                          MinDSpacing=0.5, ReflectionCondition="Primitive")

    self._pfps = []
    self._saved_files = []
    for iq, q in enumerate(qs):
        wsname = f'pfp_{iq}'
        PredictFractionalPeaks(Peaks=parent, IncludeAllPeaksInRange=True,
                               Hmin=0, Hmax=0, Kmin=1, Kmax=1, Lmin=0, Lmax=1,
                               ReflectionCondition='Primitive', MaxOrder=1,
                               ModVector1=",".join([str(qi) for qi in q]),
                               FracPeaks=wsname)
        # should get rid of one peak in the q1 table
        FilterPeaks(InputWorkspace=wsname, OutputWorkspace=wsname,
                    FilterVariable='Wavelength', FilterValue=9.3, Operator='<')
        FilterPeaks(InputWorkspace=wsname, OutputWorkspace=wsname,
                    FilterVariable='Wavelength', FilterValue=0.8, Operator='>')
        IntegratePeaksMD(InputWorkspace=wsMD, PeakRadius='0.1',
                         BackgroundInnerRadius='0.1', BackgroundOuterRadius='0.15',
                         PeaksWorkspace=wsname, OutputWorkspace=wsname,
                         IntegrateIfOnEdge=False,
                         UseOnePercentBackgroundCorrection=False)
        all_pks = CombinePeaksWorkspaces(LHSWorkspace=all_pks, RHSWorkspace=wsname)
        self._pfps.append(ADS.retrieve(wsname))

    self._filepath = os.path.join(config['defaultsave.directory'],
                                  'WISH_IntegratedSatellite.int')
    SaveReflections(InputWorkspace=all_pks, Filename=self._filepath, Format='Jana')
    self._all_pks = all_pks
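# A minimal validation sketch in the style of the other system tests in this section.
# This is an assumption for illustration, not the original test's validation: it only
# checks that the Jana-format file was written and the combined workspace is non-empty.
def validate(self):
    self.assertTrue(os.path.isfile(self._filepath))
    self.assertTrue(self._all_pks.getNumberPeaks() > 0)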
def runTest(self):
    ws = LoadRaw(Filename='WISH00038237.raw', OutputWorkspace='38237')
    ws = ConvertUnits(ws, 'dSpacing', OutputWorkspace='38237')
    UB = np.array([[-0.00601763, 0.07397297, 0.05865706],
                   [0.05373321, 0.050198, -0.05651455],
                   [-0.07822144, 0.0295911, -0.04489172]])
    SetUB(ws, UB=UB)

    self._peaks = PredictPeaks(ws, WavelengthMin=0.1, WavelengthMax=100,
                               OutputWorkspace='peaks')
    # We specifically want to check peak (-5, -1, -7) exists, so filter for it
    self._filtered = FilterPeaks(self._peaks, "h^2+k^2+l^2", 75, '=',
                                 OutputWorkspace='filtered')
    SaveIsawPeaks(self._peaks, Filename='WISHSXReductionPeaksTest.peaks')
def setUp(self):
    # Load an empty instrument so a peak table can be created
    self.ws = LoadEmptyInstrument(InstrumentName='SXD', OutputWorkspace='sxd')
    ub = np.array([[-0.00601763, 0.07397297, 0.05865706],
                   [0.05373321, 0.050198, -0.05651455],
                   [-0.07822144, 0.0295911, -0.04489172]])
    SetUB(self.ws, UB=ub)
    PredictPeaks(self.ws, WavelengthMin=1, WavelengthMax=1.1, MinDSpacing=1,
                 MaxDSpacing=1.1, OutputWorkspace='test')  # 8 peaks
    PredictSatellitePeaks(Peaks='test', SatellitePeaks='test_sat',
                          ModVector1='0,0,0.33', MaxOrder=1)
    self.peaks = CombinePeaksWorkspaces(LHSWorkspace='test_sat', RHSWorkspace='test',
                                        OutputWorkspace='test')
class WISHSingleCrystalPeakPredictionTest(MantidSystemTest):
    """
    At the time of writing, WISH users rely quite heavily on the PredictPeaks
    algorithm. As WISH has tubes rather than rectangular detectors, peaks
    sometimes fall in the gaps between the tubes. Here we check that
    PredictPeaks works on a real WISH dataset & UB. This also includes an
    example of a peak whose center is predicted to fall between two tubes.
    """

    def requiredFiles(self):
        return ["WISHPredictedSingleCrystalPeaks.nxs"]

    def cleanup(self):
        ADS.clear()
        try:
            os.remove(self._peaks_file)
        except:
            pass

    def runTest(self):
        ws = LoadEmptyInstrument(InstrumentName='WISH')
        UB = np.array([[-0.00601763, 0.07397297, 0.05865706],
                       [0.05373321, 0.050198, -0.05651455],
                       [-0.07822144, 0.0295911, -0.04489172]])
        SetUB(ws, UB=UB)

        self._peaks = PredictPeaks(ws, WavelengthMin=0.1, WavelengthMax=100,
                                   OutputWorkspace='peaks')
        # We specifically want to check peak (-5, -1, -7) exists, so filter for it
        self._filtered = FilterPeaks(self._peaks, "h^2+k^2+l^2", 75, '=',
                                     OutputWorkspace='filtered')
        SaveIsawPeaks(self._peaks, Filename='WISHSXReductionPeaksTest.peaks')

    def validate(self):
        self.assertEqual(self._peaks.rowCount(), 527)
        self.assertEqual(self._filtered.rowCount(), 7)

        # The peak at (-5, -1, -7) is known to fall in the gap between WISH's tubes.
        # Specifically check it is predicted to exist, because past bugs have been
        # found in the ray tracing.
        BasicPeak = namedtuple('Peak', ('DetID', 'BankName', 'h', 'k', 'l'))
        expected = BasicPeak(DetID=9202086, BankName='WISHpanel09', h=-5.0, k=-1.0, l=-7.0)

        expected_peak_found = False
        peak_count = self._filtered.rowCount()
        for i in range(peak_count):  # iterate over the table representation of the PeaksWorkspace
            peak_row = self._filtered.row(i)
            peak = BasicPeak(**{k: peak_row[k] for k in BasicPeak._fields})
            if peak == expected:
                expected_peak_found = True
                break

        self.assertTrue(expected_peak_found,
                        msg="Peak at {} expected but it was not found".format(expected))
        self._peaks_file = os.path.join(config['defaultsave.directory'],
                                        'WISHSXReductionPeaksTest.peaks')
        self.assertTrue(os.path.isfile(self._peaks_file))

        return self._peaks.name(), "WISHPredictedSingleCrystalPeaks.nxs"
def PyExec(self):
    # create a peaks workspace to store the linked peaks
    linked_peaks = CreatePeaksWorkspace(InstrumentWorkspace=self._workspace,
                                        NumberOfPeaks=0, StoreInADS=False)

    # create a peaks workspace to store the linked predicted peaks
    linked_peaks_predicted = CreatePeaksWorkspace(InstrumentWorkspace=self._workspace,
                                                  NumberOfPeaks=0, StoreInADS=False)

    for m in range(0, self._iterations):
        if m == 0:
            predictor = self._predicted_peaks
        else:
            predictor = linked_peaks_predicted

        qtol_var = self._qtol * self._qdecrement**m
        num_peaks_var = self._num_peaks + self._peak_increment * m

        # add the q_lab and d-spacing values of the found peaks to arrays
        qlabs_observed = np.array(self._observed_peaks.column("QLab"))
        dspacings_observed = np.array(self._observed_peaks.column("DSpacing"))

        # sort the predicted peaks from largest to smallest d-spacing
        qlabs_predicted = np.array(predictor.column("QLab"))
        dspacings_predicted = np.array(predictor.column("DSpacing"))

        # get the indexing list that sorts d-spacing from largest to smallest
        hkls = np.array([[p.getH(), p.getK(), p.getL()] for p in predictor])
        idx = dspacings_predicted.argsort()[::-1]
        HKL_predicted = hkls[idx, :]

        # sort q, d and h, k, l by this indexing
        qlabs_predicted = qlabs_predicted[idx]
        dspacings_predicted = dspacings_predicted[idx]

        q_ordered = qlabs_predicted[:num_peaks_var]
        d_ordered = dspacings_predicted[:num_peaks_var]
        HKL_ordered = HKL_predicted[:num_peaks_var]

        # Loop through the ordered found peaks and compare q and d to each
        # predicted peak. If the q and d values of a found peak match a
        # predicted peak within tolerance, the found peak inherits the HKL
        # of the predicted peak.
        for i in range(len(qlabs_observed)):
            qx_obs, qy_obs, qz_obs = qlabs_observed[i]
            q_obs = V3D(qx_obs, qy_obs, qz_obs)
            p_obs = linked_peaks.createPeak(q_obs)
            d_obs = dspacings_observed[i]

            for j in range(len(q_ordered)):
                qx_pred, qy_pred, qz_pred = q_ordered[j]
                d_pred = d_ordered[j]

                if (qx_pred - qtol_var <= qx_obs <= qx_pred + qtol_var
                        and qy_pred - qtol_var <= qy_obs <= qy_pred + qtol_var
                        and qz_pred - qtol_var <= qz_obs <= qz_pred + qtol_var
                        and d_pred - self._dtol <= d_obs <= d_pred + self._dtol):
                    h, k, l = HKL_ordered[j]
                    p_obs.setHKL(h, k, l)
                    linked_peaks.addPeak(p_obs)

        # clean up peaks where h == k == l == 0
        linked_peaks = FilterPeaks(linked_peaks, FilterVariable="h^2+k^2+l^2",
                                   Operator="!=", FilterValue="0")

        # force a UB on linked_peaks using the known lattice parameters
        CalculateUMatrix(PeaksWorkspace=linked_peaks, a=self._a, b=self._b,
                         c=self._c, alpha=self._alpha, beta=self._beta,
                         gamma=self._gamma, StoreInADS=False)

        # predict new peaks from the linked peaks for the next iteration
        linked_peaks_predicted = PredictPeaks(InputWorkspace=linked_peaks,
                                              WavelengthMin=self._wavelength_min,
                                              WavelengthMax=self._wavelength_max,
                                              MinDSpacing=self._min_dspacing,
                                              MaxDSpacing=self._max_dspacing,
                                              ReflectionCondition=self._reflection_condition,
                                              StoreInADS=False)

    # set outputs and clean up
    self.setProperty("LinkedPeaks", linked_peaks)
    self.setProperty("LinkedPredictedPeaks", linked_peaks_predicted)
    if mtd.doesExist("linked_peaks"):
        DeleteWorkspace(linked_peaks)
    if mtd.doesExist("linked_peaks_predicted"):
        DeleteWorkspace(linked_peaks_predicted)
    if self._delete_ws:
        DeleteWorkspace(self._workspace)
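# The nested tolerance test above can be hard to read inline. Below is a minimal
# standalone sketch of the same criterion, assuming plain numpy inputs; the
# function and argument names are illustrative, not part of the algorithm's API.
import numpy as np

def peak_matches(q_obs, d_obs, q_pred, d_pred, qtol, dtol):
    """True if every Q-lab component agrees within +/- qtol and the d-spacing
    agrees within +/- dtol, mirroring the box test used in the loop above."""
    within_q = np.all(np.abs(np.asarray(q_obs) - np.asarray(q_pred)) <= qtol)
    within_d = abs(d_obs - d_pred) <= dtol
    return bool(within_q and within_d)

# Example: peak_matches((1.0, 2.0, 3.0), 1.5, (1.02, 1.99, 3.01), 1.49, 0.05, 0.02) -> True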
class WISHSingleCrystalPeakPredictionTest(MantidSystemTest):
    """
    At the time of writing, WISH users rely quite heavily on the PredictPeaks
    algorithm. As WISH has tubes rather than rectangular detectors, peaks
    sometimes fall in the gaps between the tubes. Here we check that
    PredictPeaks works on a real WISH dataset & UB. This also includes an
    example of a peak whose center is predicted to fall between two tubes.
    """

    def requiredFiles(self):
        return ["WISH00038237.raw", "WISHPredictedSingleCrystalPeaks.nxs"]

    def requiredMemoryMB(self):
        # Need lots of memory for the full WISH dataset
        return 24000

    def cleanup(self):
        try:
            os.remove(self._peaks_file)
        except:
            pass

    def runTest(self):
        ws = LoadRaw(Filename='WISH00038237.raw', OutputWorkspace='38237')
        ws = ConvertUnits(ws, 'dSpacing', OutputWorkspace='38237')
        UB = np.array([[-0.00601763, 0.07397297, 0.05865706],
                       [0.05373321, 0.050198, -0.05651455],
                       [-0.07822144, 0.0295911, -0.04489172]])
        SetUB(ws, UB=UB)

        self._peaks = PredictPeaks(ws, WavelengthMin=0.1, WavelengthMax=100,
                                   OutputWorkspace='peaks')
        # We specifically want to check peak (-5, -1, -7) exists, so filter for it
        self._filtered = FilterPeaks(self._peaks, "h^2+k^2+l^2", 75, '=',
                                     OutputWorkspace='filtered')
        SaveIsawPeaks(self._peaks, Filename='WISHSXReductionPeaksTest.peaks')

    def validate(self):
        self.assertEqual(self._peaks.rowCount(), 510)
        self.assertEqual(self._filtered.rowCount(), 6)

        # The peak at (-5, -1, -7) is known to fall in the gap between WISH's tubes.
        # Specifically check it is predicted to exist, because past bugs have been
        # found in the ray tracing.
        BasicPeak = namedtuple('Peak', ('DetID', 'BankName', 'h', 'k', 'l'))
        expected = BasicPeak(DetID=9202086, BankName='WISHpanel09', h=-5.0, k=-1.0, l=-7.0)

        expected_peak_found = False
        for full_peak in self._filtered:
            peak = BasicPeak(DetID=full_peak.getDetectorID(),
                             BankName=full_peak.getBankName(),
                             h=full_peak.getH(),
                             k=full_peak.getK(),
                             l=full_peak.getL())
            if peak == expected:
                expected_peak_found = True
                break

        self.assertTrue(expected_peak_found,
                        msg="Peak at {} expected but it was not found".format(expected))
        self._peaks_file = os.path.join(config['defaultsave.directory'],
                                        'WISHSXReductionPeaksTest.peaks')
        self.assertTrue(os.path.isfile(self._peaks_file))

        return self._peaks.name(), "WISHPredictedSingleCrystalPeaks.nxs"
                     Tolerance=tolerance)
print(peaks_ws.sample().getOrientedLattice())

indexed = IndexPeaks(PeaksWorkspace=peaks_ws, Tolerance=tolerance)
print("Number of Indexed Peaks: {:d}".format(indexed[0]))

#
# Get the complete list of peaks to be integrated and load the UB matrix into
# the predicted peaks workspace, so that information can be used by the
# PeakIntegration algorithm.
#
if integrate_predicted_peaks:
    print("PREDICTING peaks to integrate....")
    peaks_ws = PredictPeaks(InputWorkspace=peaks_ws,
                            WavelengthMin=min_pred_wl,
                            WavelengthMax=max_pred_wl,
                            MinDSpacing=min_pred_dspacing,
                            MaxDSpacing=max_pred_dspacing,
                            ReflectionCondition='Primitive')

    # Remove peaks on the detector edge
    peaks_on_edge = []
    for i in range(peaks_ws.getNumberPeaks()):
        pi = peaks_ws.getPeak(i)
        if pi.getRow() < 16 or pi.getRow() > 240 or pi.getCol() < 16 or pi.getCol() > 240:
            peaks_on_edge.append(i)
    DeleteTableRows(TableWorkspace=peaks_ws, Rows=peaks_on_edge)

    # Find peak centroids from the predicted peak positions on the detector face
    # in the event workspace
    peaks_ws = CentroidPeaks(InPeaksWorkspace=peaks_ws,
                             InputWorkspace=event_ws,
                             PeakRadius=4,
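# The edge rejection above assumes a 256x256 rectangular detector, so rows or
# columns below 16 or above 240 lie in the border region. A small standalone
# sketch of the same test; the function name and the 'border'/'n_pixels'
# parameters are illustrative assumptions, not part of the reduction script.
def is_on_detector_edge(peak, border=16, n_pixels=256):
    row, col = peak.getRow(), peak.getCol()
    return (row < border or row > n_pixels - border
            or col < border or col > n_pixels - border)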
class WISHSingleCrystalPeakPredictionTest(MantidSystemTest):
    """
    At the time of writing, WISH users rely quite heavily on the PredictPeaks
    algorithm. As WISH has tubes rather than rectangular detectors, peaks
    sometimes fall in the gaps between the tubes. Here we check that
    PredictPeaks works on a real WISH dataset & UB. This also includes an
    example of a peak whose center is predicted to fall between two tubes.
    """

    def requiredFiles(self):
        return ["WISH00038237.raw", "WISHPredictedSingleCrystalPeaks.nxs"]

    def requiredMemoryMB(self):
        # Need lots of memory for the full WISH dataset
        return 16000

    def cleanup(self):
        try:
            os.remove(self._peaks_file)
        except:
            pass

    def runTest(self):
        ws = LoadRaw(Filename='WISH00038237.raw', OutputWorkspace='38237')
        ws = ConvertUnits(ws, 'dSpacing', OutputWorkspace='38237')
        UB = np.array([[-0.00601763, 0.07397297, 0.05865706],
                       [0.05373321, 0.050198, -0.05651455],
                       [-0.07822144, 0.0295911, -0.04489172]])
        SetUB(ws, UB=UB)

        self._peaks = PredictPeaks(ws, WavelengthMin=0.1, WavelengthMax=100,
                                   OutputWorkspace='peaks')
        # We specifically want to check peak (-5, -1, -7) exists, so filter for it
        self._filtered = FilterPeaks(self._peaks, "h^2+k^2+l^2", 75, '=',
                                     OutputWorkspace='filtered')
        SaveIsawPeaks(self._peaks, Filename='WISHSXReductionPeaksTest.peaks')

    def validate(self):
        self.assertEqual(self._peaks.rowCount(), 510)
        self.assertEqual(self._filtered.rowCount(), 6)

        # The peak at (-5, -1, -7) is known to fall in the gap between WISH's tubes.
        # Specifically check it is predicted to exist, because past bugs have been
        # found in the ray tracing.
        Peak = namedtuple('Peak', ('DetID', 'BankName', 'h', 'k', 'l'))
        expected = Peak(DetID=9202086, BankName='WISHpanel09', h=-5.0, k=-1.0, l=-7.0)

        expected_peak_found = False
        for row in self._filtered:
            peak = Peak(DetID=row['DetID'], BankName=row['BankName'],
                        h=row['h'], k=row['k'], l=row['l'])
            if peak == expected:
                expected_peak_found = True
                break

        self.assertTrue(expected_peak_found,
                        msg="Peak at {} expected but it was not found".format(expected))
        self._peaks_file = os.path.join(config['defaultsave.directory'],
                                        'WISHSXReductionPeaksTest.peaks')
        self.assertTrue(os.path.isfile(self._peaks_file))

        return self._peaks.name(), "WISHPredictedSingleCrystalPeaks.nxs"
def PyExec(self):
    input_ws = self.getProperty("InputWorkspace").value
    ub_ws = self.getProperty("UBWorkspace").value
    output_ws = self.getProperty("OutputWorkspace").valueAsStr
    reflection_condition = self.getProperty("ReflectionCondition").value

    # Whether to use the inner goniometer, depending on omega and phi in the sample logs
    use_inner = False
    min_angle = None
    max_angle = None
    wavelength = 0.0

    if input_ws.getNumExperimentInfo() == 0:
        # The experiment info is needed to read the wavelength and goniometer logs
        raise RuntimeWarning("No experiment info was found in input '{}'".format(input_ws.getName()))

    exp_info = input_ws.getExperimentInfo(0)
    if exp_info.run().hasProperty("wavelength"):
        wavelength = exp_info.run().getProperty("wavelength").value

    if exp_info.run().hasProperty("omega") and exp_info.run().hasProperty("phi"):
        gon = exp_info.run().getGoniometer().getEulerAngles('YZY')
        if np.isclose(exp_info.run().getTimeAveragedStd("omega"), 0.0):
            use_inner = True
            min_angle = -exp_info.run().getLogData('phi').value.max()
            max_angle = -exp_info.run().getLogData('phi').value.min()

            # Sometimes the goniometer angle is 180 degrees off what is expected from the log
            phi_log = -exp_info.run().getLogData('phi').value[0]
            if np.isclose(phi_log + 180, gon[2]):
                min_angle += 180
                max_angle += 180
            elif np.isclose(phi_log - 180, gon[2]):
                min_angle -= 180
                max_angle -= 180
        elif np.isclose(exp_info.run().getTimeAveragedStd("phi"), 0.0):
            use_inner = False
            min_angle = -exp_info.run().getLogData('omega').value.max()
            max_angle = -exp_info.run().getLogData('omega').value.min()

            # Sometimes the goniometer angle is 180 degrees off what is expected from the log
            omega_log = -exp_info.run().getLogData('omega').value[0]
            if np.isclose(omega_log + 180, gon[0]):
                min_angle += 180
                max_angle += 180
            elif np.isclose(omega_log - 180, gon[0]):
                min_angle -= 180
                max_angle -= 180
        else:
            self.log().warning("No appropriate goniometer rotation found; trying anyway")

    self.log().information("Using inner goniometer: {}".format(use_inner))

    if not self.getProperty("Wavelength").isDefault:
        wavelength = self.getProperty("Wavelength").value
    elif wavelength == 0:
        raise RuntimeWarning("No wavelength found, you need to provide one")

    # temporarily set the UB on the workspace if one is provided via UBWorkspace
    tmp_ws_name = '__HB3APredictPeaks_UB_tmp'
    if ub_ws is not None:
        input_ws = CloneMDWorkspace(InputWorkspace=input_ws, OutputWorkspace=tmp_ws_name)
        CopySample(InputWorkspace=ub_ws, OutputWorkspace=tmp_ws_name,
                   CopyName=False, CopyMaterial=False, CopyEnvironment=False,
                   CopyShape=False, CopyLattice=True)

    if self.getProperty("SatellitePeaks").value:
        peaks = PredictPeaks(InputWorkspace=input_ws,
                             ReflectionCondition=reflection_condition,
                             MinDSpacing=self.getProperty("MinDSpacing").value,
                             MaxDSpacing=self.getProperty("MaxDSpacing").value,
                             OutputType='LeanElasticPeak',
                             CalculateWavelength=False,
                             OutputWorkspace=output_ws)
        peaks = PredictSatellitePeaks(peaks,
                                      ModVector1=self.getProperty("ModVector1").value,
                                      ModVector2=self.getProperty("ModVector2").value,
                                      ModVector3=self.getProperty("ModVector3").value,
                                      MaxOrder=self.getProperty("MaxOrder").value,
                                      GetModVectorsFromUB=self.getProperty("GetModVectorsFromUB").value,
                                      CrossTerms=self.getProperty("CrossTerms").value,
                                      IncludeIntegerHKL=self.getProperty("IncludeIntegerHKL").value,
                                      MinDSpacing=self.getProperty("MinDSpacing").value,
                                      MaxDSpacing=self.getProperty("MaxDSpacing").value,
                                      SatellitePeaks=output_ws)
        HFIRCalculateGoniometer(peaks, Wavelength=wavelength)
    else:
        peaks = PredictPeaks(InputWorkspace=input_ws,
                             ReflectionCondition=reflection_condition,
                             CalculateGoniometerForCW=True,
                             Wavelength=wavelength,
                             FlipX=True,
                             InnerGoniometer=use_inner,
                             MinAngle=min_angle,
                             MaxAngle=max_angle,
                             MinDSpacing=self.getProperty("MinDSpacing").value,
                             MaxDSpacing=self.getProperty("MaxDSpacing").value,
                             OutputWorkspace=output_ws)

    # delete the temporary workspace if it was created
    if mtd.doesExist(tmp_ws_name):
        DeleteWorkspace(tmp_ws_name)

    self.setProperty("OutputWorkspace", peaks)
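# A hedged usage sketch of the algorithm above. The temporary workspace name
# suggests it is registered as HB3APredictPeaks, but treat the algorithm name
# and the input workspace 'data_md' as assumptions for illustration; the
# property names match those read via getProperty() in PyExec.
from mantid.simpleapi import HB3APredictPeaks

predicted = HB3APredictPeaks(InputWorkspace='data_md',  # MD workspace with a wavelength log and a UB set
                             ReflectionCondition='Primitive',
                             MinDSpacing=0.5,
                             MaxDSpacing=5.0,
                             OutputWorkspace='predicted_peaks')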