def PyExec(self): k = self.getProperty("NumOfQs").value peak_radius = self.getProperty("PeakRadius").value background_radii = (self.getProperty("BackgroundInnerRadius").value, self.getProperty("BackgroundOuterRadius").value) I_over_sigma = self.getProperty("IOverSigma").value cluster_threshold = self.getProperty("ClusterThreshold").value # if user did not specify the number of qs then # set the k value to None if k == -1: k = None md = self.getProperty("MDWorkspace").value nuclear = self.getProperty("NuclearPeaks").value sats = self.getProperty("SatellitePeaks").value nuclear_hkls = indexing.get_hkls(nuclear) sats_hkls = indexing.get_hkls(sats) qs = indexing.find_q_vectors(nuclear_hkls, sats_hkls) clusters, k = indexing.cluster_qs(qs, threshold=cluster_threshold, k=k) qs = indexing.average_clusters(qs, clusters) predicted_satellites = self.create_fractional_peaks_workspace( qs, nuclear) centroid_satellites = CentroidPeaksMD( InputWorkspace=md, PeaksWorkspace=predicted_satellites, PeakRadius=peak_radius, StoreInADS=False) satellites_int_spherical = IntegratePeaksMD( InputWorkspace=md, PeaksWorkspace=centroid_satellites, PeakRadius=peak_radius, BackgroundInnerRadius=background_radii[0], BackgroundOuterRadius=background_radii[1], IntegrateIfOnEdge=True, StoreInADS=False) satellites_int_spherical = FilterPeaks(satellites_int_spherical, FilterVariable="Intensity", FilterValue=0, Operator=">", StoreInADS=False) satellites_int_spherical = FilterPeaks(satellites_int_spherical, FilterVariable="Signal/Noise", FilterValue=I_over_sigma, Operator=">", StoreInADS=False) self.log().notice("Q vectors are: \n{}".format(qs)) self.setProperty("OutputWorkspace", satellites_int_spherical)
def test_cluster_qs_with_auto_k(self):
    qs = np.array([
        [0, .1, .1],
        [0, .1, .1],
        [0, .0, .1],
        [0, .0, .1],
        [0, .1, .1],
    ])
    qs += np.random.random(qs.shape) * 0.01

    clusters, k = indexing.cluster_qs(qs, threshold=0.01)

    self.assertEqual(k, 2)
    npt.assert_array_equal(clusters, np.array([2, 2, 1, 1, 2]))
def test_cluster_qs_with_fixed_k(self):
    qs = np.array([
        [0, .1, .1],
        [0, .1, .1],
        [0, .0, .1],
        [0, .0, .1],
        [0, .1, .1],
    ])
    qs += np.random.random(qs.shape) * 0.01

    k = 2
    clusters, k = indexing.cluster_qs(qs, k)

    self.assertEqual(k, 2)
    npt.assert_array_equal(clusters, np.array([0, 0, 1, 1, 0]))
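# Both tests above jitter the q vectors with np.random.random without seeding
# the generator, so the expected labels rely on the noise staying well below
# the cluster separation. A setUp along these lines (hypothetical, not part of
# the original excerpt) would make the runs fully reproducible:
def setUp(self):
    np.random.seed(10)  # any fixed seed pins the 0.01 jitter added in the tests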
def PyExec(self):
    tolerance = self.getProperty("Tolerance").value
    k = int(self.getProperty("NumOfQs").value)
    nuclear = self.getProperty("NuclearPeaks").value
    satellites = self.getProperty("SatellitePeaks").value
    cluster_threshold = self.getProperty("ClusterThreshold").value
    n_trunc_decimals = int(np.ceil(abs(np.log10(tolerance))))

    if nuclear.getNumberPeaks() == 0:
        raise RuntimeError("The NuclearPeaks parameter must have at least one peak")

    if satellites.getNumberPeaks() == 0:
        raise RuntimeError("The SatellitePeaks parameter must have at least one peak")

    nuclear_hkls = indexing.get_hkls(nuclear)
    sats_hkls = indexing.get_hkls(satellites)

    qs = indexing.find_q_vectors(nuclear_hkls, sats_hkls)

    self.log().notice("K value is {}".format(k))
    # a NumOfQs of -1 means "let the clustering choose k"
    k = None if k == -1 else k

    clusters, k = indexing.cluster_qs(qs, k=k, threshold=cluster_threshold)
    qs = indexing.average_clusters(qs, clusters)
    qs = indexing.trunc_decimals(qs, n_trunc_decimals)
    qs = indexing.sort_vectors_by_norm(qs)

    self.log().notice("Q vectors are: \n{}".format(qs))

    indices = indexing.index_q_vectors(qs, tolerance)
    ndim = indices.shape[1] + 3

    hkls = indexing.find_nearest_integer_peaks(nuclear_hkls, sats_hkls)

    hklm = np.zeros((hkls.shape[0], ndim))
    hklm[:, :3] = np.round(hkls)

    # map each raw q offset to the nearest averaged q vector and copy
    # that vector's modulation indices into the extra (m, n, ...) columns
    raw_qs = hkls - sats_hkls
    peak_map = KDTree(qs)
    for i, q in enumerate(raw_qs):
        distance, index = peak_map.query(q, k=1)
        hklm[i, 3:] = indices[index]

    indexed = self.create_indexed_workspace(satellites, ndim, hklm)
    self.setProperty("OutputWorkspace", indexed)
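# A minimal usage sketch for the indexing algorithm above, assuming it is
# registered with Mantid under the name IndexSatellitePeaks and that
# "nuclear_peaks" and "satellite_peaks" are existing PeaksWorkspaces in the
# ADS. The workspace names and property values are placeholders; only the
# property names are taken from the code above.
from mantid.simpleapi import IndexSatellitePeaks

indexed_peaks = IndexSatellitePeaks(NuclearPeaks="nuclear_peaks",
                                    SatellitePeaks="satellite_peaks",
                                    Tolerance=0.05,
                                    NumOfQs=-1,           # -1: let the clustering choose k
                                    ClusterThreshold=1.5,
                                    OutputWorkspace="indexed_peaks")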