def _pairwise_similarity_matrix(input_polydata, threshold, sigma,
                                number_of_jobs=3, landmarks=None):
    """Internal convenience function available to clustering routines.

    Computes the similarity matrix (n x n) for all n fibers in the input
    polydata. Calls function _pairwise_distance_matrix first, then converts
    distances to similarities with a Gaussian kernel parameterized by sigma.

    Parameters
    ----------
    input_polydata : polydata holding the n fibers to compare.
    threshold : distance threshold forwarded to the distance computation.
    sigma : Gaussian kernel width for distance-to-similarity conversion.
    number_of_jobs : parallel jobs used by the distance computation.
    landmarks : optional landmark data forwarded to the distance computation.

    Returns
    -------
    An (n x n) similarity matrix whose diagonal entries are 1.0.

    Raises
    ------
    AssertionError
        If any on-diagonal element is not exactly 1.0 (checked only when
        Python runs without the -O flag, via __debug__).
    """
    distances = _pairwise_distance_matrix(input_polydata, threshold,
                                          number_of_jobs, landmarks)

    # Convert distances to similarities using a Gaussian kernel.
    sigmasq = sigma * sigma
    similarity_matrix = similarity.distance_to_similarity(distances, sigmasq)

    # Sanity check: each fiber's distance to itself is 0, so every
    # on-diagonal similarity must be exactly 1.0.
    if __debug__:
        test = numpy.min(numpy.diag(similarity_matrix)) == 1.0
        if not test:
            # Fixed: original used a Python 2 print statement, which is a
            # syntax error under Python 3; message text is unchanged.
            print("<cluster.py> ERROR: On-diagonal elements are not all 1.0.")
            raise AssertionError

    return similarity_matrix
def _rectangular_similarity_matrix(input_polydata_n, input_polydata_m,
                                   threshold, sigma, number_of_jobs=3,
                                   landmarks_n=None, landmarks_m=None,
                                   distance_method='Hausdorff',
                                   bilateral=False):
    """Internal convenience function available to clustering routines.

    Computes a rectangular similarity matrix between the fibers of two
    polydatas by first calling _rectangular_distance_matrix, then converting
    the distances to similarities with a Gaussian kernel of width sigma.
    When distance_method is 'StrictSimilarity' the distance computation
    already yields similarities, so they are returned unchanged.
    """
    distances = _rectangular_distance_matrix(
        input_polydata_n, input_polydata_m, threshold, number_of_jobs,
        landmarks_n, landmarks_m, distance_method, bilateral=bilateral)

    # Already similarities in this mode; no Gaussian conversion needed.
    if distance_method == 'StrictSimilarity':
        return distances

    # Gaussian conversion from distance to similarity.
    return similarity.distance_to_similarity(distances, sigma * sigma)