def read_hcp_atlas_16_bundles():
    """
    Read the 16-bundle HCP atlas into a dictionary of streamlines and
    per-bundle centroids.
    """
    bundle_dict = {}
    _, folder = fetch_hcp_atlas_16_bundles()
    whole_brain = load_tractogram(op.join(folder,
                                          'Atlas_in_MNI_Space_16_bundles',
                                          'whole_brain',
                                          'whole_brain_MNI.trk'),
                                  'same',
                                  bbox_valid_check=False).streamlines
    bundle_dict['whole_brain'] = whole_brain
    bundle_files = glob(
        op.join(folder, "Atlas_in_MNI_Space_16_bundles", "bundles", "*.trk"))
    for bundle_file in bundle_files:
        bundle = op.splitext(op.split(bundle_file)[-1])[0]
        bundle_dict[bundle] = {}
        bundle_dict[bundle]['sl'] = load_tractogram(bundle_file,
                                                    'same',
                                                    bbox_valid_check=False)\
            .streamlines
        feature = ResampleFeature(nb_points=100)
        metric = AveragePointwiseEuclideanMetric(feature)
        qb = QuickBundles(np.inf, metric=metric)
        cluster = qb.cluster(bundle_dict[bundle]['sl'])
        bundle_dict[bundle]['centroid'] = cluster.centroids[0]

    # For some reason, this file-name has a 0 in it, instead of an O:
    bundle_dict["IFOF_R"] = bundle_dict["IF0F_R"]
    del bundle_dict["IF0F_R"]
    return bundle_dict
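# A minimal usage sketch for read_hcp_atlas_16_bundles, assuming the AFQ/DIPY
# helpers it relies on (fetch_hcp_atlas_16_bundles, load_tractogram) are
# importable; the first call downloads the atlas.
bundles = read_hcp_atlas_16_bundles()
print(sorted(name for name in bundles if name != 'whole_brain'))
print(bundles['IFOF_R']['centroid'].shape)  # (100, 3), from nb_points=100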
def produce_Clusters(truth_list, thresh):
    feature = ResampleFeature(nb_points=24)
    metric = AveragePointwiseEuclideanMetric(feature=feature)
    qb = QuickBundles(threshold=thresh, metric=metric)
    clusters = qb.cluster(truth_list)
    return qb, clusters
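# Hedged usage sketch for produce_Clusters: the toy streamlines below are
# synthetic stand-ins for real tractography data, and the imports mirror the
# ones the function assumes.
import numpy as np
from dipy.segment.clustering import QuickBundles
from dipy.segment.metric import (ResampleFeature,
                                 AveragePointwiseEuclideanMetric)

toy_streamlines = [np.cumsum(np.random.rand(n, 3), axis=0).astype(np.float32)
                   for n in (20, 35, 50)]
qb, clusters = produce_Clusters(toy_streamlines, thresh=10.)
print("Nb. clusters:", len(clusters))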
def get_centroid_streamline(streamlines, nb_points):
    resample_feature = ResampleFeature(nb_points=nb_points)
    quick_bundle = QuickBundles(
        threshold=np.inf,
        metric=AveragePointwiseEuclideanMetric(resample_feature))
    clusters = quick_bundle.cluster(streamlines)
    centroid_streamlines = clusters.centroids
    return centroid_streamlines
def get_centroid_streamline(tractogram, nb_points, distance_threshold):
    streamlines = tractogram.streamlines
    resample_feature = ResampleFeature(nb_points=nb_points)
    quick_bundle = QuickBundles(
        threshold=distance_threshold,
        metric=AveragePointwiseEuclideanMetric(resample_feature))
    clusters = quick_bundle.cluster(streamlines)
    centroid_streamlines = clusters.centroids
    if len(centroid_streamlines) > 1:
        raise Exception('Multiple centroids found')
    return Tractogram(centroid_streamlines, affine_to_rasmm=np.eye(4))
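# A hedged sketch of calling the Tractogram-based variant above; the toy
# streamlines are synthetic and threshold=np.inf guarantees a single cluster,
# so the "multiple centroids" exception cannot trigger.
import numpy as np
from nibabel.streamlines import Tractogram

toy = Tractogram([np.cumsum(np.random.rand(n, 3), axis=0).astype(np.float32)
                  for n in (20, 30)],
                 affine_to_rasmm=np.eye(4))
centroid = get_centroid_streamline(toy, nb_points=100,
                                   distance_threshold=np.inf)
print(len(centroid.streamlines))  # 1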
def __init__(self, thresholds, metric="MDF_12points"):
    self.thresholds = thresholds

    if isinstance(metric, MinimumAverageDirectFlipMetric):
        raise ValueError("Use AveragePointwiseEuclideanMetric instead")

    if isinstance(metric, Metric):
        self.metric = metric
    elif metric == "MDF_12points":
        feature = ResampleFeature(nb_points=12)
        self.metric = AveragePointwiseEuclideanMetric(feature)
    else:
        raise ValueError("Unknown metric: {0}".format(metric))
def __init__(self, threshold, metric="MDF_12points",
             max_nb_clusters=np.iinfo('i4').max):
    self.threshold = threshold
    self.max_nb_clusters = max_nb_clusters

    if isinstance(metric, Metric):
        self.metric = metric
    elif metric == "MDF_12points":
        feature = ResampleFeature(nb_points=12)
        self.metric = AveragePointwiseEuclideanMetric(feature)
    else:
        raise ValueError("Unknown metric: {0}".format(metric))
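# Hedged illustration of the default-metric branch above: constructing
# QuickBundles without an explicit metric is equivalent to MDF on streamlines
# resampled to 12 points.
from dipy.segment.clustering import QuickBundles
from dipy.segment.metric import (ResampleFeature,
                                 AveragePointwiseEuclideanMetric)

qb_default = QuickBundles(threshold=10.)  # picks "MDF_12points"
qb_explicit = QuickBundles(
    threshold=10.,
    metric=AveragePointwiseEuclideanMetric(ResampleFeature(nb_points=12)))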
def quickbundles(self, streamlines):
    """Segment tract with QuickBundles."""
    # TODO: implement other metrics
    try:
        from dipy.segment.clustering import QuickBundles
        from dipy.segment.metric import ResampleFeature
        from dipy.segment.metric import AveragePointwiseEuclideanMetric
    except ImportError:
        return None
    else:
        feature = ResampleFeature(nb_points=self.qb_points)
        metric = AveragePointwiseEuclideanMetric(feature=feature)
        qb = QuickBundles(threshold=self.qb_threshold, metric=metric)
        clusters = qb.cluster(streamlines)
        return clusters
def qb_metrics_features(streamlines, threshold=10.0, metric=None,
                        max_nb_clusters=np.iinfo('i4').max):
    """
    Enhancing QuickBundles with different metrics and features.

    metric: 'IF', 'RF', 'CoMF', 'MF', 'AF', 'VBEF', None
    """
    if metric == 'IF':
        feature = IdentityFeature()
        metric = AveragePointwiseEuclideanMetric(feature=feature)
    elif metric == 'RF':
        feature = ResampleFeature(nb_points=24)
        metric = AveragePointwiseEuclideanMetric(feature=feature)
    elif metric == 'CoMF':
        feature = CenterOfMassFeature()
        metric = EuclideanMetric(feature)
    elif metric == 'MF':
        feature = MidpointFeature()
        metric = EuclideanMetric(feature)
    elif metric == 'AF':
        feature = ArcLengthFeature()
        metric = EuclideanMetric(feature)
    elif metric == 'VBEF':
        feature = VectorOfEndpointsFeature()
        metric = CosineMetric(feature)
    else:
        metric = "MDF_12points"

    qb = QuickBundles(threshold=threshold, metric=metric,
                      max_nb_clusters=max_nb_clusters)
    clusters = qb.cluster(streamlines)

    labels = np.array(len(streamlines) * [None])
    N_list = []
    for i in range(len(clusters)):
        # Cluster objects expose their size via len() and their member
        # streamline indices via the `indices` attribute.
        N_list.append(len(clusters[i]))
    data_clusters = []
    for i in range(len(clusters)):
        labels[clusters[i].indices] = i + 1
        data_clusters.append(streamlines[clusters[i].indices])
    return labels, data_clusters, N_list
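# Hedged usage sketch for qb_metrics_features with the arc-length option
# ('AF'); the toy streamlines stand in for real data, and the feature/metric
# classes referenced in the function body are assumed to be imported from
# dipy.segment.metric as in the other snippets.
import numpy as np
from dipy.tracking.streamline import Streamlines

toy = Streamlines([np.cumsum(np.random.rand(n, 3), axis=0).astype(np.float32)
                   for n in (20, 30, 40)])
labels, data_clusters, N_list = qb_metrics_features(toy, threshold=5.,
                                                    metric='AF')
print(labels, N_list)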
def get_tract_profile(bundle, metric_img, metric_affine, use_weights=False,
                      flip=True, num_points=100):
    '''
    This function reorients the streamlines and extracts the diffusion
    metrics along the tract. It essentially performs step 1.

    The default number of points along a tract is 100, which can be thought
    of as the %-along a tract.

    The flip variable signals whether you would like to flip the direction of
    the streamlines after reorientation. For example, if after reorientation
    all the streamlines were motor cortex -> brainstem and you actually
    wanted brainstem -> motor cortex, you would set flip to True. The default
    is True because we generally see reorientation result in
    motor cortex -> brainstem. For the honours project, we were looking for
    the opposite.
    '''
    # Reorient all the streamlines so that they follow the same direction
    feature = ResampleFeature(nb_points=num_points)
    d_metric = AveragePointwiseEuclideanMetric(feature)
    qb = QuickBundles(np.inf, metric=d_metric)
    centroid_bundle = qb.cluster(bundle).centroids[0]
    oriented_bundle = orient_by_streamline(bundle, centroid_bundle)

    # Calculate weights for each streamline/node in a bundle, based on the
    # Mahalanobis distance from the core of the bundle at that node
    w_bundle = None
    if use_weights:
        w_bundle = gaussian_weights(oriented_bundle)

    # Sample the metric along the tract. The implementation of this function
    # is based on work by Yeatman et al. (2012)
    profile_bundle = afq_profile(metric_img, oriented_bundle, metric_affine,
                                 weights=w_bundle)

    # Reverse the profile if the direction is not the desired one
    if flip:
        profile_bundle = np.flip(profile_bundle)

    return profile_bundle
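# Hedged usage sketch: profile fractional anisotropy along a bundle. The file
# names are hypothetical placeholders; `bundle` would normally come from a
# bundle-segmentation step.
from dipy.io.image import load_nifti
from dipy.io.streamline import load_tractogram

fa, fa_affine = load_nifti('fa.nii.gz')         # hypothetical path
bundle = load_tractogram('bundle.trk', 'same',  # hypothetical path
                         bbox_valid_check=False).streamlines
profile = get_tract_profile(bundle, fa, fa_affine, use_weights=True)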
def get_streamlines_centroid(streamlines, nb_points):
    """
    Compute centroid from streamlines using QuickBundles.

    Parameters
    ----------
    streamlines: list of ndarray
        The list of streamlines from which we compute the centroid.
    nb_points: int
        Number of points defining the centroid streamline.

    Returns
    -------
    List of length one, containing a np.ndarray of shape (nb_points, 3)
    """
    resample_feature = ResampleFeature(nb_points=nb_points)
    quick_bundle = QuickBundles(
        threshold=np.inf,
        metric=AveragePointwiseEuclideanMetric(resample_feature))
    clusters = quick_bundle.cluster(streamlines)
    centroid_streamlines = clusters.centroids
    return centroid_streamlines
args = sys.argv
tractogram_fn = args[1]
output_dir = args[2]
subject = args[3]
tract_name = args[4]
max_num_centroids = int(args[5])
points_per_sl = int(args[6])

# Open the file and extract streamlines
streams, header = trackvis.read(tractogram_fn)
streamlines = [sl[0] for sl in streams]

# Run QuickBundles with the chosen parameters
feature = ResampleFeature(nb_points=points_per_sl)
metric = AveragePointwiseEuclideanMetric(feature)
qb = QuickBundles(threshold=10., max_nb_clusters=max_num_centroids,
                  metric=metric)
clusters = qb.cluster(streamlines)

# Extract the centroids
centroids = [cluster.centroid for cluster in clusters]

# If not enough were generated, fill with empty streamlines
diff = max_num_centroids - len(centroids)
if diff > 0:
    print("Not enough centroids generated, so generating empty streamlines "
          "for padding.")
def resample_tracts(t, n):
    feature = ResampleFeature(nb_points=n)
    print(dir(feature))
def main():
    parser = buildArgsParser()
    args = parser.parse_args()

    #####################################
    # Checking if the files exist       #
    #####################################
    for myFile in [args.tracts_filename, args.ref_anat_name]:
        if not os.path.isfile(myFile):
            parser.error('"{0}" must be a file!'.format(myFile))

    if os.path.exists(args.output_name):
        print(args.output_name, " already exists and will be overwritten.")

    #####################################
    # Loading tracts                    #
    #####################################
    tracts_format = tc.detect_format(args.tracts_filename)
    tract_file = tracts_format(args.tracts_filename,
                               anatFile=args.ref_anat_name)
    tracts = [i for i in tract_file]
    hdr = tract_file.hdr

    #####################################
    # Checking if needs subsampling     #
    #####################################
    tmp = len(tracts[0])
    if all(len(my_tract) == tmp for my_tract in tracts):
        nb_of_points = tmp
    else:
        nb_of_points = args.nb_of_points

    #####################################
    # Compute QuickBundles              #
    #####################################
    print('Starting the QuickBundles...')
    # This feature tells QuickBundles to resample each streamline on the fly.
    feature = ResampleFeature(nb_points=nb_of_points)
    # 'qb' is a `dipy.segment.clustering.QuickBundles` object.
    qb = QuickBundles(threshold=args.dist_thresh, metric=MDF(feature))
    # 'clusters' is a `dipy.segment.clustering.ClusterMap` object.
    clusters = qb.cluster(tracts)
    centroids = clusters.centroids
    print(' --- done. Number of centroids:', len(centroids))
    print('     Number of points per tract:', nb_of_points)
    print('Cluster sizes:', list(map(len, clusters)))

    #####################################
    # Saving                            #
    #####################################
    print('Saving...')
    out_format = tc.detect_format(args.output_name)
    qb_header = hdr
    qb_header[Header.NB_FIBERS] = len(centroids)
    out_centroids = out_format.create(args.output_name, qb_header,
                                      anatFile=args.ref_anat_name)
    out_centroids += [s for s in centroids]
    out_centroids.close()
    print(' --- done.')
def agreement(model_path, dwi_path_1, trk_path_1, dwi_path_2, trk_path_2,
              wm_path, fixel_cnt_path, cluster_thresh, centroid_size,
              fixel_thresh, bundle_min_cnt, gpu_queue=None):
    try:
        gpu_idx = maybe_get_a_gpu() if gpu_queue is None else gpu_queue.get()
        os.environ["CUDA_VISIBLE_DEVICES"] = gpu_idx
    except Exception as e:
        print(str(e))

    temperature = np.round(float(re.findall(r"T=(.*)\.h5", model_path)[0]), 6)

    model = load_model(model_path)

    print("Load data ...")
    dwi_img_1 = nib.load(dwi_path_1)
    dwi_img_1 = nib.funcs.as_closest_canonical(dwi_img_1)
    affine_1 = dwi_img_1.affine
    dwi_1 = dwi_img_1.get_data()

    dwi_img_2 = nib.load(dwi_path_2)
    dwi_img_2 = nib.funcs.as_closest_canonical(dwi_img_2)
    affine_2 = dwi_img_2.affine
    dwi_2 = dwi_img_2.get_data()

    wm_img = nib.load(wm_path)
    wm_data = wm_img.get_data()
    n_wm = (wm_data > 0).sum()

    fixel_cnt = nib.load(fixel_cnt_path).get_data()[:, :, :, 0]
    fixel_cnt = fixel_cnt[wm_data > 0]

    k_fixels = np.unique(fixel_cnt)
    max_fixels = k_fixels.max()
    n_fixels_gt = np.sum(k * (fixel_cnt == k).sum() for k in k_fixels)

    img_shape = dwi_1.shape[:-1]

    # -------------------------------------------------------------------

    tractogram_1 = maybe_add_tangent(trk_path_1)
    tractogram_2 = maybe_add_tangent(trk_path_2)

    streamlines_1 = tractogram_1.streamlines
    streamlines_2 = tractogram_2.streamlines

    n_streamlines_1 = len(streamlines_1)
    n_streamlines_2 = len(streamlines_2)

    tractogram_1.extend(tractogram_2)

    ########################################################################

    print("Clustering streamlines.")

    feature = ResampleFeature(nb_points=centroid_size)
    qb = QuickBundles(threshold=cluster_thresh,
                      metric=AveragePointwiseEuclideanMetric(feature))

    bundles = qb.cluster(streamlines_1)
    bundles.refdata = tractogram_1

    n_bundles = len(bundles)
    print("Found {} bundles.".format(n_bundles))

    print("Computing bundle masks...")
    direction_masks_1 = np.zeros((n_bundles,) + img_shape + (3,), np.float16)
    direction_masks_2 = np.zeros((n_bundles,) + img_shape + (3,), np.float16)
    count_masks_1 = np.zeros((n_bundles,) + img_shape, np.uint16)
    count_masks_2 = np.zeros((n_bundles,) + img_shape, np.uint16)

    marginal_bundles = 0
    for i, b in enumerate(bundles.clusters):
        is_from_1 = np.argwhere(
            np.array(b.indices) < n_streamlines_1).squeeze().tolist()
        is_from_2 = np.argwhere(
            np.array(b.indices) >= n_streamlines_1).squeeze().tolist()

        if (np.sum(is_from_1) > bundle_min_cnt
                and np.sum(is_from_2) > bundle_min_cnt):
            bundle_map(b[is_from_1], affine_1, img_shape,
                       dir_out=direction_masks_1[i],
                       cnt_out=count_masks_1[i])
            bundle_map(b[is_from_2], affine_2, img_shape,
                       dir_out=direction_masks_2[i],
                       cnt_out=count_masks_2[i])
        else:
            marginal_bundles += 1

        assert direction_masks_1.dtype.name == "float16"
        assert direction_masks_2.dtype.name == "float16"
        assert count_masks_1.dtype.name == "uint16"
        assert count_masks_2.dtype.name == "uint16"

        print("Computed bundle {:3d}.".format(i), end="\r")
        # gc.collect()

    overlap = ((count_masks_1 > 0) * (count_masks_2 > 0)
               * np.expand_dims(wm_data > 0, 0))

    print("Calculating Fixels...")
    fixel_directions_1 = []
    fixel_directions_2 = []
    fixel_cnts_1 = []
    fixel_cnts_2 = []
    fixel_ijk = []
    n_fixels = []
    no_overlap = 0
    for vox in np.argwhere(wm_data > 0):

        matched = overlap[:, vox[0], vox[1], vox[2]] > 0

        if matched.sum() > 0:
            dir_1 = direction_masks_1[matched, vox[0], vox[1], vox[2], :]
            cnts_1 = count_masks_1[matched, vox[0], vox[1], vox[2]]

            dir_2 = direction_masks_2[matched, vox[0], vox[1], vox[2], :]
            cnts_2 = count_masks_2[matched, vox[0], vox[1], vox[2]]

            fixels1, fixels2, f_cnts_1, f_cnts_2 = cluster_fixels(
                dir_1, dir_2, cnts_1, cnts_2,
                threshold=np.cos(np.pi / fixel_thresh))

            n_f = len(fixels1)

            fixel_directions_1.append(fixels1)
            fixel_directions_2.append(fixels2)
            fixel_cnts_1.append(f_cnts_1)
            fixel_cnts_2.append(f_cnts_2)
            fixel_ijk.append(np.tile(vox, (n_f, 1)))
            n_fixels.append(n_f)
        else:
            no_overlap += 1

    fixel_directions_1 = np.vstack(fixel_directions_1)
    fixel_directions_2 = np.vstack(fixel_directions_2)
    fixel_cnts_1 = np.vstack(fixel_cnts_1).reshape(-1)
    fixel_cnts_2 = np.vstack(fixel_cnts_2).reshape(-1)
    fixel_ijk = np.vstack(fixel_ijk)
    # gc.collect()

    ########################################################################

    print("Computing agreement ...")

    n_fixels_sum = np.sum(n_fixels)
    block_size = get_blocksize(model, dwi_1.shape[-1])

    d_1 = np.zeros([n_fixels_sum, block_size, block_size, block_size,
                    dwi_1.shape[-1]])
    d_2 = np.zeros([n_fixels_sum, block_size, block_size, block_size,
                    dwi_1.shape[-1]])
    i, j, k = fixel_ijk.T
    for idx in range(block_size ** 3):
        ii, jj, kk = np.unravel_index(idx,
                                      (block_size, block_size, block_size))
        d_1[:, ii, jj, kk, :] = dwi_1[i + ii - 1, j + jj - 1, k + kk - 1, :]
        d_2[:, ii, jj, kk, :] = dwi_2[i + ii - 1, j + jj - 1, k + kk - 1, :]

    d_1 = d_1.reshape(-1, dwi_1.shape[-1] * block_size ** 3)
    d_2 = d_2.reshape(-1, dwi_2.shape[-1] * block_size ** 3)

    dnorm_1 = np.linalg.norm(d_1, axis=1, keepdims=True) + 10 ** -2
    dnorm_2 = np.linalg.norm(d_2, axis=1, keepdims=True) + 10 ** -2

    d_1 /= dnorm_1
    d_2 /= dnorm_2

    model_inputs_1 = np.hstack([fixel_directions_1, d_1, dnorm_1])
    model_inputs_2 = np.hstack([fixel_directions_2, d_2, dnorm_2])

    fixel_agreements, fixel_kappa_1, fixel_kappa_2, fixel_mu_1, fixel_mu_2 = \
        agreement_for(model, model_inputs_1, model_inputs_2,
                      fixel_cnts_1, fixel_cnts_2)

    agreement = {"temperature": temperature}
    agreement["model_path"] = model_path
    agreement["n_bundles"] = n_bundles
    agreement["value"] = fixel_agreements.sum() / n_fixels_gt
    agreement["min"] = fixel_agreements.min()
    agreement["mean"] = fixel_agreements.mean()
    agreement["max"] = fixel_agreements.max()
    agreement["std"] = fixel_agreements.std()
    agreement["n_fixels_sum"] = n_fixels_sum
    agreement["n_wm"] = n_wm
    agreement["n_fixels_gt"] = n_fixels_gt
    agreement["marginal_bundles"] = marginal_bundles
    agreement["no_overlap"] = no_overlap
    agreement["dwi_1"] = dwi_path_1
    agreement["trk_1"] = trk_path_1
    agreement["dwi_2"] = dwi_path_2
    agreement["trk_2"] = trk_path_2
    agreement["fixel_cnt_path"] = fixel_cnt_path
    agreement["cluster_thresh"] = cluster_thresh
    agreement["centroid_size"] = centroid_size
    agreement["fixel_thresh"] = fixel_thresh
    agreement["bundle_min_cnt"] = bundle_min_cnt
    agreement["wm_path"] = wm_path
    agreement["ideal"] = ideal_agreement(temperature)

    for k, cnt in zip(*np.unique(n_fixels, return_counts=True)):
        agreement["n_vox_with_{}_fixels".format(k)] = cnt

    for i in [1, 5, 10]:
        agreement["le_{}_fibers_per_fixel_1".format(i)] = np.mean(
            fixel_cnts_1 < i)

    agreement["mean_fibers_per_fixel_1"] = np.mean(fixel_cnts_1)
    agreement["median_fibers_per_fixel_1"] = np.median(fixel_cnts_1)
    agreement["mean_fibers_per_fixel_2"] = np.mean(fixel_cnts_2)
    agreement["median_fibers_per_fixel_2"] = np.median(fixel_cnts_2)
    agreement["std_fibers_per_fixel"] = np.std(fixel_cnts_1)
    agreement["max_fibers_per_fixel"] = np.max(fixel_cnts_1)
    agreement["min_fibers_per_fixel"] = np.min(fixel_cnts_1)

    fixel_angles = (fixel_directions_1 * fixel_directions_2).sum(axis=1)
    agreement["mean_fixel_angle"] = fixel_angles.mean()
    agreement["median_fixel_angle"] = np.median(fixel_angles)
    agreement["std_fixel_angle"] = fixel_angles.std()
    agreement["negative_fixel_angles"] = (fixel_angles < 0).mean()

    save(agreement,
         "agreement_T={}.yml".format(temperature),
         os.path.dirname(model_path))

    np.savez(
        os.path.join(os.path.dirname(model_path),
                     "data_T={}".format(temperature)),
        fixel_cnts_1=fixel_cnts_1,
        fixel_cnts_2=fixel_cnts_2,
        fixel_mu_1=fixel_mu_1,
        fixel_mu_2=fixel_mu_2,
        fixel_kappa_1=fixel_kappa_1,
        fixel_kappa_2=fixel_kappa_2,
        fixel_directions_1=fixel_directions_1,
        fixel_directions_2=fixel_directions_2,
        fixel_agreements=fixel_agreements)

    K.clear_session()

    if gpu_queue is not None:
        gpu_queue.put(gpu_idx)
def __init__(self):
    super(GPSDistance, self).__init__(
        feature=ResampleFeature(nb_points=288))
def outliers_removal_using_hierarchical_quickbundles(streamlines,
                                                     nb_points=12,
                                                     min_threshold=0.5,
                                                     nb_samplings_max=30,
                                                     sampling_seed=1234,
                                                     fast_approx=False):
    """
    Classify inliers and outliers from a list of streamlines.

    Parameters
    ----------
    streamlines: list of ndarray
        The list of streamlines from which inliers and outliers are
        separated.
    nb_points: int
        Number of points to which streamlines are resampled.
    min_threshold: float
        Quickbundles distance threshold for the last threshold.
    nb_samplings_max: int
        Number of runs executed to explore the search space.
        A different sampling is used each time.
    sampling_seed: int
        Random number generation initialization seed.

    Returns
    -------
    ndarray: Float value representing the 0-1 score for each streamline
    """
    if nb_samplings_max < 2:
        raise ValueError("'nb_samplings_max' must be >= 2")

    rng = np.random.RandomState(sampling_seed)
    resample_feature = ResampleFeature(nb_points=nb_points)
    metric = AveragePointwiseEuclideanMetric(resample_feature)

    box_min, box_max = get_streamlines_bounding_box(streamlines)

    # Half of the length of the bounding box's shortest edge.
    initial_threshold = np.min(np.abs(box_max - box_min)) / 2.

    # Quickbundles' threshold is divided by 1.2 between hierarchical levels.
    if fast_approx:
        thresholds = np.array([2 / 1.2 ** i for i in range(25)][1:])
        thresholds = np.concatenate(([40, 20, 10, 5, 2.5],
                                     thresholds[thresholds > min_threshold]))
    else:
        thresholds = takewhile(lambda t: t >= min_threshold,
                               (initial_threshold / 1.2 ** i
                                for i in count()))
        thresholds = list(thresholds)

    ordering = np.arange(len(streamlines))
    path_lengths_per_streamline = 0
    streamlines_path = np.ones((len(streamlines), len(thresholds),
                                nb_samplings_max), dtype=int) * -1

    for i in range(nb_samplings_max):
        rng.shuffle(ordering)

        cluster_orderings = [ordering]
        for j, threshold in enumerate(thresholds):
            id_cluster = 0

            next_cluster_orderings = []
            qb = QuickBundles(metric=metric, threshold=threshold)
            for cluster_ordering in cluster_orderings:
                clusters = qb.cluster(streamlines,
                                      ordering=cluster_ordering)

                for _, cluster in enumerate(clusters):
                    streamlines_path[cluster.indices, j, i] = id_cluster
                    id_cluster += 1
                    if len(cluster) > 10:
                        next_cluster_orderings.append(cluster.indices)

            cluster_orderings = next_cluster_orderings

        if i <= 1:  # Needs at least two orderings to compute the std error.
            continue

        path_lengths_per_streamline = np.sum((streamlines_path != -1),
                                             axis=1)[:, :i]

    summary = np.mean(path_lengths_per_streamline,
                      axis=1) / np.max(path_lengths_per_streamline)
    return summary
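# Hedged sanity-check sketch for the scoring above, assuming the module-level
# names the function needs (QuickBundles, ResampleFeature,
# get_streamlines_bounding_box, itertools' takewhile/count) are in place:
# 50 noisy copies of a straight line plus one clearly deviating streamline.
import numpy as np

_rng = np.random.RandomState(0)
_line = np.linspace([0., 0., 0.], [0., 0., 100.], 20)
_bundle = [(_line + _rng.normal(scale=0.5, size=_line.shape))
           .astype(np.float32) for _ in range(50)]
_outlier = np.linspace([50., 0., 0.], [0., 50., 100.], 20).astype(np.float32)
scores = outliers_removal_using_hierarchical_quickbundles(_bundle + [_outlier])
print(scores[-1])  # expected to be lower than the inlier scores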
# import random
# random.seed(123)
# oldidx = random.sample(oldidx, 10)
# old = [l[o] for o in oldidx]
# age = [young, old]

# %% set paths
mypath = '/Users/alex/code/Wenlin/data'
outpath = '/Users/alex/code/Wenlin/Tracts_Registration/results'

# %% set parameters
num_points1 = 50
distance1 = 1
feature1 = ResampleFeature(nb_points=num_points1)
metric1 = AveragePointwiseEuclideanMetric(feature=feature1)

# group cluster parameters
num_points2 = 50
distance2 = 2
feature2 = ResampleFeature(nb_points=num_points2)
metric2 = AveragePointwiseEuclideanMetric(feature=feature2)

# %% load the control animal
streams_control, hdr_control = load_trk(
    mypath + '/wenlin_results/N54900_bmCSA_detr_small.trk')
labels_control, affine_labels_control = load_nifti(
    mypath + '/wenlin_data/labels/fa_labels_warp_N54900_RAS.nii.gz')
fa_control, affine_fa_control = load_nifti(
def __init__(self):
    super(GPSDistanceCustom, self).__init__(
        feature=ResampleFeature(nb_points=256))
    self.tab_route_voxel = []
    self.dict_routes = {}
    self.tab_dist = []
**Note:** Resampling streamlines has an impact on clustering results both in
terms of speed and quality. Setting the number of points too low will result
in a loss of information about the shape of the streamlines. On the contrary,
setting the number of points too high will slow down the clustering process.
"""

from dipy.segment.clustering import QuickBundles
from dipy.segment.metric import ResampleFeature
from dipy.segment.metric import AveragePointwiseEuclideanMetric

# Get some streamlines.
streamlines = get_streamlines()  # Previously defined.

# Streamlines will be resampled to 24 points on the fly.
feature = ResampleFeature(nb_points=24)
metric = AveragePointwiseEuclideanMetric(feature=feature)  # a.k.a. MDF
qb = QuickBundles(threshold=10., metric=metric)
clusters = qb.cluster(streamlines)

print("Nb. clusters:", len(clusters))
print("Cluster sizes:", list(map(len, clusters)))

"""
::

    Nb. clusters: 4

    Cluster sizes: [64, 191, 44, 1]

.. _clustering-examples-CenterOfMassFeature:
def resample_fibers(streamlines, nb_points=12):
    # The feature is stateless, so it only needs to be created once.
    feature = ResampleFeature(nb_points=nb_points)
    streamlines_new = []
    for sl in streamlines:
        streamlines_new.append(feature.extract(sl))
    return streamlines_new
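# For comparison, a sketch of the same operation with DIPY's own resampling
# helper, which resample_fibers effectively reimplements.
from dipy.tracking.streamline import set_number_of_points

def resample_fibers_alt(streamlines, nb_points=12):
    # set_number_of_points resamples every streamline to nb_points points.
    return set_number_of_points(streamlines, nb_points)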
import numpy as np

import nibabel as nib
from nibabel.streamlines import Tractogram

from dipy.data import fetch_viz_icons, read_viz_icons
from dipy.fixes import argparse
from dipy.segment.clustering import QuickBundles
from dipy.segment.metric import ResampleFeature
from dipy.segment.metric import AveragePointwiseEuclideanMetric as MDF
from dipy.viz import window, actor, gui_2d, utils, gui_follower
from dipy.viz.colormap import distinguishable_colormap, line_colors
from dipy.viz.interactor import CustomInteractorStyle

metric = MDF(ResampleFeature(nb_points=30))

darkcolors = [(0.1, 0, 0), (0.1, 0.1, 0), (0.1, 0.1, 0.1),
              (0, 0.1, 0), (0, 0.1, 0.1), (0, 0, 0.1),
              (0.1, 0, 0.1)]


def animate_button_callback(iren, obj, button):
    """ General purpose callback that dims a button. """
    # iren: CustomInteractorStyle
    # obj: vtkActor picked
    # button: Button2D
    color = np.asarray(obj.GetProperty().GetColor())
    obj.GetProperty().SetColor(*(color * 0.5))
    iren.force_render()
    iren.event.abort()  # Stop propagating the event.