provide the ROI data, the affine, the color in [R, G, B], and the opacity as
a decimal between zero and one. Here, we set the color to blue/green with
50% opacity.
"""

surface_opacity = 0.5
surface_color = [0, 1, 1]

seedroi_actor = actor.contour_from_roi(seed_mask, affine,
                                       surface_color, surface_opacity)

"""
Next, we initialize a ``Renderer`` object and add both actors to the
rendering.
"""

ren = window.ren()
ren.add(streamlines_actor)
ren.add(seedroi_actor)

"""
If you set ``interactive`` to ``True`` below, the rendering will pop up in an
interactive window.
"""

interactive = False
if interactive:
    window.show(ren)

window.record(ren, out_path='contour_from_roi_tutorial.png', size=(1200, 900))

"""
.. figure:: contour_from_roi_tutorial.png
   :align: center
import numpy as np
import numpy.testing as npt

from dipy.viz import actor, window


def test_contour_from_roi():

    # Render volume
    renderer = window.renderer()
    data = np.zeros((50, 50, 50))
    data[20:30, 25, 25] = 1.
    data[25, 20:30, 25] = 1.
    affine = np.eye(4)
    surface = actor.contour_from_roi(data, affine,
                                     color=np.array([1, 0, 1]),
                                     opacity=.5)
    renderer.add(surface)

    renderer.reset_camera()
    renderer.reset_clipping_range()
    # window.show(renderer)

    # Test binarization
    renderer2 = window.renderer()
    data2 = np.zeros((50, 50, 50))
    data2[20:30, 25, 25] = 1.
    data2[35:40, 25, 25] = 1.
    affine = np.eye(4)
    surface2 = actor.contour_from_roi(data2, affine,
                                      color=np.array([0, 1, 1]),
                                      opacity=.5)
    renderer2.add(surface2)

    renderer2.reset_camera()
    renderer2.reset_clipping_range()
    # window.show(renderer2)

    arr = window.snapshot(renderer, 'test_surface.png', offscreen=True)
    arr2 = window.snapshot(renderer2, 'test_surface2.png', offscreen=True)

    report = window.analyze_snapshot(arr, find_objects=True)
    report2 = window.analyze_snapshot(arr2, find_objects=True)

    npt.assert_equal(report.objects, 1)
    npt.assert_equal(report2.objects, 2)

    # Test on real streamlines using the tracking example
    from dipy.data import read_stanford_labels
    from dipy.reconst.shm import CsaOdfModel
    from dipy.data import default_sphere
    from dipy.direction import peaks_from_model
    from dipy.tracking.local import ThresholdTissueClassifier
    from dipy.tracking import utils
    from dipy.tracking.local import LocalTracking
    from dipy.viz.colormap import line_colors

    hardi_img, gtab, labels_img = read_stanford_labels()
    data = hardi_img.get_data()
    labels = labels_img.get_data()
    affine = hardi_img.get_affine()

    white_matter = (labels == 1) | (labels == 2)

    csa_model = CsaOdfModel(gtab, sh_order=6)
    csa_peaks = peaks_from_model(csa_model, data, default_sphere,
                                 relative_peak_threshold=.8,
                                 min_separation_angle=45,
                                 mask=white_matter)

    classifier = ThresholdTissueClassifier(csa_peaks.gfa, .25)

    seed_mask = labels == 2
    seeds = utils.seeds_from_mask(seed_mask, density=[1, 1, 1], affine=affine)

    # Initialization of LocalTracking.
    # The computation happens in the next step.
    streamlines = LocalTracking(csa_peaks, classifier, seeds, affine,
                                step_size=2)

    # Compute streamlines and store as a list.
    streamlines = list(streamlines)

    # Prepare the display objects.
    streamlines_actor = actor.line(streamlines, line_colors(streamlines))
    seedroi_actor = actor.contour_from_roi(seed_mask, affine, [0, 1, 1], 0.5)

    # Create the 3D display.
    r = window.ren()
    r2 = window.ren()
    r.add(streamlines_actor)
    arr3 = window.snapshot(r, 'test_surface3.png', offscreen=True)
    report3 = window.analyze_snapshot(arr3, find_objects=True)
    r2.add(streamlines_actor)
    r2.add(seedroi_actor)
    arr4 = window.snapshot(r2, 'test_surface4.png', offscreen=True)
    report4 = window.analyze_snapshot(arr4, find_objects=True)

    # Assert that the seed ROI rendering is not far away from the
    # streamlines (affine error): adding the ROI should not introduce a new,
    # separate object in the snapshot.
    npt.assert_equal(report3.objects, report4.objects)
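
# If this test lives in its own module, a conventional entry point in
# DIPY-era test files lets it be run directly with numpy's test runner.
# This is a minimal sketch, not part of the original test, and it assumes
# ``run_module_suite`` is still available in the installed numpy version
# (it was deprecated in later numpy releases).
if __name__ == "__main__":
    npt.run_module_suite()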