def save_script(script_path, keypoints):
    """Atomically save keypoints to ``script_path``.

    Each keypoint contributes two lines: a Neuroglancer state URL and the
    transition duration.  The script is written to a temporary sibling file
    first and then moved into place, so readers never observe a partially
    written script.
    """
    temp_path = script_path + '.tmp'
    with open(temp_path, 'w') as f:
        for x in keypoints:
            f.write(neuroglancer.to_url(x['state']) + '\n')
            f.write(str(x['transition_duration']) + '\n')
    # Bug fix: os.rename raises on Windows when script_path already exists;
    # os.replace overwrites atomically on all platforms (Python 3).
    os.replace(temp_path, script_path)
def run_batch(args, graph):
    """Split once per seed file and print an HTML link to each result state."""
    for seed_path in args.split_seeds:
        seeds = load_split_seeds(seed_path)
        result = do_split(graph=graph, split_seeds=seeds, agglo_id=args.agglo_id)
        state = display_split_result(
            graph=graph,
            split_seeds=seeds,
            image_url=args.image_url,
            segmentation_url=args.segmentation_url,
            **result)
        print('<p><a href="%s">%s</a></p>' % (neuroglancer.to_url(state), seed_path))
def save_script(script_path, keypoints):
    """Persist keypoints (state URL plus transition duration per entry) to
    ``script_path`` through a temporary file, replacing atomically where the
    platform supports it.
    """
    temp_path = script_path + '.tmp'
    with open(temp_path, 'w') as f:
        for keypoint in keypoints:
            f.write(neuroglancer.to_url(keypoint['state']) + '\n')
            f.write(str(keypoint['transition_duration']) + '\n')
    # os.replace is only available on Python 3 (atomic overwrite); the
    # os.rename fallback fails on Windows if script_path already exists.
    move_into_place = getattr(os, 'replace', os.rename)
    move_into_place(temp_path, script_path)
def dataset_view(datasetname):
    """Render the dataset page, including a Neuroglancer link for the dataset."""
    dataset = DataSet.query.filter(DataSet.name == datasetname).first_or_404()

    state = neuroglancer.ViewerState()
    state.layers['img'] = neuroglancer.ImageLayer(
        source='precomputed://' + dataset.image_source)

    # Prefer the chunked-graph segmentation when one is configured, otherwise
    # fall back to the flat precomputed segmentation.
    if dataset.pychunkedgraph_viewer_source is None:
        seg_source = 'precomputed://' + dataset.flat_segmentation_source
    else:
        seg_source = 'graphene://' + dataset.pychunkedgraph_viewer_source
    state.layers['seg'] = neuroglancer.SegmentationLayer(source=seg_source)

    state.layers['ann'] = neuroglancer.AnnotationLayer()
    state.layout = "xy-3d"

    ng_url = neuroglancer.to_url(state, prefix=current_app.config['NEUROGLANCER_URL'])
    return render_template('dataset.html', dataset=dataset, ng_url=ng_url, version=__version__)
help='Obtain the Neuroglancer client code from the specified URL.')
# NOTE(review): the ap.add_argument(...) call closed above begins before this
# chunk; `ap`, `Annotator` and the neuroglancer import are defined out of view.
args = ap.parse_args()
# Optional server configuration, applied before any viewer is created.
if args.bind_address:
    neuroglancer.set_server_bind_address(args.bind_address)
if args.static_content_url:
    neuroglancer.set_static_content_source(url=args.static_content_url)
anno = Annotator(args.filename)
# Merge segments referenced by each supplied Neuroglancer state URL.
for url in args.add_segments_from_url:
    anno.add_segments_from_state(url)
if args.print_sets:
    print(repr(anno.get_sets()))
if args.print_combined_state:
    print(neuroglancer.to_url(anno.make_combined_state()))
if args.print_summary:
    # Emit a minimal HTML page linking to the combined state.
    print('<html>')
    print('<h1>%s</h1>' % args.filename)
    print('<a href="%s">Neuroglancer</a><br/>' %
          neuroglancer.to_url(anno.make_combined_state()))
    print(repr(anno.get_sets()))
    print('</html>')
else:
    # Interactive mode: report the viewer URL and optionally open a browser.
    print(anno.get_viewer_url())
    if not args.no_webbrowser:
        anno.show()
import neuroglancer
import numpy as np
import argparse
import webbrowser  # bug fix: webbrowser.open_new below raised NameError without this
from pathlib import Path

# Start a local Neuroglancer viewer pointed at a NIfTI volume served over
# HTTP (e.g. by `python -m http.server 9000`).
viewer = neuroglancer.Viewer()
print(viewer)

parser = argparse.ArgumentParser()
parser.add_argument("nifti_path", type=Path)
p = parser.parse_args()

with viewer.txn() as s:
    # NOTE(review): the parsed p.nifti_path is currently unused — the source
    # URL is hard-coded below; confirm whether it should use p.nifti_path.
    s.layers['image'] = neuroglancer.ImageLayer(
        #source='nifti://http://127.0.0.1:9000/B51315_T1_masked.nii.gz',
        source='nifti://http://127.0.0.1:9000/B51325_invivoAPOE1_labels.nii.gz'
    )

webbrowser.open_new(viewer.get_viewer_url())
print(neuroglancer.to_url(viewer.state))
print(viewer.state)
print(viewer)
neuroglancer.stop()
def print_combined_state_url(self):
    """Print a Neuroglancer URL encoding the combined annotation state."""
    combined = self.make_combined_state()
    print(neuroglancer.to_url(combined))
def url(self):
    """Return the current viewer state encoded as a URL under ``self.base``."""
    state_url = neuroglancer.to_url(self.state, prefix=self.base)
    return state_url
def render_on_neuroglancer(mesh_ids, dff, id_column='mesh_ids', pos_col="center_of_mass",
                           center=None):
    """Build a Neuroglancer URL showing the given FAFB neuropil meshes.

    Args:
        mesh_ids: segment ids to select in the 'neuropil-regions-surface' layer.
        dff: table with a 'mesh_id' column and a *pos_col* column of positions.
        id_column: NOTE(review): currently unused — the lookup below is
            hard-coded to dff['mesh_id']; confirm before wiring it in.
        pos_col: column of dff holding [x, y, z] positions (divided by the
            4x4x40 voxel size below, so presumably in nm — TODO confirm).
        center: fallback voxel coordinates used when mesh_ids is empty.
            Bug fix: this was a mutable default list shared across calls;
            it is now created per call with the same historical value.

    Returns:
        URL string for the seung-import Neuroglancer instance.
    """
    if center is None:
        center = [96809, 136521, 540]
    #r= requests.get(blank_state_basil)
    # our base viewer configuration
    base_json = {
        "layers": [{
            "source": "precomputed://gs://neuroglancer-fafb-data/fafb_v14/fafb_v14_orig",
            "type": "image",
            "name": "fafb_v14"
        }, {
            "source": "precomputed://gs://neuroglancer-fafb-data/fafb_v14/fafb_v14_clahe",
            "type": "image",
            "name": "fafb_v14_clahe",
            "visible": False
        }, {
            "type": "segmentation",
            "mesh": "precomputed://gs://neuroglancer-fafb-data/elmr-data/FAFBNP.surf/mesh",
            "selectedAlpha": 0.78,
            "notSelectedAlpha": 0.1,
            "segments": ["22", "59"],
            "skeletonRendering": {
                "mode2d": "lines_and_points",
                "mode3d": "lines"
            },
            "name": "neuropil-regions-surface"
        }],
        "navigation": {
            "pose": {
                "position": {
                    "voxelSize": [4, 4, 40],
                    "voxelCoordinates": [124416, 67072, 3531.5]
                }
            },
            "zoomFactor": 4
        },
        "perspectiveOrientation": [
            0.0033587864600121975, -0.005099608097225428,
            -0.0007366925710812211, 0.9999811053276062
        ],
        "perspectiveZoom": 10761.580012248845,
        "layout": "3d"
    }
    viewer = neuroglancer.Viewer()
    viewer.set_state(base_json)
    with viewer.txn() as s:
        if len(mesh_ids) > 0:
            # Center the view on the first selected mesh (position -> voxels).
            ind = np.where(dff['mesh_id'] == mesh_ids[0])[0][0]
            center = np.asarray(dff[pos_col][ind]) / [4, 4, 40]
        s.navigation.pose.position.voxelCoordinates = center
        seg_layer = s.layers['neuropil-regions-surface']
        seg_layer.selectedAlpha = 0.78
        seg_layer.notSelectedAlpha = 0.1
        seg_layer.segments = mesh_ids
    return (neuroglancer.to_url(
        viewer.state, prefix='https://neuromancer-seung-import.appspot.com/'))
'--static-content-url',
help='Obtain the Neuroglancer client code from the specified URL.')
# NOTE(review): the ap.add_argument(...) call closed above begins before this
# chunk; `ap`, `Annotator` and the neuroglancer import are defined out of view.
args = ap.parse_args()
# Optional server configuration, applied before any viewer is created.
if args.bind_address:
    neuroglancer.set_server_bind_address(args.bind_address)
if args.static_content_url:
    neuroglancer.set_static_content_source(url=args.static_content_url)
anno = Annotator(args.filename)
# Merge segments referenced by each supplied Neuroglancer state URL.
for url in args.add_segments_from_url:
    anno.add_segments_from_state(url)
if args.print_sets:
    print(repr(anno.get_sets()))
if args.print_combined_state:
    print(neuroglancer.to_url(anno.make_combined_state()))
if args.print_summary:
    # Emit a minimal HTML page linking to the combined state.
    print('<html>')
    print('<h1>%s</h1>' % args.filename)
    print(
        '<a href="%s">Neuroglancer</a><br/>' %
        neuroglancer.to_url(anno.make_combined_state()))
    print(repr(anno.get_sets()))
    print('</html>')
else:
    # Interactive mode: report the viewer URL and optionally open a browser.
    print(anno.get_viewer_url())
    if not args.no_webbrowser:
        anno.show()
def runserver():
    """Re-arm a 30-second recurring event and run the scheduler.

    Bug fix: the original also called event_schedule.enter(30, 1, runserver()),
    which invoked runserver immediately (unbounded recursion) and scheduled
    the event twice; only the callable itself must be passed.
    """
    event_schedule.enter(30, 1, runserver)
    event_schedule.run()


viewer = neuroglancer.Viewer()

# Synthetic 3-channel test volume with interleaved sine patterns.
a = np.zeros((3, 100, 100, 100), dtype=np.uint8)
ix, iy, iz = np.meshgrid(*[np.linspace(0, 1, n) for n in a.shape[1:]],
                         indexing='ij')
a[0, :, :, :] = np.abs(np.sin(4 * (ix + iy))) * 255
a[1, :, :, :] = np.abs(np.sin(4 * (iy + iz))) * 255
a[2, :, :, :] = np.abs(np.sin(4 * (ix + iz))) * 255

with viewer.txn() as s:
    s.layers['image'] = neuroglancer.ImageLayer(
        # Bug fix: the source had a stray "a. " prefix, which made the URL
        # scheme invalid ("a. nifti://...").
        source='nifti://http://127.0.0.1:9000/Users/alex/AlexBadeaMyAtlases/atlases/chass_symmetric3/chass_symmetric3_FA.nii.gz',
    )
    # s.layers['ground_truth'] = neuroglancer.SegmentationLayer(
    #     source='precomputed://gs://neuroglancer-public-data/flyem_fib-25/ground_truth',
    # )
    s.layers['overlay'] = neuroglancer.ImageLayer(
        # NOTE(review): the overlay has no source (the LocalVolume line is
        # commented out), so this layer renders nothing — confirm intent.
        # source=neuroglancer.LocalVolume(a, voxel_size=[8, 8, 8], voxel_offset=[3000, 3000, 3000]),
        shader="""
void main() {
  emitRGB(vec3(toNormalized(getDataValue(0)),
               toNormalized(getDataValue(1)),
               toNormalized(getDataValue(2))));
}
""",
    )
    s.voxel_coordinates = [3000, 3000, 3000]

num_actions = 0


def my_action(s):
    """Count invocations and surface the mouse position in the status bar."""
    global num_actions
    num_actions += 1
    with viewer.config_state.txn() as st:
        st.status_messages['hello'] = (
            'Got action %d: mouse position = %r' %
            (num_actions, s.mouse_voxel_coordinates))
    print('Got my-action')
    print(' Mouse position: %s' % (s.mouse_voxel_coordinates, ))
    print(' Layer selected values: %s' % (s.selected_values, ))


viewer.actions.add('my-action', my_action)
with viewer.config_state.txn() as s:
    s.input_event_bindings.viewer['keyt'] = 'my-action'
    s.status_messages['hello'] = 'Welcome to this example'

# with viewer.txn() as s:
#     s.layout = '3d'
#     s.projection_scale = 3000

"""
from ipywidgets import Image
screenshot = viewer.screenshot(size=[1000, 1000])
screenshot_image = Image(value=screenshot.screenshot.image)
screenshot_image
"""

with viewer.txn() as s:
    s.layout = neuroglancer.row_layout(
        [neuroglancer.LayerGroupViewer(layers=['image', 'overlay'])])
    # neuroglancer.LayerGroupViewer(layers=['segmentation'])])

# with viewer.txn() as s:
#     s.layout = neuroglancer.row_layout(
#         [neuroglancer.LayerGroupViewer(layers=['image']),
#          neuroglancer.LayerGroupViewer(layers=['segmentation'])])

print(neuroglancer.to_url(viewer.state))
print(viewer.state)
print(viewer)
webbrowser.open_new(viewer.get_viewer_url())
neuroglancer.stop()
action='store_true', help='Prints a neuroglancer link for the combined state.') ap.add_argument( '--print-summary', action='store_true', help='Prints a neuroglancer link for the combined state.') args = ap.parse_args() anno = Annotator(args.filename) for url in args.add_segments_from_url: anno.add_segments_from_state(url) if args.print_sets: print(repr(anno.get_sets())) if args.print_combined_state: anno.print_combined_state_url() if args.print_summary: print('<html>') print('<h1>%s</h1>' % args.filename) print( '<a href="%s">Neuroglancer</a><br/>' % neuroglancer.to_url(anno.make_combined_state())) print(repr(anno.get_sets())) print('</html>') else: print(anno.get_viewer_url()) if not args.no_webbrowser: anno.show()