def _display_in_jupyter(self, dashboard_url, port, mode, width, height):
    """Render the dashboard according to the requested display *mode*.

    'inline' embeds an IFrame in the cell, 'external' prints a clickable
    URL, and 'jupyterlab' notifies the front-end extension over a comm.
    """
    if mode == 'inline':
        # Embed the dashboard directly inside the output cell.
        display(IFrame(dashboard_url, width, height))
    elif mode == 'external':
        # Display a hyperlink that can be clicked to open Dashboard
        print("AutoPlotter is running on {dashboard_url}".format(
            dashboard_url=dashboard_url))
    elif mode == 'jupyterlab':
        # Update front-end extension
        _dash_comm.send({'type': 'show', 'port': port, 'url': dashboard_url})
def view_table(data, width=1000, height=500, filename=None, autodelete=True, **kwargs):
    """Render *data* as an HTML table file and return an IFrame showing it.

    A TemporaryFile does not work with Jupyter Notebook, so a real file with
    a random name is written instead and (when *autodelete* is set) removed
    shortly afterwards by a background timer.
    """
    if filename is None:
        filename = str(uuid4()) + '.html'
    try:
        markup = generate_html(data=data, width=width, height=height, **kwargs)
        with open(filename, 'w') as handle:
            handle.write(markup)
        return IFrame(filename, width=width, height=height)
    finally:
        if autodelete:
            # Give the browser a few seconds to load the file before deleting.
            Timer(5, os.unlink, args=[filename]).start()
def save_fig(name, default_overwrite=False):
    """Save the current matplotlib figure as plots/<name>.pdf.

    When the file already exists (and *default_overwrite* is False), a
    preview is shown where possible and the user is asked to confirm.
    """
    directory = check_or_create_dir('plots')
    filename = directory + '/%s.pdf' % name
    exists = os.path.isfile(filename)
    if exists and not default_overwrite:
        if IPython is not None:
            # Preview the existing file so the user can decide what to do.
            IPython.display.display(IFrame(filename, width=800, height=600))
        answer = input(
            'A file named %s already exists. Overwrite (Leave string empty for NO!)?' % filename)
        if not answer:
            print('No changes done.')
            return
    plt.savefig(filename)
    print('file saved')
def explain(self) -> None:
    """Provide a description of the example."""
    file_name = self.explain_url.split("/")[-1]
    if file_name == "README.md":
        # README files are fetched and printed as plain text.
        page = requests.get(self.explain_url)
        print(BeautifulSoup(page.text, "html.parser").text)
        return None
    if type_of_script() == "terminal":
        # No inline display available; hand off to the default browser.
        webbrowser.open(self.explain_url)
        return None
    from IPython.display import IFrame
    return IFrame(self.explain_url, width=700, height=350)
def display_or_open_browser(url, clear=False, wait=True, new=0, autoraise=True):
    # type: (str, bool, bool, int, bool) -> None
    """Show *url* inline when running in Jupyter, otherwise open a browser tab."""
    if not _in_jupyter_environment():
        import webbrowser
        webbrowser.open(url, new=new, autoraise=autoraise)
        return
    from IPython.display import display, IFrame, clear_output
    if clear:
        clear_output(wait=wait)
    display(IFrame(src=url, width="100%", height="800px"))
def open_attachment(attach_name):
    """Display the attachment `allegati/<attach_name>` plus a delete button.

    PDFs are embedded through an IFrame; any other extension is assumed to
    be an image renderable via a Markdown image tag.
    """
    path_attachments = "allegati"
    path_file = str(path_attachments + "/" + attach_name)
    ext = os.path.splitext(path_file)[-1].lower()
    if ext == '.pdf':
        display(IFrame(path_file, width=700, height=400))
    else:
        display(Markdown("![alt text](" + path_file + ")"))
    # Button caption is Italian for "Remove attachment".
    delete = widgets.Button(description='Rimuovi allegato')
    delete.on_click(on_button_delete)
    display(delete)
def write_html(self, name, notebook=False):
    """
    This method gets the data structures supporting the nodes, edges,
    and options and updates the template to write the HTML holding
    the visualization.

    :type name_html: str
    """
    check_html(name)
    self.html = self.generate_html(notebook=notebook)
    # Persist the rendered page, then (optionally) wrap it for inline display.
    with open(name, "w+") as destination:
        destination.write(self.html)
    if notebook:
        return IFrame(name, width=self.width, height=self.height)
def view(self, keyword_regex: str = '', tags: list = None, file_format='handsontable', filename='me', width=800, height=300):
    """Export records matching *keyword_regex*/*tags* to an HTML table.

    The table is written to '<filename>.<file_format>.html' and returned
    wrapped in an IFrame for inline display.
    """
    out_file = '{}.{}.html'.format(filename, file_format)
    pyexcel.save_as(records=list(self.find(keyword_regex, tags)),
                    dest_file_name=out_file,
                    dest_sheet_name='TagDict',
                    js_url=module_path('handsontable.full.min.js'),
                    css_url=module_path('handsontable.full.min.css'))
    return IFrame(out_file, width=width, height=height)
def display_cloud_hack(xyz):
    """
    Display a point cloud inside a jupyter IFrame

    Arguments:
        xyz: a Nx3 matrix containing the 3D positions of N points
    """
    # Fix: the original re-imported os and shutil several times mid-function;
    # import everything once up front.
    import os
    import shutil
    import numpy as np
    from IPython.display import IFrame

    # note: if you want to add color intensities to your points, use a Nx4 array and then change
    # the "-parse" option below to xyzi. Similarly for RGB color, save an Nx6 array

    # clear output dir
    try:
        shutil.rmtree('point_clouds')
    except FileNotFoundError:
        pass

    # create tmp
    mkdir_p('tmp')

    # dump data and convert with the PotreeConverter toolchain (hard-coded paths)
    np.savetxt("tmp/tmp.txt", xyz)
    os.system(
        "/home/PotreeConverter_PLY_toolchain/PotreeConverter/LAStools/bin/txt2las -parse xyz tmp/tmp.txt"
    )
    os.system(
        "/home/PotreeConverter_PLY_toolchain/PotreeConverter/build/PotreeConverter/PotreeConverter tmp/tmp.las -o point_clouds -p test --material ELEVATION --edl-enabled"
    )

    # this hack copies the point cloud to an external server where the iframe can be served
    HOST = os.environ.get('EXTERNAL_HTTP_SRV_URL', 'http://localhost:8008')  # fallback: docker
    OUTDIR = 'point_cloud%d' % (np.random.randint(1000000))
    shutil.copytree('point_clouds', '/shared/%s' % OUTDIR)
    print('Accessing: %s/%s/test.html' % (HOST, OUTDIR))
    return IFrame('%s/%s/test.html' % (HOST, OUTDIR), width=980, height=800)
def make_updated_graph_model():
    """Regenerate templates/final_graph.html from the cached OSMnx graph.

    Loads the saved graphml, renders it with folium, then post-processes the
    HTML so it can be used as a Jinja template: the random folium map id is
    normalised to 'map_id' and Jinja block placeholders are injected.

    Fixes vs. original: removed a discarded no-op IFrame(...) call, closed
    all file handles via context managers, and dropped a duplicated
    soup.find("div") whose first result was thrown away.
    """
    ox.config(log_console=True, use_cache=True)
    G_2 = ox.load_graphml('osmnx_graph.graphml')

    # plot the street network with folium
    graph_map = ox.plot_graph_folium(G_2, edge_width=2)
    filepath = 'templates/graph.html'
    graph_map.save(filepath)

    # Inject a script-block placeholder into the generated page.
    with open('templates/graph.html') as fh:
        soup = BeautifulSoup(fh, 'html.parser')
    js_tag = soup.find_all("script")
    js_tag[5].append('{% block script %} {% endblock %}')
    with open("templates/graph.html", "w") as file:
        file.write(str(soup))

    # The folium map id is embedded in the first div; extract it.
    div = str(soup.find("div")).split()
    map_id = div[2][4:-8]

    # Rewrite the random map id to the stable name 'map_id'.
    with open("templates/graph.html", "rt") as fin, \
            open("templates/final_graph.html", "wt") as fout:
        for line in fin:
            fout.write(line.replace(map_id, 'map_id'))

    # Add the remaining Jinja block placeholders.
    with open('templates/final_graph.html') as fh:
        soup = BeautifulSoup(fh, 'html.parser')
    soup.body.append('{% block body %} {% endblock %}')
    soup.style.append('{% block style %} {% endblock %}')
    with open("templates/final_graph.html", "w") as file:
        file.write(str(soup))
def write_html(self, name, notebook=False): """ This method gets the data structures supporting the nodes, edges, and options and updates the template to write the HTML holding the visualization. :type name_html: str """ #check_html(name) # here, check if an href is present in the hover data use_link_template = False for n in self.nodes: title = n.get("title", None) if title: if "href" in title: """ this tells the template to override default hover mechanic, as the tooltip would move with the mouse cursor which made interacting with hover data useless. """ use_link_template = True break if not notebook: with open(self.path) as html: content = html.read() template = Template(content) else: template = self.template nodes, edges, height, width, options = self.get_network_data() self.html = template.render(height=height, width=width, nodes=nodes, edges=edges, options=options, use_DOT=self.use_DOT, dot_lang=self.dot_lang, widget=self.widget, bgcolor=self.bgcolor, conf=self.conf, tooltip_link=use_link_template) with open(name, "w+") as out: out.write(self.html) if notebook: return IFrame(name, width=self.width, height=self.height)
def launch_arborist_gui(json_data: str, height=650):
    """
    :param json_data: json data to launch the Arborist with.
    :param height: IFrame height for output cell.
    """
    # Work in a fresh temporary directory so concurrent sessions don't clash.
    new_temp_dir = tempfile.mkdtemp()
    tmp_json = os.path.join(new_temp_dir, 'tmp_json')
    with open(tmp_json, 'w') as f:
        f.write(json_data)
    # Signal created by Javascript to continue work here.
    done_signal = os.path.join(os.path.dirname(tmp_json), 'DONE')
    base_url = os.environ.get("ARBORIST_BASE_URL", "/")
    running_on = '{}transmart-arborist?treefile={}'.format(
        base_url, os.path.abspath(tmp_json))
    display(IFrame(src=running_on, width='100%', height=height))
    try:
        # Wait for the done signal file to be created before breaking the GIL.
        while not os.path.exists(done_signal):
            time.sleep(0.1)
    except KeyboardInterrupt:
        # This stops the interpreter without showing a stacktrace.
        pass
    else:
        updated_json = None
        # We've been having issues with a slow file system where the json response was empty
        # Now we make sure something is sent back.
        while not updated_json:
            time.sleep(0.1)
            with open(tmp_json, 'r') as f:
                updated_json = f.read()
        return updated_json
    finally:
        shutil.rmtree(new_temp_dir)
        # Clear output from Jupyter Notebook cell
        clear_output()
        print('Cleaning up before closing...')
def __init__(self, explanation, model=None, *, dataset=None, true_y=None, classes=None, features=None, port=5000, use_cdn=True, datasetX=None, trueY=None):
    """Register an explanation with the shared dashboard service and display it.

    NOTE(review): mutates class-level state (service, model_count,
    explanations) — presumably intended for single-threaded notebook use;
    confirm before calling concurrently.
    """
    # support legacy kwarg names
    if dataset is None and datasetX is not None:
        dataset = datasetX
    if true_y is None and trueY is not None:
        true_y = trueY
    # Lazily start the shared dashboard service on first instantiation.
    if not ExplanationDashboard.service:
        try:
            ExplanationDashboard.service = ExplanationDashboard.DashboardService(
                port)
            self._thread = threading.Thread(
                target=ExplanationDashboard.service.run, daemon=True)
            self._thread.start()
        except Exception as e:
            # Reset so a later attempt can retry the service startup.
            ExplanationDashboard.service = None
            raise e
    ExplanationDashboard.service.use_cdn = use_cdn
    ExplanationDashboard.model_count += 1
    # Each registered model gets its own prediction endpoint.
    predict_url = "http://{0}:{1}/{2}/predict".format(
        ExplanationDashboard.service.ip,
        ExplanationDashboard.service.port,
        str(ExplanationDashboard.model_count))
    ExplanationDashboard.explanations[str(ExplanationDashboard.model_count)] =\
        ExplanationDashboardInput(explanation, model, dataset, true_y, classes,
                                  features, predict_url)
    # Databricks cannot render IPython IFrames; emit raw iframe HTML instead.
    if "DATABRICKS_RUNTIME_VERSION" in os.environ:
        html = "<iframe src='http://{0}:{1}/{2}' width='100%' height='1200px' frameBorder='0'></iframe>".format(
            ExplanationDashboard.service.ip,
            ExplanationDashboard.service.port,
            ExplanationDashboard.model_count)
        _render_databricks(html)
    else:
        url = 'http://{0}:{1}/{2}'.format(
            ExplanationDashboard.service.ip,
            ExplanationDashboard.service.port,
            ExplanationDashboard.model_count)
        display(IFrame(url, "100%", 1200))
def matter_inline(line, cell):
    """Cell magic: compile *cell* to JS, embed it in an HTML page, return an IFrame.

    The magic *line* carries space-separated key=value options; 'notebook'
    and 'file_id' name the output file, the rest are forwarded to IFrame.
    """
    namespaces = list(_NAMESPACES.values())
    namespaces = '\n'.join(namespaces) if namespaces else ''
    kwargs = {}
    for kw in line.split(' '):
        # Fix: tolerate repeated/trailing spaces (empty tokens crashed before).
        if not kw:
            continue
        # Fix: split only on the first '=' so values may themselves contain '='.
        k, v = kw.split('=', 1)
        kwargs[k] = v
    notebook = kwargs.pop('notebook', 'matter.py+notebook')
    file_id = kwargs.pop('file_id', 'file_id')
    filename = f"{dir_name}/{notebook}-{file_id}.html"
    js = py2js(cell)
    with open(filename, "w", encoding='utf8') as fp:
        fp.write(template.format(script=js,
                                 name=filename,
                                 page_link=filename,
                                 namespaces=namespaces))
    if 'height' not in kwargs:
        kwargs['height'] = '200px'
    return IFrame(filename, width="100%", **kwargs)
def __init__(self, local_file, label, cap, api_obj, fignum=None, upload_file=True, size=(500, 400), filetype='image', iframe_test=True, show_fignum=True):
    """Store figure metadata and optionally upload *local_file* via *api_obj*."""
    self.api_obj = api_obj
    self.local_file = local_file
    self.size = size
    self.cap = cap
    self.label = label
    self.fignum = fignum
    self.filetype = filetype
    self.show_fignum = show_fignum
    self.fignum_str = 'Figure %s.' % fignum if show_fignum else ''
    from IPython.display import IFrame
    if upload_file:
        api_obj.upload_file(local_file)
        fname = local_file.split('/')[-1]
        self.cloud_file = api_obj.get_file_link(fname)
        # this is a hacky solution to an odd bug I have noticed,
        # where after uploading an image (seems to be when this is
        # repeated a few times), the displayed image is sometimes
        # not the latest uploaded image. This seems to be resolved
        # by creating an IFrame with the dropbox link; so just do
        # this here with a temporary one, and then delete immediately
        if iframe_test:
            tmp = IFrame(self.cloud_file, width=500, height=500)
            del tmp
def plot_points(xyz, colors=None, size=0.1, axis=False):
    """Write an interactive 3D scatter page ('plot_points.html') and return an IFrame.

    Args:
        xyz: (N, 3) array of point positions.
        colors: optional per-point color array; flattened before templating.
        size: rendered point size.
        axis: when True, draw axes scaled to the cloud's extent.

    Returns:
        IPython IFrame displaying the generated page.
    """
    positions = xyz.reshape(-1).tolist()
    camera_position = xyz.max(0) + abs(xyz.max(0))
    look = xyz.mean(0)
    if colors is None:
        # Fix: one default RGB triple per point. positions holds 3 floats per
        # point, so the triple must repeat len(positions) // 3 times — the
        # original multiplied by len(positions), producing 3x too many values.
        colors = [1, 0.5, 0] * (len(positions) // 3)
    elif len(colors.shape) > 1:
        colors = colors.reshape(-1).tolist()
    if axis:
        # Fix: ndarray.ptp() was removed in NumPy 2.0; compute max-min directly.
        axis_size = (xyz.max() - xyz.min()) * 1.5
    else:
        axis_size = 0
    with open("plot_points.html", "w") as html:
        html.write(
            TEMPLATE_POINTS.format(
                camera_x=camera_position[0],
                camera_y=camera_position[1],
                camera_z=camera_position[2],
                look_x=look[0],
                look_y=look[1],
                look_z=look[2],
                positions=positions,
                colors=colors,
                points_size=size,
                axis_size=axis_size,
            ))
    return IFrame("plot_points.html", width=800, height=800)
def show_tree_filter(event_col, width=500, height=500, **kwargs):
    """
    Shows tree selector, based on your event names. It uses `_` for
    splitting event names for group aggregation

    :param event_col: column with events
    :param width: width of IFrame
    :param height: height of IFrame
    :param kwargs: do nothing
    :return: nothing
    """
    name_parts = pd.Series(event_col.unique()).str.split('_', expand=True)
    tree = _create_node(0, name_parts, [], '')
    page = __TEMPLATE__.format(tree_data=json.dumps(tree))
    with open('./filter.html', 'w') as out:
        out.write(page)
    display(IFrame('./filter.html', width=width, height=height))
def show_doc(obj, iframe=False):
    """
    Show the online LISA documentation about the given object.

    :param obj: Object to show the doc of. It can be anything, including instances.
    :type obj: object

    :param iframe: If ``True``, uses an IFrame, otherwise opens a web browser.
    :type iframe: bool
    """
    doc_url = get_doc_url(obj)
    if not iframe:
        webbrowser.open(doc_url)
        return
    # Print the URL as well so it stays usable if the IFrame fails to render.
    print(doc_url)
    return IFrame(src=doc_url, width="100%", height="600em")
def _build_iframe(self, width='100%', height=350):
    """
    Helper function to build an :class:`ipython:IPython.display.IFrame` if
    that module exists within your environment

    :param width: width of the ipython cell
    :param height: height of the ipython cell
    :return: :class:`ipython:IPython.display.IFrame`
    """
    try:
        from IPython.display import IFrame
    except ImportError:
        # Degrade gracefully outside notebook environments.
        logger.info('in order to use this function, please install IPython')
        return None
    return IFrame('{}/dtale/iframe'.format(self._url),
                  width=width, height=height)
def open_dashboard(wmo=None, cyc=None, width="100%", height=1000, url=None, type="ea"):
    """ Insert in a notebook cell the Euro-Argo dashboard page

    Parameters
    ----------
    wmo: int
        The float WMO to display. By default, this is set to None and
        will insert the general dashboard.

    Returns
    -------
    IFrame: IPython.lib.display.IFrame
    """
    # NOTE: the 'cyc' parameter is currently unused (argovis support is
    # commented out because argovis doesn't allow X-Frame insertion).
    if type not in ["ea", "eric", "coriolis"]:
        raise InvalidDashboard("Invalid dashboard type")
    from IPython.display import IFrame
    if url is None:
        if type in ("ea", "eric"):
            # Euro-Argo dashboard: general page, or a float-specific page.
            if wmo is None:
                url = "https://fleetmonitoring.euro-argo.eu"
            else:
                url = "https://fleetmonitoring.euro-argo.eu/float/{}".format(
                    str(wmo))
        elif type == 'coriolis':
            # Coriolis dashboard (only float-specific pages exist).
            if wmo is not None:
                url = ("https://co-insitucharts.ifremer.fr/platform/{}/charts"
                       ).format(str(wmo))
    return IFrame(url, width=width, height=height)
def tilemap(tif, name, overwrite=False, overlay=None, tilelvl=[9, 13]):
    """Tile a GeoTIFF with gdal2tiles and return an IFrame on the leaflet page.

    :param tif: path to the input raster.
    :param name: human-readable layer title; also hashed to name the tile dir.
    :param overwrite: remove any previously generated tile directory first.
    :param overlay: optional iterable of (vector_file, vector_name) pairs to
        add as GeoJSON overlay layers.
    :param tilelvl: [min, max] zoom levels passed to gdal2tiles.

    NOTE(review): shell commands are built by string interpolation from
    caller-supplied names/paths — do not pass untrusted input.
    """
    # Fix: hashlib requires bytes in Python 3; encode the str title first.
    id = hashlib.sha1(name.encode('utf-8')).hexdigest()[:10]
    if overwrite:
        os.system('rm -rf %s' % id)
    os.system(
        'gdal2tiles.py -e -z %d-%d -a 0,0,0 -s epsg:4326 -r bilinear -t "%s" %s -z 8-14 %s'
        % (tilelvl[0], tilelvl[1], name, tif, id))
    # Fix: don't shadow the builtin 'input' with the file handle name.
    with open('%s/leaflet.html' % id) as page:
        s = page.read()
    # Upgrade asset URLs to https.
    s = s.replace('http://cdn.leafletjs.com', 'https://cdn.leafletjs.com')
    s = s.replace('http://{s}.tile.osm.org',
                  'https://{s}.tile.openstreetmap.org')
    addLayer = 'map.addLayer(lyr);'
    if overlay:
        # Pull in the leaflet-ajax plugin used for the GeoJSON layers.
        os.system(
            "wget 'https://raw.githubusercontent.com/calvinmetcalf/leaflet-ajax/master/dist/leaflet.ajax.min.js' -O %s/leaflet.ajax.min.js"
            % id)
        s = s.replace(
            'leaflet.js"></script>',
            'leaflet.js"></script>\n<script src="leaflet.ajax.min.js"></script>'
        )
        vectorNewLayers = []
        vectorOverlay = []
        vectorAdd = []
        for vecFile, vecName in overlay:
            # Fix: encode before hashing here as well.
            vecId = hashlib.sha1(vecName.encode('utf-8')).hexdigest()[:10]
            os.system('ogr2ogr -f "geojson" %s/%s.json %s' %
                      (id, vecId, vecFile))
            vectorNewLayers.append(
                'var vecLayer%s = new L.GeoJSON.AJAX("%s.json");' %
                (vecId, vecId))
            vectorOverlay.append('"%s":vecLayer%s' % (vecName, vecId))
            vectorAdd.append('map.addLayer(vecLayer%s);' % vecId)
        s = s.replace('// Map', '\n'.join(vectorNewLayers) + '\n // Map')
        s = s.replace('{"Layer": lyr}',
                      '{' + ','.join(vectorOverlay) + ', "Layer": lyr}')
        addLayer += '\n'.join(vectorAdd)
    s = s.replace(').addTo(map);', ').addTo(map); ' + addLayer)
    with open('%s/leaflet.html' % id, 'w') as output:
        output.write(s)
    return IFrame('%s/leaflet.html' % id, width='1000', height='600')
def graph(data, node_params=None, thresh=.05, width=500, height=500, interactive=True, layout_dump=None, show_percent=True, **kwargs):
    """
    Plots graph by its edgelist representation

    :param data: graph in edgelist form
    :param node_params: mapping describes which node should be highlighted by target or source type
        Node param should be represented in the following form
        ```{
            'lost': 'bad_target',
            'passed': 'nice_target',
            'onboarding_welcome_screen': 'source',
        }```
        If mapping is not given, it will be constracted from config
    :param thresh: threshold for filtering low frequency edges
    :param width: width of plot
    :param height: height of plot
    :param interactive: if True, then opens graph visualization in Jupyter Notebook IFrame
    :param layout_dump: path to layout dump
    :param show_percent: if True, then all edge weights are converted to percents
    :param kwargs: do nothing, needs for plot.graph usage with other functions
    :return: saves to `experiments_folder` webpage with js graph visualization
    """
    if node_params is None:
        node_params = _prepare_node_params(node_params, data)
    # 100px margins are reserved for plot chrome inside the rendered page.
    res = _make_json_data(data, node_params, layout_dump, thresh=thresh,
                          width=width - 100, height=height - 100, **kwargs)
    # NOTE(review): the latin1-encode / utf-8-decode round-trip presumably
    # preserves non-ASCII event names in the template — confirm.
    x = __TEMPLATE__.format(
        width=width,
        height=height,
        links=json.dumps(res.get('links')).encode('latin1').decode('utf-8'),
        node_params=json.dumps(node_params).encode('latin1').decode('utf-8'),
        nodes=json.dumps(res.get('nodes')).encode('latin1').decode('utf-8'),
        show_percent="1 !== 1" if show_percent else "1 === 1")
    # Per-experiment file name when the data carries a retention config.
    if hasattr(data, 'trajectory'):
        plot_name = f'{data.trajectory.retention_config["experiments_folder"]}/index_{datetime.now()}'
    else:
        plot_name = 'index'
    # Sanitize ':' and '.' (from the timestamp) for a safe file name.
    plot_name = plot_name.replace(':', '_').replace('.', '_') + '.html'
    with open(plot_name, 'w') as f:
        f.write(x)
    if interactive:
        display(IFrame(plot_name, width=width + 100, height=height + 100))
def display(self, data: pd.DataFrame, chart_name: str) -> IFrame:
    """
    Prepare package data and display MorphChart in an IFrame.

    Parameters
    ----------
    data: pd.DataFrame:
        A DataFrame of data for the morphchart to plot.
    chart_name: str:
        The name of the Morph Chart to plot.
    """
    # Check input data is correct format and that the chart being requested exists
    if not isinstance(data, pd.DataFrame):
        raise MsticpyException(
            "Data provided must be in pandas.DataFrame format")
    if chart_name not in self.charts:
        raise MsticpyException(
            # Fix: corrected typos 'vaid'/'avaliable' in the error message.
            f"{chart_name} is not a valid chart. Run list_charts() to see available charts"  # pylint: disable=line-too-long
        )

    # Create description file with length of our data set
    description_dict = self.charts[chart_name]["DescriptionFile"]
    description_dict["tables"][0]["rows"] = len(data)

    # Create output folder for package files
    out_path = Path.cwd().joinpath(
        *["morphchart_package", "description.json"])
    Path.mkdir(Path.cwd().joinpath("morphchart_package"), exist_ok=True)

    # Write description file.
    # Fix: use a context manager — the original left the handle open.
    with open(out_path, "w") as morph_file:
        json.dump(description_dict, morph_file)

    # Write dataset to query_data csv.
    # Fix: removed the dead 'data_out_path = out_path = ...' double assignment.
    data_out_path = Path.cwd().joinpath(
        *["morphchart_package", "query_data.csv"])
    data.to_csv(data_out_path, index=False)

    # Display Morph Charts in IFrame with instructions
    print(
        f"Navigate to {Path.cwd().joinpath('morphchart_package')} and upload the files below"
    )
    print("Charts provided by http://morphcharts.com/")
    return IFrame("http://morphcharts.com/designer.html", "100%", "600px")
def draw(cipher, options=None):
    """Render a Cypher query with neovis.js and return an IFrame on the result.

    :param cipher: Cypher query to visualize (double quotes and newlines are
        sanitized before embedding in the page).
    :param options: neovis ``labels`` configuration mapping; defaults to {}.
    """
    # Fix: replaced the mutable default argument ({}) with the None sentinel;
    # callers passing nothing or a dict see identical behavior.
    if options is None:
        options = {}
    html = """
    <html>
    <head>
        <script src="https://rawgit.com/neo4j-contrib/neovis.js/master/dist/neovis.js"></script>
        <script type="text/javascript">
            var viz;
            function draw() {{
                var config = {{
                    container_id: "viz",
                    server_url: "bolt://localhost:7687",
                    server_user: "",
                    server_password: "",
                    labels: {options},
                    initial_cypher: "{cipher}"
                }};
                viz = new NeoVis.default(config);
                viz.render();
            }}
        </script>
    </head>
    <body onload="draw()">
        <div id="viz"></div>
    </body>
    </html>
    """
    unique_id = str(uuid.uuid4())
    sanitized_cipher = cipher.replace("\"", "'").replace("\n", " ")
    html = html.format(cipher=sanitized_cipher, options=json.dumps(options))
    filename = "figure/graph-{}.html".format(unique_id)
    # Fix: write through a context manager instead of manual open/close.
    with open(filename, "w") as out:
        out.write(html)
    return IFrame(filename, width="100%", height="400")
def plot(self, plot_column, legend_name, key_on, output='plot_data.html'):
    """Draw a choropleth world map of *plot_column* and save it as HTML."""
    world_map = folium.Map(location=[100, 0], zoom_start=1.5)
    # Bing Map with country_geo(.json)
    world_map.choropleth(geo_data=self.__map_data,
                         data=self.__data[plot_column],
                         columns=plot_column,
                         key_on=key_on,
                         fill_color='YlGnBu',
                         fill_opacity=0.7,
                         line_opacity=0.2,
                         legend_name=legend_name)
    # Convert Folium plot to HTML
    world_map.save(output)
    print('SUCCESS: Check for plot_data.html')
    # Use IFrame to display HTML
    if USE_JUPYTER_NB:
        from IPython.display import IFrame
        display(IFrame('plot_data.html', width=700, height=450))
def plot_voxelgrid(v_grid, cmap="Oranges", axis=False, output_name=None):
    """Render *v_grid* as an interactive voxel page and return an IFrame on it."""
    if output_name is None:
        output_name = "plotVG.html"
    # Step size of the discretization along each axis.
    scaled_shape = v_grid.shape / min(v_grid.shape)
    # coordinates returned from argwhere are inversed so use [:, ::-1]
    points = np.argwhere(v_grid.vector)[:, ::-1] * scaled_shape
    # Map occupied-voxel values onto the requested colormap, dropping alpha.
    occupied = v_grid.vector.reshape(-1)
    mapper = plt.cm.ScalarMappable(cmap=cmap)
    rgb = mapper.to_rgba(occupied[occupied > 0])[:, :-1]
    camera_position = points.max(0) + abs(points.max(0))
    look = points.mean(0)
    axis_size = points.ptp() * 1.5 if axis else 0
    with open(output_name, "w") as html:
        html.write(
            TEMPLATE.format(camera_x=camera_position[0],
                            camera_y=camera_position[1],
                            camera_z=camera_position[2],
                            look_x=look[0],
                            look_y=look[1],
                            look_z=look[2],
                            X=points[:, 0].tolist(),
                            Y=points[:, 1].tolist(),
                            Z=points[:, 2].tolist(),
                            R=rgb[:, 0].tolist(),
                            G=rgb[:, 1].tolist(),
                            B=rgb[:, 2].tolist(),
                            S_x=scaled_shape[0],
                            S_y=scaled_shape[2],
                            S_z=scaled_shape[1],
                            n_voxels=sum(occupied > 0),
                            axis_size=axis_size))
    return IFrame(output_name, width=800, height=800)
def deck_to_html(deck_json, mapbox_key=None, filename=None, open_browser=False, notebook_display=False, css_background_color=None, iframe_height=500, iframe_width=500, tooltip=True, custom_libraries=None, as_string=False):
    """Converts deck.gl format JSON to an HTML page"""
    html = render_json_to_html(
        deck_json,
        mapbox_key=mapbox_key,
        tooltip=tooltip,
        css_background_color=css_background_color,
        custom_libraries=custom_libraries,
    )
    if as_string:
        # Caller wants the markup itself; nothing is written to disk.
        return html
    f = None
    try:
        f = open_named_or_temporary_file(filename)
        f.write(html)
    finally:
        # NOTE(review): if opening the file raised, f is still None and this
        # generic Exception masks the original error — consider chaining.
        if f is None:
            raise Exception("pydeck could not write a file")
        f.close()
    if open_browser:
        display_html(realpath(f.name))
    if notebook_display:
        from IPython.display import IFrame  # noqa
        # Relative path keeps the IFrame src valid from the notebook's cwd.
        notebook_to_html_path = relpath(f.name)
        display(  # noqa
            IFrame(
                os.path.join("./", notebook_to_html_path),
                width=iframe_width,
                height=iframe_height,
            ))
    return realpath(f.name)
def plot_voxelgrid(v_grid, cmap="Oranges", axis=False):
    """Write plotVG.html visualizing the voxel grid and return an IFrame on it."""
    # shape: step size in discretization
    step = v_grid.shape / np.min(v_grid.shape)
    # coordinates returned from argwhere are inversed so use [:, ::-1]
    # Equivalent to np.transpose(x, [2, 1, 0])
    pts = np.argwhere(v_grid.vector)[:, ::-1] * step
    flat = v_grid.vector.reshape(-1)
    # [:, :-1]: drop the alpha channel from the RGBA colors.
    colors = plt.cm.ScalarMappable(cmap=cmap).to_rgba(flat[flat > 0])[:, :-1]
    cam = pts.max(0) + abs(pts.max(0))
    center = pts.mean(0)
    axis_size = pts.ptp() * 1.5 if axis else 0
    with open("plotVG.html", "w") as html:
        html.write(
            TEMPLATE_VG.format(camera_x=cam[0],
                               camera_y=cam[1],
                               camera_z=cam[2],
                               look_x=center[0],
                               look_y=center[1],
                               look_z=center[2],
                               X=pts[:, 0].tolist(),
                               Y=pts[:, 1].tolist(),
                               Z=pts[:, 2].tolist(),
                               R=colors[:, 0].tolist(),
                               G=colors[:, 1].tolist(),
                               B=colors[:, 2].tolist(),
                               S_x=step[0],
                               S_y=step[2],
                               S_z=step[1],
                               n_voxels=sum(flat > 0),
                               axis_size=axis_size))
    return IFrame("plotVG.html", width=800, height=800)
def showInNetron(model_filename: str, address: str = None, port: int = 8081):
    """Shows the ONNX model in Jupyter Notebook.

    Args:
        model_filename (str): the path to the model file to show.
        address (str, optional): The IP address used by Netron to show the model graph.
            Defaults to None.
        port (int, optional): The port number use by Netron to show the model graph.
            Defaults to 8081.

    Returns:
        IFrame: The IFrame where the model is shown.
    """
    # Use a fresh local name instead of reassigning the 'address' parameter.
    if address is None:
        served = netron.start(file=model_filename, address=port, browse=False)
    else:
        served = netron.start(
            file=model_filename, address=(address, port), browse=False)
    return IFrame(src=f"http://{served[0]}:{served[1]}/",
                  width="100%", height=400)
def show(self) -> None:
    """Show the object."""
    temp_dir, temp_file = self.compile()
    # create temp file name
    filename = os.path.join(temp_dir, temp_file)
    if config['environment']['interactive']:
        from IPython.display import IFrame, display
        # open the file in the notebook
        display(IFrame(filename, width=600, height=300))
    else:
        # open the file in the webbrowser
        webbrowser.open(r'file:///' + filename)
    # Wait for .1 second before temp file is deleted
    time.sleep(.1)
    # remove the temporal directory
    shutil.rmtree(temp_dir)