def post(self):
    """Handle the submitted parameter form: bind each value, run the
    notebook, and respond with the rendered HTML.

    Checkbox (bool) parameters arrive as 'on' when ticked and are absent
    otherwise, hence the 'off' default; every other parameter is coerced
    with its declared type.
    """
    defined = []
    for param in self.application.parameters:
        if param.type is bool:
            raw = self.get_argument(param.name, default='off')
            bound = param.with_value(raw == 'on')
        else:
            bound = param.with_value(param.type(self.get_argument(param.name)))
        defined.append(bound)

    # Substitute the bound values into the template notebook and execute it
    # from the notebook's own directory so relative paths resolve.
    nb = replace_definitions(self.application.nb, defined)
    nb = execute(nb, cwd=os.path.dirname(self.application.path))

    html, _ = HTMLExporter().from_notebook_node(nb)
    self.write(html)
def create_qc_report(subject_dir, outdir):
    """Build and save the HCP-ASL QC report notebook for one subject.

    Reads the packaged report template, rebinds its parameters to this
    subject's directories, executes the notebook, and writes the executed
    copy into the subject's output directory.
    """
    # Locate and load the packaged template notebook.
    template_name = get_package_data_name('report_template.ipynb')
    template_nb = nbformat.read(template_name, as_version=4)

    # Discover the template's parameter cell, then rebind the values for
    # this subject (paths are stringified for the notebook source).
    orig_parameters = extract_parameters(template_nb)
    new_parameters = parameter_values(
        orig_parameters,
        subject_dir=str(subject_dir),
        outdir=str(outdir),
    )

    # Substitute without executing, then run the result (execution mutates
    # the notebook in place, so the node we save carries the outputs).
    new_nb = replace_definitions(template_nb, new_parameters, execute=False)
    _ = execute(new_nb)

    # Persist the executed report in the subject's main output directory.
    report_path = Path(subject_dir) / outdir / "hcp_asl_report.ipynb"
    with open(report_path, "w") as report_file:
        nbformat.write(new_nb, report_file)
def execute_and_clear(notebook_file_name, source_dir, delete_repeated=True):
    """Read a notebook, prune cells, execute it, and clear all outputs.

    Pruning rules (a cell matching a drop rule is removed):
      * code cells are dropped unless their source contains "# Setup";
      * markdown cells are dropped when their source contains
        "#### Explanation" (other markdown cells are kept);
      * cells of any other type are kept.
    Surviving cells also have any PyCharm editor metadata stripped.

    Parameters
    ----------
    notebook_file_name : path of the notebook to read (read with
        nbformat.NO_CONVERT, i.e. no version conversion).
    source_dir : working directory used while executing the notebook.
    delete_repeated : NOTE(review): accepted but never read in this body —
        confirm whether it is dead or implemented elsewhere.

    Returns
    -------
    The executed notebook node with every cell output cleared.
    """
    nb = nbformat.read(notebook_file_name, nbformat.NO_CONVERT)
    replacements = []
    for cell in nb["cells"]:
        code_cell = cell.get("cell_type", None) == "code"
        markdown_cell = cell.get("cell_type", None) == "markdown"
        source = cell.get("source", "")
        # Drop non-setup code cells.
        if code_cell and "# Setup" not in source:
            continue
        # Drop markdown "Explanation" cells.
        if markdown_cell and ("#### Explanation" in cell["source"]):
            continue
        # Strip PyCharm-specific metadata from cells we keep.
        if "metadata" in cell and "pycharm" in cell["metadata"]:
            del cell["metadata"]["pycharm"]
        replacements.append(cell)
    nb["cells"] = replacements
    # Run the pruned notebook, then wipe outputs so the saved copy is clean.
    executed = nbclient.execute(nb, cwd=source_dir)
    cop = pre.ClearOutputPreprocessor()
    nb, _ = cop.preprocess(executed, {})
    return nb
#!/usr/bin/env python3
"""Example of using nbparameterise API to substitute variables in 'batch mode' """
from nbclient import execute
import nbformat
from nbparameterise import extract_parameters, parameter_values, replace_definitions

stock_names = ['AAPL', 'MSFT', 'GOOG']

# Load the template notebook once and discover its parameter cell.
nb = nbformat.read("Stock display.ipynb", as_version=4)
orig_parameters = extract_parameters(nb)

for name in stock_names:
    print("Running for stock", name)

    # Bind this stock symbol, build a fresh notebook, and run it.
    new_nb = replace_definitions(nb, parameter_values(orig_parameters, stock=name))
    execute(new_nb)

    # Persist the executed copy under a per-stock filename.
    with open("Stock display %s.ipynb" % name, 'w') as out:
        nbformat.write(new_nb, out)
def add_notebook_outputs(env, ntbk, file_path=None, show_traceback=False):
    """
    Add outputs to a NotebookNode by pulling from cache.

    Function to get the database instance. Get the cached output of the notebook
    and merge it with the original notebook. If there is no cached output,
    checks if there was error during execution, then saves the traceback to a
    log file.

    Returns the notebook node, either merged with cached outputs, freshly
    executed, or unchanged (when execution is off or the file is excluded).
    """
    # If we have a jupyter_cache, see if there's a cache for this notebook
    file_path = file_path or env.doc2path(env.docname)
    dest_path = Path(env.app.outdir)
    reports_dir = str(dest_path) + "/reports"
    path_cache = False

    # Files excluded from execution pass through untouched.
    if not is_valid_exec_file(env, env.docname):
        return ntbk

    # "cache" mode: outputs come from a jupyter-cache database.
    if "cache" in env.config["jupyter_execute_notebooks"]:
        path_cache = env.path_cache

    if not path_cache:
        # No cache configured: execute directly unless execution is "off".
        if "off" not in env.config["jupyter_execute_notebooks"]:
            has_outputs = _read_nb_output_cells(
                file_path, env.config["jupyter_execute_notebooks"]
            )
            if not has_outputs:
                LOGGER.info("Executing: {}".format(env.docname))
                ntbk = execute(ntbk, cwd=Path(file_path).parent)
            else:
                # Notebook already has outputs; only "force" re-executes.
                LOGGER.info(
                    "Did not execute {}. "
                    "Set jupyter_execute_notebooks to `force` to execute".format(
                        env.docname
                    )
                )
        return ntbk

    cache_base = get_cache(path_cache)
    # Use relpath here in case Sphinx is building from a non-parent folder
    r_file_path = Path(os.path.relpath(file_path, Path().resolve()))
    try:
        _, ntbk = cache_base.merge_match_into_notebook(ntbk)
    except KeyError:
        # No cache entry for this notebook; report why, and surface any
        # stored execution traceback as a log file for the user.
        message = (
            f"Couldn't find cache key for notebook file {str(r_file_path)}. "
            "Outputs will not be inserted."
        )
        try:
            stage_record = cache_base.get_staged_record(file_path)
        except KeyError:
            stage_record = None
        if stage_record and stage_record.traceback:
            # save the traceback to a log file
            ensuredir(reports_dir)
            file_name = os.path.splitext(r_file_path.name)[0]
            full_path = reports_dir + "/{}.log".format(file_name)
            with open(full_path, "w", encoding="utf8") as log_file:
                log_file.write(stage_record.traceback)
            message += "\n Last execution failed with traceback saved in {}".format(
                full_path
            )
            if show_traceback:
                message += "\n" + stage_record.traceback
        LOGGER.error(message)
        # This is a 'fix' for jupyter_sphinx, which requires this value for
        # dumping the script file, to stop it from raising an exception if
        # not found: normally it would be added from the executed notebook
        # but, since we are already logging an error, we don't want to block
        # the whole build. So here we just add a dummy .txt extension.
        if "language_info" not in ntbk.metadata:
            ntbk.metadata["language_info"] = nbf.from_dict({"file_extension": ".txt"})
    else:
        LOGGER.verbose("Merged cached outputs into %s", str(r_file_path))
    return ntbk
# Execute each introduction solution notebook in place, then produce a
# cleared copy (non-setup code cells blanked) destined for the course tree.
website_dir = os.path.join("..", "website")
if not os.path.exists(website_dir):
    os.mkdir(website_dir)
spyder_dir = os.path.join("..", "course", "introduction", "spyder")
if not os.path.exists(spyder_dir):
    os.mkdir(spyder_dir)

source_dir = "../solutions/introduction/"
nb_files = glob.glob(os.path.join(source_dir, "*.ipynb"))
# FIX: `key=lambda v: key(v)` was a redundant lambda wrapper around the
# module-level `key` function; pass it directly.
nb_files = sorted(nb_files, key=key)

for nb_file in nb_files:
    print(f"Processing {nb_file}")
    nb = nbformat.read(nb_file, 4)
    # Execute with the solutions directory as cwd so relative paths resolve.
    executed = nbclient.execute(nb, cwd=source_dir, kernel_name="python3")
    print(f"Writing executed version of {nb_file}")
    nbformat.write(executed, nb_file, nbformat.NO_CONVERT)

    # Build the cleared variant: wipe outputs, blank every code cell that is
    # not a "# Setup" cell, and drop PyCharm editor metadata from those cells.
    cop = pre.ClearOutputPreprocessor()
    nb, _ = cop.preprocess(executed, {})
    for cell in nb["cells"]:
        if isinstance(cell, MutableMapping):
            # Keep non-code cells and setup cells untouched.
            if ("cell_type" not in cell
                    or cell["cell_type"] != "code"
                    or "# Setup" in cell["source"]):
                continue
            cell["source"] = ""
            if "metadata" in cell and "pycharm" in cell["metadata"]:
                del cell["metadata"]["pycharm"]

    # Destination path for the cleared copy (written further below —
    # NOTE(review): the write using `out` is outside this chunk).
    _, base = os.path.split(nb_file)
    out = os.path.abspath(os.path.join("..", "course", "introduction", base))
# In a docker setup, run as
# sudo run_from_host/update_mappings.sh
#
#
import nbclient
import nbformat
import argparse

# Re-execute the mappings notebook named on the command line.
parser = argparse.ArgumentParser(prog="update_mappings")
parser.add_argument("mapping_notebook", help="the notebook the stores the mappings")
args = parser.parse_args()

with open(args.mapping_notebook) as f:
    nb = nbformat.read(f, as_version=4)

# FIX: previously called nbclient.execute(new_nb) — `new_nb` is never defined
# in this script and raised NameError. The notebook just read is `nb`.
nbclient.execute(nb)
"""Execute ipywidgets.ipynb, capturing widget state, and save the result."""
from nbformat import read, write
from nbclient import execute
import json

# store_widget_state=True records final widget models in the notebook
# metadata so the saved file can render widgets statically.
nb = read("ipywidgets.ipynb", 4)
executed = execute(nb, store_widget_state=True)

# FIX: the result was serialized with json.dumps, which bypasses nbformat's
# validation and version normalization. nbformat.write emits a
# spec-compliant .ipynb.
with open('ipywidgets_executed.ipynb', 'w') as f:
    write(executed, f)