Example #1
0
def generate_notebook_outputs(
    env: BuildEnvironment,
    ntbk: nbf.NotebookNode,
    file_path: Optional[str] = None,
    show_traceback: bool = False,
) -> nbf.NotebookNode:
    """
    Add outputs to a NotebookNode by pulling from cache.

    Function to get the database instance. Get the cached output of the notebook
    and merge it with the original notebook. If there is no cached output,
    checks if there was error during execution, then saves the traceback to a log file.

    There are three main paths through this function, driven by the
    ``jupyter_execute_notebooks`` config value:

    - ``"off"`` (or no cache configured and "off" in the method): return the
      notebook untouched.
    - no cache path: execute the notebook in-process (either in a temporary
      directory or next to the source file) unless ``"auto"`` mode finds the
      notebook already has all outputs.
    - cache path (``"cache"`` in the method): merge previously cached outputs
      into the notebook, reporting a saved traceback if the last execution
      failed.

    In the latter two cases, per-document execution statistics are recorded in
    ``env.nb_execution_data`` and ``env.nb_execution_data_changed`` is set.

    :param env: the Sphinx build environment (provides config, docname, paths).
    :param ntbk: the notebook to populate with outputs.
    :param file_path: path of the notebook source file; defaults to
        ``env.doc2path(env.docname)``.
    :param show_traceback: if True, the execution traceback is included in the
        logged error message (see ``_report_exec_fail``).
    :return: the notebook, possibly with outputs merged or freshly executed.
    """

    # check if the file is of a format that may be associated with outputs
    if not is_valid_exec_file(env, env.docname):
        return ntbk

    # If we have a jupyter_cache, see if there's a cache for this notebook
    file_path = file_path or env.doc2path(env.docname)

    execution_method = env.config["jupyter_execute_notebooks"]  # type: str

    path_to_cache = env.nb_path_to_cache if "cache" in execution_method else None

    if not path_to_cache and "off" in execution_method:
        return ntbk

    if not path_to_cache:

        if execution_method == "auto" and nb_has_all_output(file_path):
            LOGGER.info(
                "Did not execute %s. "
                "Set jupyter_execute_notebooks to `force` to execute",
                env.docname,
            )
        else:
            # Execution options shared by both the temp-dir and in-place paths.
            exec_kwargs = {
                "timeout": env.config["execution_timeout"],
                "allow_errors": env.config["execution_allow_errors"],
            }
            if env.config["execution_in_temp"]:
                with tempfile.TemporaryDirectory() as tmpdirname:
                    LOGGER.info("Executing: %s in temporary directory",
                                env.docname)
                    result = single_nb_execution(
                        ntbk, cwd=tmpdirname, **exec_kwargs
                    )
            else:
                cwd = Path(file_path).parent
                LOGGER.info("Executing: %s in: %s", env.docname, cwd)
                result = single_nb_execution(ntbk, cwd=cwd, **exec_kwargs)

            report_path = None
            if result.err:
                # Save the traceback to a log file and log the failure.
                report_path, message = _report_exec_fail(
                    env,
                    Path(file_path).name,
                    result.exc_string,
                    show_traceback,
                    "Execution Failed with traceback saved in {}",
                )
                LOGGER.error(message)

            ntbk = result.nb

            # Record execution statistics for this document.
            env.nb_execution_data_changed = True
            env.nb_execution_data[env.docname] = {
                "mtime": datetime.now().timestamp(),
                "runtime": result.time,
                "method": execution_method,
                "succeeded": not result.err,
            }
            if report_path:
                env.nb_execution_data[env.docname]["error_log"] = report_path

        return ntbk

    cache_base = get_cache(path_to_cache)
    # Use relpath here in case Sphinx is building from a non-parent folder
    r_file_path = Path(os.path.relpath(file_path, Path().resolve()))

    # default execution data
    runtime = None
    succeeded = False
    report_path = None

    try:
        pk, ntbk = cache_base.merge_match_into_notebook(ntbk)
    except KeyError:
        # No cached record for this notebook; report why if we can.
        message = (
            f"Couldn't find cache key for notebook file {str(r_file_path)}. "
            "Outputs will not be inserted.")
        try:
            stage_record = cache_base.get_staged_record(file_path)
        except KeyError:
            stage_record = None
        if stage_record and stage_record.traceback:
            # The last (failed) execution left a traceback; save it to a log
            # file and append its location to the error message.
            report_path, suffix = _report_exec_fail(
                env,
                r_file_path.name,
                stage_record.traceback,
                show_traceback,
                "\n  Last execution failed with traceback saved in {}",
            )
            message += suffix

        LOGGER.error(message)

    else:
        LOGGER.verbose("Merged cached outputs into %s", str(r_file_path))
        succeeded = True
        try:
            # Best-effort: the cache record may not carry timing data.
            runtime = cache_base.get_cache_record(pk).data.get(
                "execution_seconds", None)
        except Exception:
            pass

    # Record execution statistics for this document.
    env.nb_execution_data_changed = True
    env.nb_execution_data[env.docname] = {
        "mtime": datetime.now().timestamp(),
        "runtime": runtime,
        "method": execution_method,
        "succeeded": succeeded,
    }
    if report_path:
        env.nb_execution_data[env.docname]["error_log"] = report_path

    return ntbk