Example #1
 def run_notebook(self, name):
     """
     run a given notebook immediately.
     the job parameter is the name of the job script as in ipynb.
     Inserts and returns the Metadata document for the job.
     """
     notebook = self.get(name)
     meta_job = self.metadata(name)
     ts = datetime.datetime.now().strftime('%s')
     # execute
     try:
         ep = ExecutePreprocessor()
         ep.preprocess(notebook, {'metadata': {'path': '/'}})
     except Exception as e:
         status = str(e)
     else:
         status = 'OK'
         # record results
         meta_results = self.put(notebook,
                                 'results/{name}_{ts}'.format(**locals()))
         meta_results.attributes['source_job'] = name
         meta_results.save()
         job_results = meta_job.attributes.get('job_results', [])
         job_results.append(meta_results.name)
         meta_job.attributes['job_results'] = job_results
     # record final job status
     job_runs = meta_job.attributes.get('job_runs', {})
     job_runs[ts] = status
     meta_job.attributes['job_runs'] = job_runs
     meta_job.save()
     return meta_job
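For context, the execute step above boils down to the standard nbconvert pattern; a minimal standalone sketch, assuming the notebook lives on disk rather than in the omegaml store (file names are illustrative):

import nbformat
from nbconvert.preprocessors import ExecutePreprocessor

nb = nbformat.read('job.ipynb', as_version=4)            # load the job notebook
ep = ExecutePreprocessor(timeout=600, kernel_name='python3')
ep.preprocess(nb, {'metadata': {'path': '/'}})           # run all cells in place
nbformat.write(nb, 'job_results.ipynb')                  # persist the executed copy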
Example #2
def md2ipynb():
    assert len(sys.argv) == 3, 'usage: input.md output.ipynb'
    (src_fn, input_fn, output_fn) = sys.argv

    # timeout for each notebook, in sec
    timeout = 20 * 60
    # whether to evaluate the notebook (set EVAL=0 to skip execution)
    do_eval = int(os.environ.get('EVAL', True))
    reader = notedown.MarkdownReader()
    with open(input_fn, 'r') as f:
        notebook = reader.read(f)
    notebook['metadata'].update({'language_info': {
        'name': 'R'
    }})  # need to add language info for syntax highlight
    if do_eval:
        tic = time.time()
        executor = ExecutePreprocessor(timeout=timeout, kernel_name='ir')
        try:
            notebook, resources = executor.preprocess(notebook, resources={})
            print('%s: Evaluated %s in %f sec' %
                  (src_fn, input_fn, time.time() - tic))
        except CellExecutionError:
            msg = 'Error executing the notebook "%s".\n\n' % input_fn
            msg += 'See notebook "%s" for the traceback.' % output_fn
            print(msg)
            raise
        finally:
            # the file is opened in text mode, so write the serialized string
            # rather than utf-8 encoded bytes
            with open(output_fn, 'w') as f:
                f.write(nbformat.writes(notebook))
    print('%s: Wrote results into %s' % (src_fn, output_fn))
Example #3
 def run_notebook(self, name, event=None):
     """
     run a given notebook immediately.
     the job parameter is the name of the job script as in ipynb.
     Inserts and returns the Metadata document for the job.
     """
     notebook = self.get(name)
     meta_job = self.metadata(name)
     ts = datetime.datetime.now()
     # execute
     ep_kwargs = {'timeout': None}
     ep_kwargs.update(meta_job.kind_meta.get('ep_kwargs', {}))
     try:
         if not meta_job.kind_meta.get('keep_output', False):
             resources = {
             }  # https://nbconvert.readthedocs.io/en/latest/api/preprocessors.html
             cp = ClearOutputPreprocessor()
             cp.preprocess(notebook, resources)
         ep = ExecutePreprocessor(**ep_kwargs)
         ep.preprocess(notebook, {'metadata': {'path': '/'}})
     except Exception as e:
         status = 'ERROR'
         message = str(e)
     else:
         status = 'OK'
         message = ''
     # record results
     meta_results = self.put(notebook,
                             'results/{name}_{ts}'.format(**locals()))
     meta_results.attributes['source_job'] = name
     meta_results.save()
     job_results = meta_job.attributes.get('job_results', [])
     job_results.append(meta_results.name)
     meta_job.attributes['job_results'] = job_results
     # record final job status
     job_runs = meta_job.attributes.get('job_runs', [])
     runstate = {
         'status': status,
         'ts': ts,
         'message': message,
         'results': meta_results.name if status == 'OK' else None
     }
     job_runs.append(runstate)
     meta_job.attributes['job_runs'] = job_runs
     # set event run state if event was specified
     if event:
         attrs = meta_job.attributes
         triggers = attrs['triggers'] = attrs.get('triggers', [])
         scheduled = (trigger for trigger in triggers
                      if trigger['event-kind'] == 'scheduled')
         for trigger in scheduled:
             if event == trigger['event']:
                 trigger['status'] = status
                 trigger['ts'] = ts
     meta_job.save()
     return meta_results
Example #4
def executenb(nb, cwd=None, km=None, **kwargs):
    """Execute a notebook and embed widget state."""
    resources = {}
    if cwd is not None:
        resources['metadata'] = {'path': cwd}
    ep = ExecutePreprocessor(**kwargs)
    with ep.setup_preprocessor(nb, resources, km=km):
        ep.log.info("Executing notebook with kernel: %s" % ep.kernel_name)
        nb, resources = super(ExecutePreprocessor,
                              ep).preprocess(nb, resources)
        nb.metadata.language_info = language_info(ep)
        widgets = extract_widget_state(ep)
        if widgets:
            nb.metadata.widgets = {WIDGET_STATE_MIMETYPE: widgets}
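The notebook object is modified in place by the preprocessor, so a caller can pass a loaded notebook and write it back afterwards; a minimal sketch (file name, cwd and timeout are illustrative):

import nbformat

nb = nbformat.read('analysis.ipynb', as_version=4)
executenb(nb, cwd='.', timeout=600)      # runs the cells and embeds widget state in nb.metadata
nbformat.write(nb, 'analysis_executed.ipynb')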
Example #5
 def run_notebook(self, name, event=None):
     """
     run a given notebook immediately.
     the job parameter is the name of the job script as in ipynb.
     Inserts and returns the Metadata document for the job.
     """
     notebook = self.get(name)
     meta_job = self.metadata(name)
     ts = datetime.datetime.now()
     # execute
     try:
         ep = ExecutePreprocessor()
         ep.preprocess(notebook, {'metadata': {'path': '/'}})
     except Exception as e:
         status = 'ERROR'
         message = str(e)
     else:
         status = 'OK'
         message = ''
         # record results
         meta_results = self.put(
             notebook, 'results/{name}_{ts}'.format(**locals()))
         meta_results.attributes['source_job'] = name
         meta_results.save()
         job_results = meta_job.attributes.get('job_results', [])
         job_results.append(meta_results.name)
         meta_job.attributes['job_results'] = job_results
     # record final job status
     job_runs = meta_job.attributes.get('job_runs', [])
     runstate = {
         'status': status,
         'ts': ts,
         'message': message,
         'results': meta_results.name if status == 'OK' else None
     }
     job_runs.append(runstate)
     meta_job.attributes['job_runs'] = job_runs
     # set event run state if event was specified
     if event:
         attrs = meta_job.attributes
         triggers = attrs['triggers'] = attrs.get('triggers', [])
         scheduled = (trigger for trigger in triggers
                      if trigger['event-kind'] == 'scheduled')
         for trigger in scheduled:
             if event == trigger['event']:
                 trigger['status'] = status
                 trigger['ts'] = ts
     return meta_job.save()
Example #6
 def test_all_notebooks(self):
     """ Test all notebooks except blacklist. Blacklisted notebooks take too long."""
     print("testing all notebooks")
     blacklist = ["SyntheticData.ipynb"]
     pythonkernel = "python" + str(sys.version_info[0])
     this_dir = os.path.dirname(__file__)
     nbpath = os.path.join(this_dir, "../notebooks/")
     # see http://nbconvert.readthedocs.io/en/stable/execute_api.html
     ep = ExecutePreprocessor(
         timeout=120, kernel_name=pythonkernel, interrupt_on_timeout=True
     )
     lfiles = glob.glob(nbpath + "*.ipynb")
     for notebook_filename in lfiles:
         if os.path.basename(notebook_filename) not in blacklist:
             print(">> Testing notebook", notebook_filename)
             t = time.time()
             self._execNotebook(ep, notebook_filename, nbpath)
             print(notebook_filename, "took %g seconds." % (time.time() - t))
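The _execNotebook helper referenced above is not part of this snippet; a plausible minimal version, assuming it only reads each file and executes it in the notebooks directory (an illustration, not the project's actual helper):

 def _execNotebook(self, ep, notebook_filename, nbpath):
     # hypothetical helper: load the notebook and run it with the shared
     # preprocessor; assumes `import nbformat` at module level
     with open(notebook_filename) as f:
         nb = nbformat.read(f, as_version=4)
     ep.preprocess(nb, {"metadata": {"path": nbpath}})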
Example #7
def execute_notebook(notebook):
    # Configure the notebook executor and then run the notebook
    # given, returning the results to the call
    c = Config()
    c.ScriptExporter.preprocessors = [NarrativePreprocessor]
    nar_templates = os.path.join(os.environ.get('NARRATIVE_DIR',
                                                '.'), 'src', 'biokbase',
                                 'narrative', 'exporter', 'templates')
    c.TemplateExporter.template_path = ['.', nar_templates]

    # Initialize the notebook execution object, and run the notebook. If a
    # timeout (in seconds) is defined in KB_CELL_TIMEOUT, use that for
    # how long we allow a cell to run before timing it out, otherwise use
    # a default value of 60 minutes.
    # /tmp is the directory where the notebook will be run.
    if 'KB_CELL_TIMEOUT' in os.environ:
        timeout = int(os.environ['KB_CELL_TIMEOUT'])
    else:
        timeout = 3600
    ep = ExecutePreprocessor(timeout=timeout)
    resources = {'metadata': {'path': '/tmp'}}
    return (ep.preprocess(notebook, resources))
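A possible call site, assuming the narrative notebook is first loaded with nbformat (the file name is illustrative):

import nbformat

nb = nbformat.read('narrative.ipynb', as_version=4)
executed_nb, resources = execute_notebook(nb)   # runs all cells with /tmp as the working directory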
Example #8
 def JKConnect(self) -> None:
     runtime_dir = pathlib.Path(jupyter_core.paths.jupyter_runtime_dir())
     connection_files = runtime_dir.glob("kernel-*.json")
     source = '\n'.join(
         # slice off the 'kernel-' prefix and '.json' suffix; str.lstrip/rstrip
         # strip character sets and could eat leading hex digits of the kernel id
         connection_file.name[len('kernel-'):-len('.json')] + ' ' +
         datetime.fromtimestamp(connection_file.stat().st_ctime).strftime(
             "%m/%d %H:%M") for connection_file in connection_files)
     proc = subprocess.run("fzf-tmux|awk '{print $1}'",
                           input=source,
                           stdout=PIPE,
                           shell=True,
                           text=True)
     connection_file = 'kernel-%s.json' % proc.stdout.strip()
     connection_file = runtime_dir.joinpath(connection_file).as_posix()
     kc = BlockingKernelClient()
     try:
         kc.load_connection_file(connection_file)
         kc.execute_interactive('', timeout=1)
     except (TimeoutError, FileNotFoundError):
         self.nvim.command("echoerr 'Selected connection is dead!'")
     else:
         self.executor = ExecutePreprocessor()
         self.executor.kc = kc
         self.nvim.command("echo 'Successfully connected!'")
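Once attached, the BlockingKernelClient stored on the executor can send code straight to the running kernel; a minimal follow-up sketch (the statement is illustrative):

reply = self.executor.kc.execute_interactive("print('connected')", timeout=5)
# reply is the kernel's execute_reply message; reply['content']['status'] is 'ok' on success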
Example #9
                       gbref=gbref,
                       gblink=gblink,
                       templatesize=templatesize,
                       insertseguid=insertseguid,
                       finalcseguidZ=finalcseguidZ,
                       finalcseguidE=finalcseguidE,
                       fpn=fp.name,
                       fps=fp.seq,
                       rpn=rp.name,
                       rps=rp.seq)

    obj = notedown.MarkdownReader()

    nb = obj.to_notebook(content)

    pp = ExecutePreprocessor(timeout=600, kernel_name='python3')
    pp.timeout = 120  # seconds
    pp.interrupt_on_timeout = True

    pp.preprocess(nb, resources={})

    with open(newname, 'wt') as f:
        nbformat.write(nb, f)

#os.chdir(cwd)

# with open("README_template.md", "r", encoding="utf8") as f:
#     t=f.read()

# table = "| No. | TP | Promoter vector | Terminator vector | Jupyter nb |\n"
# table+= "|-----|----|-----------------|-------------------|------------|\n"
Example #10
def run(notebook, timeout=30):
    executor = ExecutePreprocessor(timeout=timeout)
    notebook, resources = executor.preprocess(notebook, resources={})
Example #11
def run(notebook, timeout=30, store_widget_state=True):
    executor = ExecutePreprocessor(timeout=timeout,
                                   store_widget_state=store_widget_state)
    notebook, resources = executor.preprocess(notebook, resources={})
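In both variants the notebook object is executed in place (neither function returns it), so a caller typically loads it with nbformat first; a minimal sketch (file names are illustrative):

import nbformat

nb = nbformat.read('widgets_demo.ipynb', as_version=4)
run(nb, timeout=60)                                   # cells are executed in place
nbformat.write(nb, 'widgets_demo_executed.ipynb')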
Example #12
def run_code(source: tuple, kernel_name='python3'):
    """Run code blocks inside a jupyter kernel.

    Args:
        source (tuple): source code blocks
        kernel_name: name of the kernel (from the kernel spec) to be created
    """
    import IPython
    if pkg_version.parse(IPython.__version__) < pkg_version.parse('7.6.0'):
        raise RuntimeError("IPython version {} not supported."
                           " Kale requires at least version 7.6.0.".format(
                               IPython.__version__))

    # new notebook
    spec = get_kernel_spec(kernel_name)
    notebook = nbformat.v4.new_notebook(
        metadata={
            'kernelspec': {
                'display_name': spec.display_name,
                'language': spec.language,
                'name': kernel_name,
            }
        })
    notebook.cells = [nbformat.v4.new_code_cell(s) for s in source]
    # these parameters are passed to nbconvert.ExecutePreprocessor
    jupyter_execute_kwargs = dict(timeout=-1,
                                  allow_errors=True,
                                  store_widget_state=True)

    resources = {}
    # cwd: If supplied, the kernel will run in this directory
    # resources['metadata'] = {'path': cwd}
    ep = ExecutePreprocessor(**jupyter_execute_kwargs)
    km = ep.kernel_manager_class(kernel_name=kernel_name, config=ep.config)
    # start_kernel supports several additional arguments via **kw
    km.start_kernel(extra_arguments=ep.extra_arguments)
    kc = km.client()
    kc.start_channels()
    try:
        kc.wait_for_ready(timeout=60)
    except RuntimeError:
        kc.stop_channels()
        raise
    kc.allow_stdin = False

    def signal_handler(_signal, _frame):
        raise KaleKernelException()

    # this signal is used by the thread in case an error message is received
    # from the kernel. Running sys.exit() inside the thread would terminate
    # just the thread itself, not the main process. Calling os._exit() can be
    # dangerous as the process is killed instantly (files and connections are
    # not closed, for example). With a signal we can capture the ExitCommand
    # exception from the main process and exit gracefully.
    signal.signal(signal.SIGUSR1, signal_handler)
    # start a separate thread to capture and print stdout, stderr and errors.
    # daemon mode makes the watcher thread die when the main one returns.
    x = threading.Thread(target=capture_streams,
                         args=(
                             kc,
                             True,
                         ),
                         daemon=True)
    x.start()

    try:
        # start preprocessor: run each code cell and capture the output
        ep.preprocess(notebook, resources, km=km)
    except KaleKernelException:
        # exit gracefully with error
        sys.exit(-1)
    # Give some time to the stream watcher thread to receive all messages from
    # the kernel before shutting down.
    time.sleep(1)
    km.shutdown_kernel()

    result = process_outputs(notebook.cells)
    return result
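A minimal call sketch; capture_streams, process_outputs, KaleKernelException and get_kernel_spec are Kale-internal helpers assumed to be importable alongside run_code, and the code blocks passed in are illustrative:

outputs = run_code((
    "x = 21",
    "print(x * 2)",
))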
Example #13
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import os
import nbformat
from nbconvert.preprocessors.execute import ExecutePreprocessor

input_fn = sys.argv[1]
output_fn = sys.argv[2]

timeout = 60 * 20

notebook = nbformat.read(input_fn, as_version=4)

executor = ExecutePreprocessor(timeout=timeout)
notebook, resources = executor.preprocess(notebook, resources={})

nbformat.write(notebook, output_fn)
Example #14
File: jobs.py  Project: omegaml/omegaml
    def run_notebook(self, name, event=None, timeout=None):
        """ run a given notebook immediately.

        Args:
            name (str): the name of the jobfile
            event (str): an event name
            timeout (int): timeout in seconds

        Returns:
            Metadata of results

        See Also:
            * nbconvert https://nbconvert.readthedocs.io/en/latest/execute_api.html
        """
        notebook = self.get(name)
        meta_job = self.metadata(name)
        ts = datetime.datetime.now()
        # execute kwargs
        # -- see ExecuteProcessor class
        # -- see https://nbconvert.readthedocs.io/en/latest/execute_api.html
        ep_kwargs = {
            # avoid timeouts to stop kernel
            'timeout': timeout,
            # avoid kernel at exit functions
            # -- this stops ipykernel AttributeError 'send_multipart'
            'shutdown_kernel': 'immediate',
            # set kernel name, blank is default
            # -- e.g. python3, ir
            # -- see https://stackoverflow.com/a/47053020/890242
            'kernel_name': '',
        }
        # other interesting options
        ep_kwargs.update(meta_job.kind_meta.get('ep_kwargs', {}))
        try:
            resources = {
                'metadata': {
                    'path': self.defaults.OMEGA_TMP,
                }
            }
            if not meta_job.kind_meta.get('keep_output', False):
                # https://nbconvert.readthedocs.io/en/latest/api/preprocessors.html
                cp = ClearOutputPreprocessor()
                cp.preprocess(notebook, resources)
            ep = ExecutePreprocessor(**ep_kwargs)
            ep.preprocess(notebook, resources)
        except Exception as e:
            status = 'ERROR'
            message = str(e)
        else:
            status = 'OK'
            message = ''
        finally:
            # ep may not exist if the error occurred before the
            # ExecutePreprocessor was created (e.g. in ClearOutputPreprocessor)
            if 'ep' in locals():
                del ep
        # record results
        meta_results = self.put(notebook,
                                'results/{name}_{ts}'.format(**locals()))
        meta_results.attributes['source_job'] = name
        meta_results.save()
        job_results = meta_job.attributes.get('job_results', [])
        job_results.append(meta_results.name)
        meta_job.attributes['job_results'] = job_results
        # record final job status
        job_runs = meta_job.attributes.get('job_runs', [])
        runstate = {
            'status': status,
            'ts': ts,
            'message': message,
            'results': meta_results.name if status == 'OK' else None
        }
        job_runs.append(runstate)
        meta_job.attributes['job_runs'] = job_runs
        # set event run state if event was specified
        if event:
            attrs = meta_job.attributes
            triggers = attrs['triggers'] = attrs.get('triggers', [])
            scheduled = (trigger for trigger in triggers
                         if trigger['event-kind'] == 'scheduled')
            for trigger in scheduled:
                if event == trigger['event']:
                    trigger['status'] = status
                    trigger['ts'] = ts
        meta_job.save()
        return meta_results
Example #15
import os
import sys

import nbformat
from nbconvert.preprocessors import ExecutePreprocessor
from nbconvert.preprocessors.execute import CellExecutionError

folder = 'notebooks'
os.chdir(folder)
nb_list = [fname for fname in os.listdir('.') if fname.endswith('.ipynb')]
nb_list.sort()

ex = ExecutePreprocessor()
ex.timeout = 180  # seconds
ex.interrupt_on_timeout = True

has_error = False
for notebook in nb_list:
    nb = nbformat.read(notebook, as_version=4)
    try:
        nb_executed, resources = ex.preprocess(nb, resources={})
    except CellExecutionError as e:
        print('Fail: %s \n%s\n\n' % (notebook, e.traceback[-1]))
        has_error = True

os.chdir('..')
sys.exit(-1 if has_error else 0)