Example 1
import os
import sys
import time

import nbformat
import notedown
from nbconvert.preprocessors import ExecutePreprocessor
from nbconvert.preprocessors.execute import CellExecutionError


def md2ipynb():
    assert len(sys.argv) == 3, 'usage: input.md output.ipynb'
    (src_fn, input_fn, output_fn) = sys.argv

    # timeout for each notebook, in sec
    timeout = 20 * 60
    # whether to evaluate the notebook (set EVAL=0 to skip execution)
    do_eval = int(os.environ.get('EVAL', True))
    reader = notedown.MarkdownReader()
    with open(input_fn, 'r') as f:
        notebook = reader.read(f)
    notebook['metadata'].update({'language_info': {
        'name': 'R'
    }})  # need to add language info for syntax highlight
    if do_eval:
        tic = time.time()
        executor = ExecutePreprocessor(timeout=timeout, kernel_name='ir')
        try:
            notebook, resources = executor.preprocess(notebook, resources={})
        except CellExecutionError:
            msg = 'Error executing the notebook "%s".\n\n' % input_fn
            msg += 'See notebook "%s" for the traceback.' % output_fn
            print(msg)
            raise
        finally:
            # always save the (possibly partially executed) notebook, so the
            # traceback is available in the output file on failure
            with open(output_fn, 'w') as f:
                f.write(nbformat.writes(notebook))
        print('%s: Evaluated %s in %f sec' %
              (src_fn, input_fn, time.time() - tic))
    else:
        with open(output_fn, 'w') as f:
            f.write(nbformat.writes(notebook))
    print('%s: Write results into %s' % (src_fn, output_fn))
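
A minimal entry point for the script above might look like this (a sketch; it assumes the module is invoked as "python md2ipynb.py input.md output.ipynb"):

if __name__ == '__main__':
    md2ipynb()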
Example 2
 def run_notebook(self, name):
     """
     run a given notebook immediately.
     the job parameter is the name of the job script as in ipynb.
     Inserts and returns the Metadata document for the job.
     """
     notebook = self.get(name)
     meta_job = self.metadata(name)
     # '%s' (seconds since the epoch) is a platform-specific strftime directive
     ts = datetime.datetime.now().strftime('%s')
     # execute
     try:
         ep = ExecutePreprocessor()
         ep.preprocess(notebook, {'metadata': {'path': '/'}})
     except Exception as e:
         status = str(e)
     else:
         status = 'OK'
         # record results
         meta_results = self.put(notebook,
                                 'results/{name}_{ts}'.format(**locals()))
         meta_results.attributes['source_job'] = name
         meta_results.save()
         job_results = meta_job.attributes.get('job_results', [])
         job_results.append(meta_results.name)
         meta_job.attributes['job_results'] = job_results
     # record final job status
     job_runs = meta_job.attributes.get('job_runs', {})
     job_runs[ts] = status
     meta_job.attributes['job_runs'] = job_runs
     meta_job.save()
     return meta_job
Example 3
 def test_demo(self):
     import nbformat
     from nbconvert.preprocessors.execute import ExecutePreprocessor
     here = os.path.dirname(__file__)
     demo = os.path.join(here, 'Demo.ipynb')
     nb = nbformat.read(demo, as_version=4)
     pp = ExecutePreprocessor()
     pp.allow_errors = False
     pp.preprocess(nb, resources={})
Example 4
 def run_notebook(self, name, event=None):
     """
     run a given notebook immediately.
     the job parameter is the name of the job script as in ipynb.
     Inserts and returns the Metadata document for the job.
     """
     notebook = self.get(name)
     meta_job = self.metadata(name)
     ts = datetime.datetime.now()
     # execute
     ep_kwargs = {'timeout': None}
     ep_kwargs.update(meta_job.kind_meta.get('ep_kwargs', {}))
     try:
         if not meta_job.kind_meta.get('keep_output', False):
             # see https://nbconvert.readthedocs.io/en/latest/api/preprocessors.html
             resources = {}
             cp = ClearOutputPreprocessor()
             cp.preprocess(notebook, resources)
         ep = ExecutePreprocessor(**ep_kwargs)
         ep.preprocess(notebook, {'metadata': {'path': '/'}})
     except Exception as e:
         status = 'ERROR'
         message = str(e)
     else:
         status = 'OK'
         message = ''
     # record results
     meta_results = self.put(notebook,
                             'results/{name}_{ts}'.format(**locals()))
     meta_results.attributes['source_job'] = name
     meta_results.save()
     job_results = meta_job.attributes.get('job_results', [])
     job_results.append(meta_results.name)
     meta_job.attributes['job_results'] = job_results
     # record final job status
     job_runs = meta_job.attributes.get('job_runs', [])
     runstate = {
         'status': status,
         'ts': ts,
         'message': message,
         'results': meta_results.name if status == 'OK' else None
     }
     job_runs.append(runstate)
     meta_job.attributes['job_runs'] = job_runs
     # set event run state if event was specified
     if event:
         attrs = meta_job.attributes
         triggers = attrs['triggers'] = attrs.get('triggers', [])
         scheduled = (trigger for trigger in triggers
                      if trigger['event-kind'] == 'scheduled')
         for trigger in scheduled:
             if event == trigger['event']:
                 trigger['status'] = status
                 trigger['ts'] = ts
     meta_job.save()
     return meta_results
Example 5
def pYPKa_ZE_ipynb_generator(tp, dir_="pYPKa_ZE_vectors"):

    cwd = os.getcwd()

    try:
        os.makedirs(dir_)
    except OSError as exception:
        if exception.errno == errno.EEXIST:
            pass
        else:
            print("The {} directory could not be created".format(dir_))
            return None

    os.chdir(dir_)

    with open("standard_primers.txt","w") as f: f.write(read_data_file("standard_primers.txt"))
    with open("pYPKa.gb","w") as f: f.write(read_data_file("pYPKa.gb"))
    with open("pYPK_ZE.png","w") as f: f.write(read_bin_file("pYPK_ZE.png"))
    with open(tp.id+".gb","w") as f: f.write(tp.format("gb"))

    nbtemp = read_data_file("nb_template_pYPKa_ZE_insert.md")

    name = "pYPKa_ZE_{}.ipynb".format(tp.id)

    obj = notedown.MarkdownReader()

    nb = obj.to_notebook(nbtemp.format(tp=tp.id))

    pp = ExecutePreprocessor()
    pp.timeout = 120 # seconds
    pp.interrupt_on_timeout = True

    shell = InteractiveShell.instance()

    nb_executed, resources = pp.preprocess(nb, resources={})

    g={}
    l={}

    from io import StringIO

    # silence output produced by exec'ing the notebook cells below
    old_stdout = sys.stdout
    sys.stdout = StringIO()

    for cell in nb.cells:
        if cell.cell_type == 'code':
            code = shell.input_transformer_manager.transform_cell(cell.source)
            exec(code, g, l)

    sys.stdout = old_stdout

    nbformat.write(nb, name)

    os.chdir(cwd)

    return FileLinks(dir_)
Example 6
def executenb(nb, cwd=None, km=None, **kwargs):
    """Execute a notebook and embed widget state."""
    resources = {}
    if cwd is not None:
        resources['metadata'] = {'path': cwd}
    ep = ExecutePreprocessor(**kwargs)
    with ep.setup_preprocessor(nb, resources, km=km):
        ep.log.info("Executing notebook with kernel: %s" % ep.kernel_name)
        # the kernel is already set up here, so bypass
        # ExecutePreprocessor.preprocess and call the base class directly
        nb, resources = super(ExecutePreprocessor,
                              ep).preprocess(nb, resources)
        nb.metadata.language_info = language_info(ep)
        widgets = extract_widget_state(ep)
        if widgets:
            nb.metadata.widgets = {WIDGET_STATE_MIMETYPE: widgets}
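
Because executenb() executes the notebook in place, a caller only needs to read, execute, and write (a sketch; the filenames are hypothetical, and any extra keyword arguments are forwarded to ExecutePreprocessor):

import nbformat

nb = nbformat.read('example.ipynb', as_version=4)  # hypothetical input file
executenb(nb, cwd='.', timeout=600)  # timeout is passed through to ExecutePreprocessor
nbformat.write(nb, 'example.executed.ipynb')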
Example 7
 def run_notebook(self, name, event=None):
     """
     run a given notebook immediately.
     the job parameter is the name of the job script as in ipynb.
     Inserts and returns the Metadata document for the job.
     """
     notebook = self.get(name)
     meta_job = self.metadata(name)
     ts = datetime.datetime.now()
     # execute
     try:
         ep = ExecutePreprocessor()
         ep.preprocess(notebook, {'metadata': {'path': '/'}})
     except Exception as e:
         status = 'ERROR'
         message = str(e)
     else:
         status = 'OK'
         message = ''
         # record results
         meta_results = self.put(
             notebook, 'results/{name}_{ts}'.format(**locals()))
         meta_results.attributes['source_job'] = name
         meta_results.save()
         job_results = meta_job.attributes.get('job_results', [])
         job_results.append(meta_results.name)
         meta_job.attributes['job_results'] = job_results
     # record final job status
     job_runs = meta_job.attributes.get('job_runs', [])
     runstate = {
         'status': status,
         'ts': ts,
         'message': message,
         'results': meta_results.name if status == 'OK' else None
     }
     job_runs.append(runstate)
     meta_job.attributes['job_runs'] = job_runs
     # set event run state if event was specified
     if event:
         attrs = meta_job.attributes
         triggers = attrs['triggers'] = attrs.get('triggers', [])
         scheduled = (trigger for trigger in triggers
                      if trigger['event-kind'] == 'scheduled')
         for trigger in scheduled:
             if event == trigger['event']:
                 trigger['status'] = status
                 trigger['ts'] = ts
     return meta_job.save()
Example 8
def execute_notebook(notebook):
    # Configure the notebook executor and then run the given notebook,
    # returning the results to the caller
    c = Config()
    c.ScriptExporter.preprocessors = [NarrativePreprocessor]
    nar_templates = os.path.join(os.environ.get('NARRATIVE_DIR',
                                                '.'), 'src', 'biokbase',
                                 'narrative', 'exporter', 'templates')
    c.TemplateExporter.template_path = ['.', nar_templates]

    # Initialize the notebook execution object, and run the notebook. If a
    # timeout (in seconds) is defined in KB_CELL_TIMEOUT, use that for
    # how long we allow a cell to run before timing it out, otherwise use
    # a default value of 60 minutes.
    # /tmp is the directory where the notebook will be run.
    if 'KB_CELL_TIMEOUT' in os.environ:
        timeout = int(os.environ['KB_CELL_TIMEOUT'])
    else:
        timeout = 3600
    ep = ExecutePreprocessor(timeout=timeout)
    resources = {'metadata': {'path': '/tmp'}}
    return ep.preprocess(notebook, resources)
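
A driver for execute_notebook() could look like the following (a sketch; the filename is hypothetical, and it assumes the NarrativePreprocessor and template setup referenced above are importable):

import nbformat

nb = nbformat.read('narrative.ipynb', as_version=4)  # hypothetical input file
executed_nb, resources = execute_notebook(nb)
nbformat.write(executed_nb, 'narrative.executed.ipynb')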
Example 9
 def JKConnect(self) -> None:
     runtime_dir = pathlib.Path(jupyter_core.paths.jupyter_runtime_dir())
     connection_files = runtime_dir.glob("kernel-*.json")
     # strip the 'kernel-' prefix and '.json' suffix to recover the kernel id
     # (str.lstrip/rstrip strip character sets, not substrings)
     source = '\n'.join(
         connection_file.name[len('kernel-'):-len('.json')] + ' ' +
         datetime.fromtimestamp(connection_file.stat().st_ctime).strftime(
             "%m/%d %H:%M") for connection_file in connection_files)
     proc = subprocess.run("fzf-tmux|awk '{print $1}'",
                           input=source,
                           stdout=PIPE,
                           shell=True,
                           text=True)
     connection_file = 'kernel-%s.json' % proc.stdout.strip()
     connection_file = runtime_dir.joinpath(connection_file).as_posix()
     kc = BlockingKernelClient()
     try:
         kc.load_connection_file(connection_file)
         kc.execute_interactive('', timeout=1)
     except (TimeoutError, FileNotFoundError):
         self.nvim.command("echoerr 'Selected connection is dead!'")
     else:
         self.executor = ExecutePreprocessor()
         self.executor.kc = kc
         self.nvim.command("echo 'Successfully connected!'")
Example 10
 def test_all_notebooks(self):
     """ Test all notebooks except blacklist. Blacklisted notebooks take too long."""
     print("testing all notebooks")
     blacklist = ["SyntheticData.ipynb"]
     pythonkernel = "python" + str(sys.version_info[0])
     this_dir = os.path.dirname(__file__)
     nbpath = os.path.join(this_dir, "../notebooks/")
     # see http://nbconvert.readthedocs.io/en/stable/execute_api.html
     ep = ExecutePreprocessor(
         timeout=120, kernel_name=pythonkernel, interrupt_on_timeout=True
     )
     lfiles = glob.glob(nbpath + "*.ipynb")
     for notebook_filename in lfiles:
         if os.path.basename(notebook_filename) not in blacklist:
             print(">> Testing notebook", notebook_filename)
             t = time.time()
             self._execNotebook(ep, notebook_filename, nbpath)
             print(notebook_filename, "took %g seconds." % (time.time() - t))
Example 11
class Executor(object):
    def __init__(self, nvim: Nvim) -> None:
        self.nvim = nvim
        #        self.html_path = tempfile.mktemp(suffix=".html")
        self.html_path = '/tmp/executor.html'
        self.project_root = pathlib.Path(__file__).parent / ('../' * 3)
        self.executor = None
        self.html_exporter = nbconvert.exporters.HTMLExporter()

    def print(self, txt: Any) -> None:
        txt = str(txt)
        self.nvim.command('new')
        for line in txt.splitlines():
            self.nvim.current.buffer.append(line)

    @staticmethod
    def parse_traceback(line: str) -> Tuple[str, int, str]:
        # parse one line of an IPython ANSI-colored traceback into
        # (file, line number, function name)
        f = line.lstrip('\033[0;32m').split('\033')[0]
        l = int(line.split('-> ')[-1].split('\033')[0])
        m = line.split('in \033[0;36m')[-1].split('\033')[0]
        return f, l, m

    @staticmethod
    def code_surjection(buffer: Buffer,
                        nb: NotebookNode) -> List[Tuple[int, int]]:
        # map each buffer line to a (cell index, line-within-cell) pair;
        # lines that belong to no cell stay (-1, -1)
        surjection = [(-1, -1)] * len(buffer)
        i = 0
        for c, cell in enumerate(nb.cells):
            for l, line in enumerate(cell.source.splitlines()):
                while line != buffer[i]:
                    i += 1
                surjection[i] = (c, l)
        return surjection

    @pynvim.command('JNConnect')
    def JKConnect(self) -> None:
        runtime_dir = pathlib.Path(jupyter_core.paths.jupyter_runtime_dir())
        connection_files = runtime_dir.glob("kernel-*.json")
        # strip the 'kernel-' prefix and '.json' suffix to recover the kernel id
        # (str.lstrip/rstrip strip character sets, not substrings)
        source = '\n'.join(
            connection_file.name[len('kernel-'):-len('.json')] + ' ' +
            datetime.fromtimestamp(connection_file.stat().st_ctime).strftime(
                "%m/%d %H:%M") for connection_file in connection_files)
        proc = subprocess.run("fzf-tmux|awk '{print $1}'",
                              input=source,
                              stdout=PIPE,
                              shell=True,
                              text=True)
        connection_file = 'kernel-%s.json' % proc.stdout.strip()
        connection_file = runtime_dir.joinpath(connection_file).as_posix()
        kc = BlockingKernelClient()
        try:
            kc.load_connection_file(connection_file)
            kc.execute_interactive('', timeout=1)
        except (TimeoutError, FileNotFoundError):
            self.nvim.command("echoerr 'Selected connection is dead!'")
        else:
            self.executor = ExecutePreprocessor()
            self.executor.kc = kc
            self.nvim.command("echo 'Successfully connected!'")

    @pynvim.command('JNRun', sync=False)
    def JNRun(self) -> None:
        buffer = '\n'.join(self.nvim.current.buffer)
        nb = jupytext.reads(buffer, {'extension': '.py'})
        surjection = self.code_surjection(self.nvim.current.buffer, nb)
        current_line = self.nvim.call('line', '.') - 1
        current_cell = surjection[current_line][0]
        if current_cell == -1:
            self.nvim.command('echoerr "Current line is out of cell."')
            return

        # cell execution
        _, nb.cells[current_cell].outputs = self.executor.run_cell(
            nb.cells[current_cell], current_cell)
        #nb.cells[current_cell], resources = self.executor.preprocess_cell(nb.cells[current_cell], {}, current_cell)

        # error handling
        for output in nb.cells[current_cell].outputs:
            if output.output_type == 'error':
                f, l, m = self.parse_traceback(output.traceback[2])
                f = self.nvim.current.buffer.name
                l = surjection.index((current_cell, l - 1)) + 1
                self.nvim.command('enew|setl nohidden|setl bt=nofile')
                self.nvim.current.buffer.append('  File "%s", line %d, in %s' %
                                                (f, l, m))
                for line in output.traceback[3:-1]:
                    f, l, m = self.parse_traceback(line)
                    self.nvim.current.buffer.append(
                        '  File "%s", line %d, in %s' % (f, l, m))
                self.nvim.command('compiler python|cbuffer|cw|set hidden')
                break

        # html export
        script, resources = self.html_exporter.from_notebook_node(nb)
        script = script.splitlines()
        script.insert(
            3,
            '<script type="text/javascript" src="http://livejs.com/live.js"></script>'
        )
        script = '\n'.join(script)
        with open(self.html_path, "w") as f:
            f.write(script)

    @pynvim.command('JNDevTest', nargs='*', sync=True)
    def JNDevTest(self, args):
        self.nvim.command('UpdateRemotePlugins')
        subprocess.call('tmux split -h "nvim %s"' %
                        (self.project_root / 'test/test.py'),
                        shell=True)
Example 12
                       gbref=gbref,
                       gblink=gblink,
                       templatesize=templatesize,
                       insertseguid=insertseguid,
                       finalcseguidZ=finalcseguidZ,
                       finalcseguidE=finalcseguidE,
                       fpn=fp.name,
                       fps=fp.seq,
                       rpn=rp.name,
                       rps=rp.seq)

    obj = notedown.MarkdownReader()

    nb = obj.to_notebook(content)

    pp = ExecutePreprocessor(timeout=600, kernel_name='python3')
    pp.timeout = 120  # seconds; note this overrides the 600 passed above
    pp.interrupt_on_timeout = True

    pp.preprocess(nb, resources={})

    with open(newname, 'wt') as f:
        nbformat.write(nb, f)

#os.chdir(cwd)

# with open("README_template.md", "r", encoding="utf8") as f:
#     t=f.read()

# table = "| No. | TP | Promoter vector | Terminator vector | Jupyter nb |\n"
# table+= "|-----|----|-----------------|-------------------|------------|\n"
Example 13
from nbconvert.preprocessors import ExecutePreprocessor


def run(notebook, timeout=30):
    executor = ExecutePreprocessor(timeout=timeout)
    # preprocess() executes the notebook in place
    notebook, resources = executor.preprocess(notebook, resources={})
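
Since preprocess() executes the notebook in place, a typical caller reads a notebook, runs it, and writes it back (a sketch; the path is hypothetical):

import nbformat

nb = nbformat.read('analysis.ipynb', as_version=4)  # hypothetical path
run(nb, timeout=60)
nbformat.write(nb, 'analysis.ipynb')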
Example 14
from nbconvert.preprocessors import ExecutePreprocessor


def run(notebook, timeout=30, store_widget_state=True):
    executor = ExecutePreprocessor(timeout=timeout,
                                   store_widget_state=store_widget_state)
    notebook, resources = executor.preprocess(notebook, resources={})
Example 15
from nbconvert.preprocessors import ExecutePreprocessor


def run(notebook, timeout=30):
    executor = ExecutePreprocessor(timeout=timeout)
    notebook, resources = executor.preprocess(notebook, resources={})
Example 16
def run_code(source: tuple, kernel_name='python3'):
    """Run code blocks inside a jupyter kernel.

    Args:
        source (tuple): source code blocks
        kernel_name: name of the kernel (from the kernel spec) to be created
    """
    import IPython
    if pkg_version.parse(IPython.__version__) < pkg_version.parse('7.6.0'):
        raise RuntimeError("IPython version {} not supported."
                           " Kale requires at least version 7.6.0.".format(
                               IPython.__version__))

    # new notebook
    spec = get_kernel_spec(kernel_name)
    notebook = nbformat.v4.new_notebook(
        metadata={
            'kernelspec': {
                'display_name': spec.display_name,
                'language': spec.language,
                'name': kernel_name,
            }
        })
    notebook.cells = [nbformat.v4.new_code_cell(s) for s in source]
    # these parameters are passed to nbconvert.ExecutePreprocessor
    jupyter_execute_kwargs = dict(timeout=-1,
                                  allow_errors=True,
                                  store_widget_state=True)

    resources = {}
    # cwd: If supplied, the kernel will run in this directory
    # resources['metadata'] = {'path': cwd}
    ep = ExecutePreprocessor(**jupyter_execute_kwargs)
    km = ep.kernel_manager_class(kernel_name=kernel_name, config=ep.config)
    # start_kernel supports several additional arguments via **kw
    km.start_kernel(extra_arguments=ep.extra_arguments)
    kc = km.client()
    kc.start_channels()
    try:
        kc.wait_for_ready(timeout=60)
    except RuntimeError:
        kc.stop_channels()
        raise
    kc.allow_stdin = False

    def signal_handler(_signal, _frame):
        raise KaleKernelException()

    # this signal is used by the thread in case an error message is received
    # from the kernel. Running sys.exit() inside the thread would terminate
    # just the thread itself, not the main process. Calling os._exit() can be
    # dangerous as the process is killed instantly (files and connections are
    # not closed, for example). With a signal we can capture the ExitCommand
    # exception from the main process and exit gracefully.
    signal.signal(signal.SIGUSR1, signal_handler)
    # start a separate thread to capture and print stdout, stderr, and errors.
    # daemon mode will make the watcher thread die when the main one returns.
    x = threading.Thread(target=capture_streams,
                         args=(
                             kc,
                             True,
                         ),
                         daemon=True)
    x.start()

    try:
        # start preprocessor: run each code cell and capture the output
        ep.preprocess(notebook, resources, km=km)
    except KaleKernelException:
        # exit gracefully with error
        sys.exit(-1)
    # Give some time to the stream watcher thread to receive all messages from
    # the kernel before shutting down.
    time.sleep(1)
    km.shutdown_kernel()

    result = process_outputs(notebook.cells)
    return result
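
A caller passes run_code() a tuple of code strings, each of which becomes one code cell (a sketch; it assumes a 'python3' kernel spec is installed and that process_outputs() collects the outputs as in the code above):

outputs = run_code(("x = 40 + 2", "print(x)"))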
Example 17
def pathway(pth, dir_="ypkassembly", pYPKa_A=True, print=print):

    if len(pth)==0: # pth has to contain some sequences
        print("No sequences found.")
        return None, None

    names = [s.name for s in pth] # sequence names have to be unique

    #if len(names)>len(set(names)):
    #    print("Gene names are not unique. Please rename sequences so that each sequence has a unique name.\n")
    #    print("Gene names parsed from Data page:\n\n")
    #    for name in names:
    #        print(name)
    #    return None, None

    log=""

    pYPK0 = read(read_data_file("pYPK0.gb"))
    pYPKa = read(read_data_file("pYPKa.gb"))

    from Bio.Restriction import ZraI, AjiI, EcoRV

    files = {"standard_primers.txt"     : read_data_file("standard_primers.txt"),
             "pYPKa.gb"                 : read_data_file("pYPKa.gb"),
             "pYPKpw.gb"                : read_data_file("pYPKpw.gb"),
             "tp_g_tp.png"              : read_bin_file("tp_g_tp.png"),
             "pYPK_ZE.png"              : read_bin_file("pYPK_ZE.png"),
             "pYPK_A.png"               : read_bin_file("pYPK_A.png"),
             "pw.png"                   : read_bin_file("pw.png"),
             "start.bat"                : read_data_file("start.bat"),
             "start.sh"                 : read_data_file("start.sh"),}

    cas_vectors = ""
    tp_gene_tp_links = ""
    pYPKa_clones=""
    pwname = "pYPK0"
    genes = 0
    nbflag=False

    while pth:
        genes+=1
        first = pth.pop(0)
        # is sequence a tp-gene-tp vector?
        if cloned(pYPK0, (ZraI, EcoRV),  first):
            m = re_cas.search(first.description)
            if not m:
                raise Exception( "{} is a pYPK0 tp-gene_tp sequence but was not correctly named.".format(last.description))
            fn = first.description+".gb"
            files[fn] = first.format("gb")
            cas_vectors+= fn+"\n"
            tp_gene_tp_links+= "\n[{}]({})\n".format( first.description, fn )
            tp1_description  = m.group(1)
            gene_description = m.group(2)
            tp2_description  = m.group(3)
            genes+=1
        else:
            try:
                middle = pth.pop(0)
                last   = pth.pop(0)
            except IndexError:
                raise Exception("not enough sequences")

            prom, gene, term = first, middle, last

            if cloned(pYPKa, ZraI,  prom):
                m = re_Z.search(prom.description)
                if not m:
                    raise Exception( "{} is a pYPKa_A_gene sequence but was incorrectly named.".format(gene.description))
                prom_description = m.group(1)
                files[m.group(0)+".gb"] = prom.format("gb")
            else:
                #print("Z"+str(files.has_key("pYPKa_ZE_{}.md".format(prom.id)))+prom.id)
                if "pYPKa_ZE_{}.md".format(prom.id) not in files:
                    files[prom.id+".gb"] = prom.format("gb")
                    nbtemp = read_data_file("nb_template_pYPKa_ZE_insert.md")
                    files["pYPKa_ZE_{}.md".format(prom.id)] = nbtemp.format(tp=prom.id)
                    pYPKa_clones+="[pYPKa_ZE_{n}](pYPKa_ZE_{n}.ipynb)  \n".format(n=prom.id)
                prom_description = prom.id

            if cloned(pYPKa, AjiI,  gene):
                m = re_A.search(gene.description)
                if not m:
                    raise Exception( "{} is a pYPKa_A_gene sequence but was incorrectly named.".format(gene.description))
                gene_description = m.group(1)
                files[m.group(0)+".gb"] = gene.format("gb")
                if not pYPKa_A:
                    nbflag=True

            else:
                n = "pYPKa_A_{}".format(gene.locus)
                files[gene.locus+".gb"] = gene.format("gb")
                if pYPKa_A:
                    nbtemp = read_data_file("nb_template_pYPKa_A_insert.md")
                    files[n+".md"] = nbtemp.format(insert=gene.locus)
                    gene_description = gene.locus
                    pYPKa_clones+="[{}]({}.ipynb)  \n".format(n, n)
                else:
                    gene_description = gene.locus

            if cloned(pYPKa, EcoRV, term):
                m = re_E.search(term.description)
                if not m:
                    raise Exception( "{} is a pYPKa_A_gene sequence but was incorrectly named.".format(gene.description))
                term_description = m.group(1)
                files[m.group(0)+".gb"] = term.format("gb")
            else:
                #print("E"+str(files.has_key("pYPKa_ZE_{}.md".format(term.id)))+term.id)
                if "pYPKa_ZE_{}.md".format(term.id) not in files:
                    files[term.id+".gb"] = term.format("gb")
                    nbtemp = read_data_file("nb_template_pYPKa_ZE_insert.md")
                    files["pYPKa_ZE_{}.md".format(term.id)] = nbtemp.format(tp=term.id)
                    pYPKa_clones+="[pYPKa_ZE_{n}](pYPKa_ZE_{n}.ipynb)  \n".format(n=term.id)
                term_description = term.id

            x = "pYPK0_{}_{}_{}".format(prom_description, gene_description, term_description)

            if pYPKa_A or nbflag:
                nbtemp = read_data_file("nb_template_pYPK0_tp_gene_tp.md")
                files[x+".md"] = nbtemp.format(tpz=prom_description,
                                                gene=gene_description,
                                                tpe=term_description)
            else:
                nbtemp = read_data_file("nb_template_pYPK0_tp_gene_tp_gap_repair.md")
                files[x+".md"] = nbtemp.format(tpz=prom_description,
                                                gene=gene.locus,
                                                tpe=term_description)
            nbflag=False

            cas_vectors+="\n"+x+".gb\n"
            tp_gene_tp_links+="[{}]({}.ipynb)  \n".format(x, x)




        pwname+="_{}".format(gene_description)

    ###########################################################################

    obj = notedown.MarkdownReader()

    cwd = os.getcwd()

    try:
        os.makedirs(dir_)
    except OSError as exception:
        if exception.errno == errno.EEXIST:
            print("The {} directory already exists! Please delete or choose another name.".format(dir_))
        else:
            print("The {} directory could not be created".format(dir_))
        return None, None

    msg = "created subdirectory {}\n".format(dir_)
    print(msg)
    log+=msg

    os.chdir(dir_)

    msg = "\nsaving files sequence files and images..\n"
    print(msg)
    log+=msg

    for name, content in sorted((n, c) for n, c in list(files.items()) if not n.endswith(".md")):
        msg = "\nsaving: "+name
        print(msg)
        log+=msg
        # content may be bytes (images) or str (text files)
        mode = {True:"wb", False:"w"}[hasattr(content, "decode")]
        with open(name, mode) as f:
            f.write(content)

    print("\n")
    log+="\n"

    msg = "\nsaving notebook files ..\n"
    print(msg)
    log+=msg

    for name, content in sorted((n, c) for n, c in list(files.items()) if n.endswith(".md")):
        newname = os.path.splitext(name)[0]+".ipynb"
        msg = "\nsaving: "+newname
        print(msg)
        log+=msg
        nbformat.write(obj.to_notebook(content), newname)

    pp = ExecutePreprocessor()
    pp.timeout = 120 # seconds
    pp.interrupt_on_timeout = True

    print("\n")
    log+="\n"

    msg = "\nexecuting pYPKa notebooks..\n"
    print(msg)
    log+=msg

    shell = InteractiveShell.instance()
    #new_primers = []

    g={}
    l={}

    pypkanbs = sorted([f for f in os.listdir(".") if re.match(r"pYPKa.+\.ipynb", f)])

    if pypkanbs:
        for name in pypkanbs:
            msg = "\nexecuting: "+name
            print(msg)
            log+=msg
            with io.open(name, 'r', encoding='utf-8') as f:
                nb = nbformat.read(f, 4)
            nb_executed, resources = pp.preprocess(nb, resources={})
            for cell in nb.cells:
                if cell.cell_type == 'code':
                    code = shell.input_transformer_manager.transform_cell(cell.source)
                    exec(code, g, l)
            #new_primers.extend( (l["fp"], l["rp"]) )
            nbformat.write(nb, name)
            g={}
            l={}
    else:
        msg = "\nNo pYPKa notebooks found.\n"
        print(msg)
        log+=msg
    print("\n")
    log+="\n"
    msg = "\nexecuting pYPK0 notebooks..\n"
    print(msg)
    log+=msg

    g={}
    l={}
    resources={}

    pypk0nbs = sorted([f for f in os.listdir(".") if re.match(r"pYPK0.+\.ipynb", f)])

    if pypk0nbs:
        for name in pypk0nbs:
            msg = "\nexecuting: "+name
            print(msg)
            log+=msg
            with io.open(name, 'r', encoding='utf-8') as f:
                nb = nbformat.read(f, 4)
            nb_executed, resources = pp.preprocess(nb, resources={})
            nbformat.write(nb, name)
            for cell in nb.cells:
                if cell.cell_type == 'code':
                    code = shell.input_transformer_manager.transform_cell(cell.source)
                    exec(code, g, l)
            #try:
                #new_primers.extend( (l["fp"], l["rp"]) )
            #except KeyError:
            #    pass
            g={}
            l={}
    else:
        msg = "\nNo pYPK0 notebooks found.\n"
        print(msg)
        log+=msg
    nbtemp = read_data_file("nb_template_pYPK0_pw.md")

    #primer_list = "\n".join( p.format("tab") for p in new_primers )

    #if new_primers:
    #    msg = u"\n\nsaving new_primers.txt..\n"
    #with open("new_primers.txt","wb") as f: f.write("\n".join( p.format("fasta") for p in new_primers ))

    #print("qwerty")
    #print(pwname)
    #print(os.path.basename(dir_))
    #print(tp_gene_tp_links)
    #print(add_space(cas_vectors, 17))
    #print(pYPKa_clones)
    #print(str(genes))
    #print("123456789")

    pwnb = nbtemp.format(name=pwname,
                         filename=os.path.basename(dir_),
                         tp_gene_tp_links = tp_gene_tp_links,
                         cas_vectors=add_space(cas_vectors, 17),
                         pYPKa_clones=pYPKa_clones,
                         length=genes)

    nbformat.write(obj.to_notebook(pwnb), "pw.ipynb")

    #nb = nbformat.writes("pw.ipynb", obj.to_notebook(pwnb))
    #with open("pw.ipynb", "w") as f: f.write(nb)

    msg = "\n\nexecuting final pathway notebook..\n"
    print(msg)
    log+=msg
    msg = "\nexecuting: pw.ipynb"
    print(msg)
    log+=msg
    with io.open("pw.ipynb", 'r', encoding='utf-8') as f: nb = nbformat.read(f, 4)
    nb_executed, resources = pp.preprocess(nb, resources={})
    nbformat.write(nb, "pw.ipynb")

    #for nb_ in [f for f in os.listdir(".") if f.endswith(".ipynb")]:
    #    subprocess.Popen(["ipython", "nbconvert", os.path.join(dir_, nb_)])

    os.chdir(cwd)

    fl = FileLink(os.path.join(dir_, "pw.ipynb"))

    #   pp = None

    return fl, log
Example 18
nb_style

# In[7]:

from pathlib import Path
from pidgin import markdown, conventions
from nbconvert.exporters.python import PythonExporter
from IPython.utils.capture import capture_output
from nbconvert.preprocessors.execute import ExecutePreprocessor

# In[8]:

exporter = PythonExporter(preprocessors=[
    markdown.Normalize(),
    conventions.Normalize(),
    ExecutePreprocessor()
])


def test_nbconvert_script():
    with capture_output():
        Path('informal_script.py').write_text(
            exporter.from_filename(__file__)[0])
        import informal_script
    assert informal_script, """The script was not created."""
    print('nbconvert is complete.')


(__name__ == '__main__' and
 Path(__file__).parts[-1].startswith('informal.')) and test_nbconvert_script()
Example 19
    def run_notebook(self, name, event=None, timeout=None):
        """ run a given notebook immediately.

        Args:
            name (str): the name of the jobfile
            event (str): an event name
            timeout (int): timeout in seconds

        Returns:
            Metadata of results

        See Also:
            * nbconvert https://nbconvert.readthedocs.io/en/latest/execute_api.html
        """
        notebook = self.get(name)
        meta_job = self.metadata(name)
        ts = datetime.datetime.now()
        # execute kwargs
        # -- see the ExecutePreprocessor class
        # -- see https://nbconvert.readthedocs.io/en/latest/execute_api.html
        ep_kwargs = {
            # avoid timeouts that would stop the kernel
            'timeout': timeout,
            # skip the kernel's at-exit functions
            # -- this avoids an ipykernel AttributeError on 'send_multipart'
            'shutdown_kernel': 'immediate',
            # set kernel name, blank is default
            # -- e.g. python3, ir
            # -- see https://stackoverflow.com/a/47053020/890242
            'kernel_name': '',
        }
        # allow per-job overrides via the job's kind_meta
        ep_kwargs.update(meta_job.kind_meta.get('ep_kwargs', {}))
        try:
            resources = {
                'metadata': {
                    'path': self.defaults.OMEGA_TMP,
                }
            }
            if not meta_job.kind_meta.get('keep_output', False):
                # https://nbconvert.readthedocs.io/en/latest/api/preprocessors.html
                cp = ClearOutputPreprocessor()
                cp.preprocess(notebook, resources)
            ep = ExecutePreprocessor(**ep_kwargs)
            ep.preprocess(notebook, resources)
        except Exception as e:
            status = 'ERROR'
            message = str(e)
        else:
            status = 'OK'
            message = ''
        finally:
            # ep may not be defined if ExecutePreprocessor() itself raised
            if 'ep' in locals():
                del ep
        # record results
        meta_results = self.put(notebook,
                                'results/{name}_{ts}'.format(**locals()))
        meta_results.attributes['source_job'] = name
        meta_results.save()
        job_results = meta_job.attributes.get('job_results', [])
        job_results.append(meta_results.name)
        meta_job.attributes['job_results'] = job_results
        # record final job status
        job_runs = meta_job.attributes.get('job_runs', [])
        runstate = {
            'status': status,
            'ts': ts,
            'message': message,
            'results': meta_results.name if status == 'OK' else None
        }
        job_runs.append(runstate)
        meta_job.attributes['job_runs'] = job_runs
        # set event run state if event was specified
        if event:
            attrs = meta_job.attributes
            triggers = attrs['triggers'] = attrs.get('triggers', [])
            scheduled = (trigger for trigger in triggers
                         if trigger['event-kind'] == 'scheduled')
            for trigger in scheduled:
                if event == trigger['event']:
                    trigger['status'] = status
                    trigger['ts'] = ts
        meta_job.save()
        return meta_results
Example 20
import os
import sys

from nbconvert.preprocessors import ExecutePreprocessor
from nbconvert.preprocessors.execute import CellExecutionError
import nbformat

folder = 'notebooks'
os.chdir(folder)
nb_list = [fname for fname in os.listdir('.') if fname.endswith('.ipynb')]
nb_list.sort()

ex = ExecutePreprocessor()
ex.timeout = 180  # seconds
ex.interrupt_on_timeout = True

has_error = False
for notebook in nb_list:
    nb = nbformat.read(notebook, as_version=4)
    try:
        nb_executed, resources = ex.preprocess(nb, resources={})
    except CellExecutionError as e:
        print('Fail: %s \n%s\n\n' % (notebook, e.traceback[-1]))
        has_error = True

os.chdir('..')
sys.exit(-1 if has_error else 0)