def test_inline_merge_source_empty():
    base = new_notebook()
    local = new_notebook()
    remote = new_notebook()
    expected = new_notebook()
    merged, decisions = merge_notebooks(base, local, remote)
    assert merged == expected
def test_inline_merge_dummy_notebooks():
    "Just the basic empty notebook passes through."
    base = new_notebook()
    local = new_notebook()
    remote = new_notebook()
    expected = new_notebook()
    merged, decisions = merge_notebooks(base, local, remote)
    assert expected == merged
def test_inline_merge_notebook_version():
    "Minor version gets bumped to max."
    base = new_notebook(nbformat=4, nbformat_minor=0)
    local = new_notebook(nbformat=4, nbformat_minor=1)
    remote = new_notebook(nbformat=4, nbformat_minor=2)
    expected = new_notebook(nbformat=4, nbformat_minor=2)
    merged, decisions = merge_notebooks(base, local, remote)
    assert expected == merged
def test_inline_merge_attachments():
    # FIXME: Use output creation utils Vidar wrote in another test file
    base = new_notebook()
    local = new_notebook()
    remote = new_notebook()
    expected = new_notebook()
    merged, decisions = merge_notebooks(base, local, remote)
    assert merged == expected
def test_modify_cell_with_submissions(self, preprocessor, gradebook, resources):
    nb = new_notebook()
    nb.cells.append(create_grade_and_solution_cell("hello", "markdown", "foo", 2))
    nb, resources = preprocessor.preprocess(nb, resources)

    notebook = gradebook.find_notebook("test", "ps0")
    grade_cell = gradebook.find_grade_cell("foo", "test", "ps0")
    solution_cell = gradebook.find_solution_cell("foo", "test", "ps0")
    source_cell = gradebook.find_source_cell("foo", "test", "ps0")
    assert grade_cell.max_score == 2
    assert source_cell.source == "hello"

    gradebook.add_student("hacker123")
    submission = gradebook.add_submission("ps0", "hacker123").notebooks[0]
    assert len(notebook.submissions) == 1

    nb.cells[-1] = create_grade_and_solution_cell("goodbye", "markdown", "foo", 1)
    nb, resources = preprocessor.preprocess(nb, resources)

    gradebook.db.refresh(notebook)
    gradebook.db.refresh(submission)
    gradebook.db.refresh(grade_cell)
    gradebook.db.refresh(solution_cell)
    gradebook.db.refresh(source_cell)
    assert len(notebook.submissions) == 1
    assert grade_cell.max_score == 1
    assert source_cell.source == "goodbye"
def setUp(self):
    nbdir = self.notebook_dir
    subdir = pjoin(nbdir, 'foo')

    try:
        os.mkdir(subdir)
    except OSError as e:
        # Deleting the folder in an earlier test may have failed
        if e.errno != errno.EEXIST:
            raise
    self.addCleanup(partial(shutil.rmtree, subdir, ignore_errors=True))

    with io.open(pjoin(subdir, 'nb1.ipynb'), 'w', encoding='utf-8') as f:
        nb = new_notebook()
        write(nb, f, version=4)

    self.sess_api = SessionAPI(self.request)

    @self.addCleanup
    def cleanup_sessions():
        for session in self.sess_api.list().json():
            self.sess_api.delete(session['id'])

        # This is necessary in some situations on Windows: without it, it
        # fails to delete the directory because something is still using
        # it. I think there is a brief period after the kernel terminates
        # where Windows still treats its working directory as in use. On my
        # Windows VM, 0.01s is not long enough, but 0.1s appears to work
        # reliably. -- TK, 15 December 2014
        time.sleep(0.1)
def test_grade_existing_manual_grade(self, preprocessors, gradebook, resources):
    """Is a cell with an existing manual grade handled correctly?"""
    cell = create_grade_and_solution_cell("hello", "markdown", "foo", 1)
    nb = new_notebook()
    nb.cells.append(cell)
    preprocessors[0].preprocess(nb, resources)
    gradebook.add_submission("ps0", "bar")
    cell.source = "hello!"
    preprocessors[1].preprocess(nb, resources)

    grade_cell = gradebook.find_grade("foo", "test", "ps0", "bar")
    assert grade_cell.score == 0
    assert grade_cell.max_score == 1
    assert grade_cell.auto_score is None
    assert grade_cell.manual_score is None
    assert grade_cell.needs_manual_grade

    grade_cell.manual_score = 1
    grade_cell.needs_manual_grade = False
    gradebook.db.commit()

    preprocessors[1].preprocess(nb, resources)

    grade_cell = gradebook.find_grade("foo", "test", "ps0", "bar")
    assert grade_cell.score == 1
    assert grade_cell.max_score == 1
    assert grade_cell.auto_score is None
    assert grade_cell.manual_score == 1
    assert grade_cell.needs_manual_grade
def new(self, model=None, path=''): """Create a new file or directory and return its model with no content. To create a new untitled entity in a directory, use `new_untitled`. """ path = path.strip('/') if model is None: model = {} if path.endswith('.ipynb'): model.setdefault('type', 'notebook') else: model.setdefault('type', 'file') # no content, not a directory, so fill out new-file model if 'content' not in model and model['type'] != 'directory': if model['type'] == 'notebook': model['content'] = new_notebook() model['format'] = 'json' else: model['content'] = '' model['type'] = 'file' model['format'] = 'text' model = self.save(model, path) return model
def build_notebook(self, with_json_outputs=False):
    """Build a notebook in memory for use with preprocessor tests"""
    outputs = [
        nbformat.new_output("stream", name="stdout", text="a"),
        nbformat.new_output("display_data", data={'text/plain': 'b'}),
        nbformat.new_output("stream", name="stdout", text="c"),
        nbformat.new_output("stream", name="stdout", text="d"),
        nbformat.new_output("stream", name="stderr", text="e"),
        nbformat.new_output("stream", name="stderr", text="f"),
        nbformat.new_output("display_data", data={'image/png': 'Zw=='}),  # g
        nbformat.new_output("display_data", data={'application/pdf': 'aA=='}),  # h
    ]
    if with_json_outputs:
        outputs.extend([
            nbformat.new_output(
                "display_data", data={'application/json': [1, 2, 3]}
            ),  # j
            nbformat.new_output(
                "display_data", data={'application/json': {'a': 1, 'c': {'b': 2}}}
            ),  # k
            nbformat.new_output(
                "display_data", data={'application/json': 'abc'}
            ),  # l
            nbformat.new_output(
                "display_data", data={'application/json': 15.03}
            ),  # m
        ])

    cells = [nbformat.new_code_cell(source="$ e $", execution_count=1, outputs=outputs),
             nbformat.new_markdown_cell(source="$ e $")]

    return nbformat.new_notebook(cells=cells)
def notebook(self, s):
    """Export and convert IPython notebooks.

    This function can export the current IPython history to a notebook file.
    For example, to export the history to "foo.ipynb" do "%notebook foo.ipynb".

    The -e or --export flag is deprecated in IPython 5.2, and will be
    removed in the future.
    """
    args = magic_arguments.parse_argstring(self.notebook, s)

    from nbformat import write, v4

    cells = []
    hist = list(self.shell.history_manager.get_range())
    if len(hist) <= 1:
        raise ValueError('History is empty, cannot export')
    for session, execution_count, source in hist[:-1]:
        cells.append(v4.new_code_cell(
            execution_count=execution_count,
            source=source
        ))
    nb = v4.new_notebook(cells=cells)
    with io.open(args.filename, 'w', encoding='utf-8') as f:
        write(nb, f, version=4)
def split_into_units(nb_name, include_header=True):
    """Split notebook into units."""
    try:
        nb = nbformat.read(nb_name, as_version=4)
    except IOError as e:
        if e.errno == 2:
            # Write the error message to stderr rather than passing the
            # stream object as a second print argument.
            print('File not found: {0}'.format(nb_name), file=sys.stderr)
            return []
        else:
            raise
    cells = nb.cells
    indexes = [i for i, cell in enumerate(cells)
               if cell.cell_type == 'markdown' and cell.source.startswith('# ')]
    separated_cells = [cells[i:j] for i, j in zip(indexes, indexes[1:] + [None])]
    units = [current.new_notebook(cells=cells,
                                  metadata={'name': cells[0].source.split('\n')[0][2:]})
             for cells in separated_cells]
    if not include_header:
        for unit in units:
            # The first line is the header.
            unit.cells[0].source = '\n'.join(unit.cells[0].source.split('\n')[1:])
    return units
def test_preprocessor_collapsible_headings():
    """Test collapsible_headings preprocessor."""
    # check import shortcut
    from jupyter_contrib_nbextensions.nbconvert_support import CollapsibleHeadingsPreprocessor  # noqa
    cells = []
    for lvl in range(6, 1, -1):
        for collapsed in (True, False):
            cells.extend([
                nbf.new_markdown_cell(
                    source='{} {} heading level {}'.format(
                        '#' * lvl,
                        'Collapsed' if collapsed else 'Uncollapsed',
                        lvl),
                    metadata={'heading_collapsed': True} if collapsed else {}),
                nbf.new_markdown_cell(source='\n'.join([
                    'want hidden' if collapsed else 'want to see',
                    'what I mean',
                ])),
                nbf.new_code_cell(source='\n'.join([
                    'want hidden' if collapsed else 'want to see',
                    'what I mean',
                ])),
            ])
    notebook_node = nbf.new_notebook(cells=cells)
    body, resources = export_through_preprocessor(
        notebook_node, CollapsibleHeadingsPreprocessor, RSTExporter, 'rst')
    assert_not_in('hidden', body, 'check text hidden by collapsed headings')
    assert_in('want to see', body, 'check for text under uncollapsed headings')
def test_very_long_cells(self):
    """
    Torture test that long cells do not cause issues
    """
    lorem_ipsum_text = textwrap.dedent("""\
      Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec
      dignissim, ipsum non facilisis tempus, dui felis tincidunt metus, nec
      pulvinar neque odio eget risus. Nulla nisi lectus, cursus suscipit
      interdum at, ultrices sit amet orci. Mauris facilisis imperdiet elit,
      vitae scelerisque ipsum dignissim non. Integer consequat malesuada neque
      sit amet pulvinar. Curabitur pretium ut turpis eget aliquet. Maecenas
      sagittis lacus sed lectus volutpat, eu adipiscing purus pulvinar. Maecenas
      consequat luctus urna, eget cursus quam mollis a. Aliquam vitae ornare
      erat, non hendrerit urna. Sed eu diam nec massa egestas pharetra at nec
      tellus. Fusce feugiat lacus quis urna sollicitudin volutpat. Quisque at
      sapien non nibh feugiat tempus ac ultricies purus.
      """)
    lorem_ipsum_text = lorem_ipsum_text.replace("\n", " ") + "\n\n"
    large_lorem_ipsum_text = "".join([lorem_ipsum_text] * 3000)

    notebook_name = "lorem_ipsum_long.ipynb"
    nb = v4.new_notebook(
        cells=[
            v4.new_markdown_cell(source=large_lorem_ipsum_text)
        ]
    )

    with TemporaryDirectory() as td:
        nbfile = os.path.join(td, notebook_name)
        with open(nbfile, 'w') as f:
            write(nb, f, 4)

        (output, resources) = LatexExporter(template_file='article').from_filename(nbfile)
        assert len(output) > 0
def setUp(self):
    nbdir = self.notebook_dir

    if not os.path.isdir(pjoin(nbdir, 'foo')):
        subdir = pjoin(nbdir, 'foo')
        os.mkdir(subdir)

        # Make sure that we clean this up when we're done.
        # By using addCleanup this will happen correctly even if we fail
        # later in setUp.
        @self.addCleanup
        def cleanup_dir():
            shutil.rmtree(subdir, ignore_errors=True)

    nb = new_notebook()

    nb.cells.append(new_markdown_cell(u'Created by test ³'))
    cc1 = new_code_cell(source=u'print(2*6)')
    cc1.outputs.append(new_output(output_type="stream", text=u'12'))
    cc1.outputs.append(new_output(output_type="execute_result",
        data={'image/png': png_green_pixel},
        execution_count=1,
    ))
    nb.cells.append(cc1)

    with io.open(pjoin(nbdir, 'foo', 'testnb.ipynb'), 'w',
                 encoding='utf-8') as f:
        write(nb, f, version=4)

    self.nbconvert_api = NbconvertAPI(self.request)
def test_raw_template_dynamic_attr_reversed(self):
    """
    Test that template_file and raw_template traitlets play nicely together.
    - source assigns raw_template default first, then template_file
    - checks that the raw_template overrules template_file if set
    - checks that once raw_template is set to '', template_file returns
    """
    nb = v4.new_notebook()
    nb.cells.append(v4.new_code_cell("some_text"))

    class AttrDynamicExporter(TemplateExporter):
        @default('raw_template')
        def _raw_template_default(self):
            return raw_template

        @default('template_file')
        def _template_file_default(self):
            return "rst.tpl"

    exporter_attr_dynamic = AttrDynamicExporter()
    output_attr_dynamic, _ = exporter_attr_dynamic.from_notebook_node(nb)
    assert "blah" in output_attr_dynamic
    exporter_attr_dynamic.raw_template = ''
    assert exporter_attr_dynamic.template_file == "rst.tpl"
    output_attr_dynamic, _ = exporter_attr_dynamic.from_notebook_node(nb)
    assert "blah" not in output_attr_dynamic
def create_notebook(name, cells):
    nb = new_notebook()
    for cell in cells:
        nb.cells.append(new_code_cell(source=cell))
    with open(name, "w") as fh:
        write(nb, fh, 4)
def create_rich_help_func(self):
    '''
    Build a help renderer with an attached __richdoc__ notebook.
    '''
    nb = nb_v4.new_notebook()
    f = rich_help()
    f.__richdoc__ = nb
    return f
def test_htmltoc2(self):
    """Test exporter for adding table of contents"""
    nb = v4.new_notebook(cells=[
        v4.new_code_cell(source="a = 'world'"),
        v4.new_markdown_cell(source="# Heading"),
    ])
    self.check_stuff_gets_embedded(
        nb, 'html_toc', to_be_included=['toc2'])
def test_nonexistant_grade_id(self, preprocessors, resources):
    """Are cells not in the database ignored?"""
    cell = create_grade_cell("", "code", "", 1)
    cell.metadata.nbgrader['grade'] = False
    nb = new_notebook()
    nb.cells.append(cell)
    nb, resources = preprocessors[0].preprocess(nb, resources)
    nb, resources = preprocessors[1].preprocess(nb, resources)
    assert 'grade_id' not in cell.metadata.nbgrader

    cell = create_grade_cell("", "code", "foo", 1)
    cell.metadata.nbgrader['grade'] = False
    nb = new_notebook()
    nb.cells.append(cell)
    nb, resources = preprocessors[0].preprocess(nb, resources)
    nb, resources = preprocessors[1].preprocess(nb, resources)
    assert 'grade_id' not in cell.metadata.nbgrader
def convert_normal_cells(normal_cells):
    """ Convert normal_cells into html. """
    for cell in normal_cells:
        if cell.cell_type == 'markdown':
            cell.source = re.sub(r'\\begin\{ *equation *\}', '\[', cell.source)
            cell.source = re.sub(r'\\end\{ *equation *\}', '\]', cell.source)
    tmp = current.new_notebook(cells=normal_cells)
    html = export_unit_to_html(tmp)
    return html
def _empty_notebook(self, path):
    nb = new_notebook()
    full_dest = os.path.join(os.getcwd(), path)
    if not os.path.exists(os.path.dirname(full_dest)):
        os.makedirs(os.path.dirname(full_dest))
    if os.path.exists(full_dest):
        os.remove(full_dest)
    with open(full_dest, 'w') as f:
        write_nb(nb, f, 4)
def _empty_notebook(self, path):
    nb = new_notebook()
    full_dest = os.path.abspath(path)
    if not os.path.exists(os.path.dirname(full_dest)):
        os.makedirs(os.path.dirname(full_dest))
    if os.path.exists(full_dest):
        os.remove(full_dest)
    with open(full_dest, "w") as f:
        write_nb(nb, f, 4)
def test_embedhtml(self):
    """Test exporter for embedding images into HTML"""
    nb = v4.new_notebook(cells=[
        v4.new_code_cell(source="a = 'world'"),
        v4.new_markdown_cell(
            source="![testimage]({})".format(path_in_data('icon.png'))
        ),
    ])
    self.check_stuff_gets_embedded(
        nb, 'html_embed', to_be_included=['base64'])
def test_raw_template_assignment(self):
    """
    Test `raw_template` assigned after the fact on non-custom Exporter.
    """
    nb = v4.new_notebook()
    nb.cells.append(v4.new_code_cell("some_text"))

    exporter_assign = TemplateExporter()
    exporter_assign.raw_template = raw_template
    output_assign, _ = exporter_assign.from_notebook_node(nb)
    assert "blah" in output_assign
def test_raw_template_constructor(self):
    """
    Test `raw_template` as a keyword argument in the exporter constructor.
    """
    nb = v4.new_notebook()
    nb.cells.append(v4.new_code_cell("some_text"))
    output_constructor, _ = TemplateExporter(
        raw_template=raw_template).from_notebook_node(nb)
    assert "blah" in output_constructor
def test_fail_to_find_template_file(self):
    # Create an exporter with an invalid template file, check that it doesn't
    # exist in the environment, then try to convert an empty notebook.
    # Failure is expected due to the nonexistent template file.
    template = 'does_not_exist.tpl'
    exporter = TemplateExporter(template_file=template)
    assert template not in exporter.environment.list_templates(extensions=['tpl'])
    nb = v4.new_notebook()
    with pytest.raises(TemplateNotFound):
        out, resources = exporter.from_notebook_node(nb)
def build_notebook(self):
    """Build a reveal slides notebook in memory for use with tests.
    Overrides base in PreprocessorTestsBase"""
    outputs = [nbformat.new_output(output_type='display_data',
                                   data={'image/svg+xml': self.simple_svg})]

    cells = [nbformat.new_code_cell(source="", execution_count=1, outputs=outputs)]

    return nbformat.new_notebook(cells=cells)
def test_htmltoc2(self):
    """Test exporter for adding table of contents"""
    nb = v4.new_notebook(cells=[
        v4.new_code_cell(source="a = 'world'"),
        v4.new_markdown_cell(source="# Heading"),
    ])

    def check(byte_string, root_node):
        assert b'toc2' in byte_string

    self.check_html(nb, 'html_toc', check_func=check)
def py_to_ipynb(source, dest):
    # Create the code cells by parsing the file in input
    cells = []
    for c in parse_py(source):
        cells.append(new_code_cell(source=c))

    # This creates a V4 Notebook with the code cells extracted above
    nb0 = new_notebook(cells=cells, metadata={"language": "python"})

    with codecs.open(dest, encoding="utf-8", mode="w") as f:
        nbformat.write(nb0, f, 4)
def test_htmltoc2(self):
    """Test exporter for adding table of contents"""
    with self.create_temp_cwd():
        nb = v4.new_notebook(
            cells=[v4.new_code_cell(source="a = 'world'"),
                   v4.new_markdown_cell(source="# Heading")]
        )
        with io.open("notebook2.ipynb", "w", encoding="utf-8") as f:
            write(nb, f, 4)
        self.nbconvert("--to html_toc" ' "notebook2"')
        assert os.path.isfile("notebook2.html")
def test_run_nb_error(self):
    """Test %run notebook.ipynb error"""
    from nbformat import v4, writes

    # %run when a file name isn't provided
    nt.assert_raises(Exception, _ip.magic, "run")

    # %run when a file doesn't exist
    nt.assert_raises(Exception, _ip.magic, "run foobar.ipynb")

    # %run on a notebook with an error
    nb = v4.new_notebook(cells=[v4.new_code_cell("0/0")])
    src = writes(nb, version=4)
    self.mktmp(src, ext='.ipynb')
    nt.assert_raises(Exception, _ip.magic, "run %s" % self.fname)
def new(self, model=None, path=''):
    self.log.debug('NEW NOTEBOOK: %s' % (path))

    path = path.strip('/')

    if model is None:
        model = {}

    model.setdefault('type', 'notebook')
    model['content'] = new_notebook()
    model['format'] = 'json'

    model = self.save(model, path, True)
    return model
def test_raw_template_attr(self):
    """
    Verify that you can assign an in-memory template string by overwriting
    `raw_template` as a simple (non-traitlet) attribute
    """
    nb = v4.new_notebook()
    nb.cells.append(v4.new_code_cell("some_text"))

    class AttrExporter(TemplateExporter):
        raw_template = raw_template

    exporter_attr = AttrExporter(template_name="rst")
    output_attr, _ = exporter_attr.from_notebook_node(nb)
    assert "blah" in output_attr
def format_code(self, file_content, extension):
    if extension == "python":
        # yapf's FormatCode returns (formatted_code, changed)
        result = FormatCode(file_content)
        return result[0]
    elif extension == 'notebook':
        nb = file_content
        newcells = []
        for idx, cell in enumerate(nb["cells"]):
            if cell["cell_type"] == 'code':
                try:
                    formatted, _ = FormatCode(cell["source"])
                except Exception:
                    # Fall back to the original source if formatting fails
                    formatted = cell["source"]
                newcells.append(nbf.new_code_cell(formatted))
            else:
                newcells.append(nbf.new_markdown_cell(cell["source"]))
        newnb = nbf.new_notebook(cells=newcells)
        return newnb
    elif extension == "HTML":
        soup = BeautifulSoup(file_content, "html.parser")
        result = soup.prettify().encode('cp1252', errors='ignore')
        a = result.decode('utf-8')
        return a
def generate_html(base_dir, fn, conditions=None, show_details=True, include_images=True, sort_samples=True):
    logo_fn = Path(os.path.realpath(__file__)).parent / 'logo_v2.png'
    logo_URI, logo_width, logo_height = fn_to_URI(logo_fn)

    nb = nbf.new_notebook()

    documentation_cell_contents = f'''\
<a target="_blank" href="https://github.com/jeffhussmann/knock-knock" rel="nofollow"><img width={logo_width} height={logo_height} src={logo_URI} alt="knock-knock" align="left"></a>
<br clear="all">

knock-knock is a tool for exploring, categorizing, and quantifying the full spectrum of sequence outcomes produced by CRISPR knock-in experiments.

<a href="https://github.com/jeffhussmann/knock-knock/blob/master/docs/visualization.md#interactive-exploration-of-outcomes" target="_blank">How to use this table</a>

<a href="https://github.com/jeffhussmann/knock-knock/blob/master/docs/visualization.md" target="_blank">How to interpret read diagrams</a>
'''

    table_cell_contents = f'''\
import knock_knock.table

conditions = {conditions}

knock_knock.table.make_table('{base_dir}',
                             conditions,
                             show_details={show_details},
                             include_images={include_images},
                             sort_samples={sort_samples},
                            )
'''

    nb['cells'] = [
        nbf.new_markdown_cell(documentation_cell_contents),
        nbf.new_code_cell(table_cell_contents),
    ]

    nb['metadata'] = {
        'title': str(fn.name),
        'include_images': include_images,
    }

    exporter = nbconvert.HTMLExporter(exclude_input=True, exclude_output_prompt=True)
    template_path = Path(os.path.realpath(__file__)).parent / 'modal_template.tpl'
    exporter.template_file = str(template_path)

    ep = nbconvert.preprocessors.ExecutePreprocessor(timeout=600, kernel_name='python3')
    ep.preprocess(nb, {})

    body, resources = exporter.from_notebook_node(nb)

    with open(fn, 'w') as fh:
        fh.write(body)
def test_run_job_timeout_kwargs(self):
    """ test running a job that times out (kwargs) """
    om = self.om
    # create a long-running notebook
    cells = []
    code = "import time; time.sleep(10)"
    cells.append(v4.new_code_cell(source=code))
    notebook = v4.new_notebook(cells=cells)
    om.jobs.drop('testjob', force=True)
    meta = om.jobs.put(notebook, 'testjob')
    # -- execute with default timeout, expect to succeed
    meta_job = om.jobs.run('testjob')
    self.assertIsInstance(meta_job, Metadata)
    meta = om.jobs.metadata('testjob')
    runs = meta.attributes['job_runs']
    results = meta.attributes['job_results']
    self.assertIn('job_runs', meta.attributes)
    self.assertEqual(len(results), 1)
    resultnb = results[0]
    self.assertTrue(om.jobs.exists(resultnb))
    self.assertEqual(runs[0]['results'], resultnb)
    # -- put the notebook with a timeout less than expected running time
    # -- expect run to fail due to timeout as kwarg
    om.jobs.drop('testjob', force=True)
    meta = om.jobs.put(notebook, 'testjob')
    meta_job = om.jobs.run('testjob', timeout=5)
    self.assertIsInstance(meta_job, Metadata)
    meta = om.jobs.metadata('testjob')
    self.assertIn('job_runs', meta.attributes)
    runs = meta.attributes['job_runs']
    this_run = runs[0]
    self.assertEqual(this_run['status'], 'ERROR')
    self.assertIn('execution timed out', this_run['message'])
    self.assertEqual(len(runs), 1)
    # -- retry with no timeout
    om.jobs.drop('testjob', force=True)
    meta = om.jobs.put(notebook, 'testjob')
    meta_job = om.jobs.run('testjob', timeout=None)
    self.assertIsInstance(meta_job, Metadata)
    meta = om.jobs.metadata('testjob')
    runs = meta.attributes['job_runs']
    results = meta.attributes['job_results']
    self.assertIn('job_runs', meta.attributes)
    self.assertEqual(len(results), 1)
    resultnb = results[0]
    self.assertTrue(om.jobs.exists(resultnb))
    self.assertEqual(runs[0]['results'], resultnb)
def test_raw_template_deassignment(self):
    """
    Test `raw_template` does not overwrite template_file if deassigned
    after being assigned to a non-custom Exporter.
    """
    nb = v4.new_notebook()
    nb.cells.append(v4.new_code_cell("some_text"))

    exporter_deassign = RSTExporter()
    exporter_deassign.raw_template = raw_template
    output_deassign, _ = exporter_deassign.from_notebook_node(nb)
    assert "blah" in output_deassign

    exporter_deassign.raw_template = ''
    assert exporter_deassign.template_file == 'index.rst.j2'
    output_deassign, _ = exporter_deassign.from_notebook_node(nb)
    assert "blah" not in output_deassign
def test_javascript_output(self):
    nb = v4.new_notebook(
        cells=[
            v4.new_code_cell(
                outputs=[v4.new_output(
                    output_type='display_data',
                    data={
                        'application/javascript': "javascript_output();"
                    }
                )]
            )
        ]
    )
    (output, resources) = HTMLExporter(template_file='basic').from_notebook_node(nb)
    self.assertIn('javascript_output', output)
def test_save_unchanged_markdown(self, preprocessors, gradebook, resources):
    """Is an unchanged markdown cell correctly graded?"""
    cell = create_grade_and_solution_cell("hello", "markdown", "foo", 1)
    cell.metadata.nbgrader['checksum'] = compute_checksum(cell)
    nb = new_notebook()
    nb.cells.append(cell)
    preprocessors[0].preprocess(nb, resources)
    gradebook.add_submission("ps0", "bar")
    preprocessors[1].preprocess(nb, resources)
    preprocessors[2].preprocess(nb, resources)

    assert cell.metadata.nbgrader['score'] == 0
    assert cell.metadata.nbgrader['points'] == 1
    assert cell.metadata.nbgrader['comment'] == "No response."
def setUp(self):
    nbdir = self.notebook_dir.name

    try:
        os.mkdir(pjoin(nbdir, 'foo'))
    except OSError as e:
        # Deleting the folder in an earlier test may have failed
        if e.errno != errno.EEXIST:
            raise

    with io.open(pjoin(nbdir, 'foo', 'nb1.ipynb'), 'w', encoding='utf-8') as f:
        nb = new_notebook()
        write(nb, f, version=4)

    self.sess_api = SessionAPI(self.base_url())
def generate_notebook(fname, nlines=10, ncells=100):
    """Generate a notebook to test loading time

    fname: destination filename
    nlines: number of lines of input/output per cell
    ncells: number of cells
    """
    nb = v4.new_notebook()
    source = '\n'.join(['print(%i)' % i for i in range(nlines)])
    output = v4.new_output('stream', text='\n'.join(map(str, range(nlines))))
    nb.cells = [
        v4.new_code_cell(source, outputs=[output])
        for i in range(ncells)
    ]
    with open(fname, 'w') as f:
        write(nb, f)
def test_custom_filter_highlight_code(self):
    # Overwriting filters takes place at: Exporter.from_notebook_node
    nb = v4.new_notebook()
    nb.cells.append(v4.new_code_cell("some_text"))

    def custom_highlight_code(source, language="python", metadata=None, strip_verbatim=False):
        return source + " ADDED_TEXT"

    filters = {"highlight_code": custom_highlight_code}
    (output, resources) = LatexExporter(filters=filters).from_notebook_node(nb)
    self.assertTrue("ADDED_TEXT" in output)
def test_script_exporter_entrypoint():
    nb = v4.new_notebook()
    nb.metadata.language_info = {
        'name': 'dummy',
        'mimetype': 'text/x-dummy',
    }

    p = os.path.join(os.path.dirname(nbconvert.tests.__file__), 'exporter_entrypoint')
    sys.path.insert(0, p)
    try:
        output, _ = ScriptExporter().from_notebook_node(nb)
        assert output == 'dummy-script-exported'
    finally:
        sys.path.remove(p)
def nbuild(in_files: List[str]) -> NotebookNode: """Create an unexecuted Jupyter notebook from markdown and code files. :param in_files: A list of source file names. :return: An unexecuted ``nbformat.NotebookNode`` object. """ nb = new_notebook() nb.cells = [ new_code_cell(Path(name).read_text()) if name.endswith((".py", ".R")) else new_markdown_cell(Path(name).read_text()) for name in in_files ] return nb
def test_run_nb(self):
    """Test %run notebook.ipynb"""
    from nbformat import v4, writes
    nb = v4.new_notebook(
        cells=[
            v4.new_markdown_cell("The Ultimate Question of Everything"),
            v4.new_code_cell("answer=42")
        ]
    )
    src = writes(nb, version=4)
    self.mktmp(src, ext='.ipynb')

    _ip.magic("run %s" % self.fname)

    nt.assert_equal(_ip.user_ns['answer'], 42)
def setup_class(cls):
    """Make a test notebook. Borrowed from nbconvert test. Assumes the
    class teardown will clean it up in the end."""
    super(BundleAPITest, cls).setup_class()
    nbdir = cls.notebook_dir.name
    nb = new_notebook()

    nb.cells.append(new_markdown_cell(u'Created by test'))
    cc1 = new_code_cell(source=u'print(2*6)')
    cc1.outputs.append(new_output(output_type="stream", text=u'12'))
    nb.cells.append(cc1)

    with io.open(pjoin(nbdir, 'testnb.ipynb'), 'w', encoding='utf-8') as f:
        write(nb, f, version=4)
def test_html_collapsible_headings(self):
    """Test exporter for inlining collapsible_headings"""
    nb = v4.new_notebook(cells=[
        v4.new_markdown_cell(source='# level 1 heading'),
        v4.new_code_cell(source='a = range(1,10)'),
        v4.new_markdown_cell(source='## level 2 heading'),
        v4.new_code_cell(source='a = range(1,10)'),
        v4.new_markdown_cell(source='### level 3 heading'),
        v4.new_code_cell(source='a = range(1,10)'),
    ])

    def check(byte_string, root_node):
        assert b'collapsible_headings' in byte_string

    self.check_html(nb, 'html_ch', check_func=check)
def test_export_python(self):
    """delegate to custom exporter from language_info"""
    exporter = self.exporter_class()

    pynb = v4.new_notebook()
    (output, resources) = self.exporter_class().from_notebook_node(pynb)
    self.assertNotIn('# coding: utf-8', output)

    pynb.metadata.language_info = {
        'name': 'python',
        'mimetype': 'text/x-python',
        'nbconvert_exporter': 'python',
    }
    (output, resources) = self.exporter_class().from_notebook_node(pynb)
    self.assertIn('# coding: utf-8', output)
def test_export_python(self):
    """delegate to custom exporter from language_info"""
    exporter = self.exporter_class()

    pynb = v4.new_notebook()
    (output, resources) = self.exporter_class().from_notebook_node(pynb)
    self.assertNotIn("# coding: utf-8", output)

    pynb.metadata.language_info = {
        "name": "python",
        "mimetype": "text/x-python",
        "nbconvert_exporter": "python",
    }
    (output, resources) = self.exporter_class().from_notebook_node(pynb)
    self.assertIn("# coding: utf-8", output)
def test_script_exporter_entrypoint():
    nb = v4.new_notebook()
    nb.metadata.language_info = {
        "name": "dummy",
        "mimetype": "text/x-dummy",
    }

    p = os.path.join(os.path.dirname(nbconvert.tests.__file__), "exporter_entrypoint")
    sys.path.insert(0, p)
    try:
        output, _ = ScriptExporter().from_notebook_node(nb)
        assert output == "dummy-script-exported"
    finally:
        sys.path.remove(p)
def test_scheduled_not_duplicated(self):
    om = self.om
    cells = []
    conf = """
    # schedule: daily, at 06:00
    """.strip()
    cmd = "print('hello')"
    cells.append(v4.new_code_cell(source=conf))
    cells.append(v4.new_code_cell(source=cmd))
    notebook = v4.new_notebook(cells=cells)
    om.jobs.put(notebook, 'testjob')
    self._check_scheduled_job(autorun=False, reschedule=False)
    meta = om.jobs.metadata('testjob')
    trigger = meta.attributes['triggers']
    self.assertEqual(len(trigger), 1)
def test_embedhtml(self):
    """Test exporter for embedding images into HTML"""
    nb = v4.new_notebook(cells=[
        v4.new_code_cell(source="a = 'world'"),
        v4.new_markdown_cell(
            source="![testimage]({})".format(path_in_data('icon.png'))),
    ])

    def check(byte_string, root_node):
        nodes = root_node.findall(".//img")
        for n in nodes:
            url = n.attrib["src"]
            assert url.startswith('data')

    self.check_html(nb, 'html_embed', check_func=check)
def test_save_code_solution_cell(self, preprocessor, gradebook, resources):
    cell = create_solution_cell("hello", "code", "foo")
    cell.metadata.nbgrader['checksum'] = compute_checksum(cell)
    nb = new_notebook()
    nb.cells.append(cell)
    nb, resources = preprocessor.preprocess(nb, resources)

    gradebook.find_solution_cell("foo", "test", "ps0")
    source_cell = gradebook.find_source_cell("foo", "test", "ps0")
    assert source_cell.source == "hello"
    assert source_cell.checksum == cell.metadata.nbgrader["checksum"]
    assert source_cell.cell_type == "code"
    assert not source_cell.locked
def test_coalesce_replace_streams(self):
    """Are \\r characters handled?"""
    outputs = [
        nbformat.new_output(output_type="stream", name="stdout", text="z"),
        nbformat.new_output(output_type="stream", name="stdout", text="\ra"),
        nbformat.new_output(output_type="stream", name="stdout", text="\nz\rb"),
        nbformat.new_output(output_type="stream", name="stdout", text="\nz"),
        nbformat.new_output(output_type="stream", name="stdout", text="\rc\n"),
        nbformat.new_output(output_type="stream", name="stdout", text="z\rz\rd"),
    ]
    cells = [nbformat.new_code_cell(source="# None", execution_count=1, outputs=outputs)]
    nb = nbformat.new_notebook(cells=cells)
    res = self.build_resources()
    nb, res = coalesce_streams(nb, res)
    outputs = nb.cells[0].outputs
    self.assertEqual(outputs[0].text, u'a\nb\nc\nd')
def py_to_ipynb(path, py_filename):
    # Use the actual parameter name here (the original referenced an
    # undefined `filename`).
    print('Converting {}'.format(os.path.join(path, py_filename)))

    ipynb_filename = py_filename.split('.')[0] + '.ipynb'

    with open(os.path.join(path, py_filename), 'r') as f:
        red = RedBaron(f.read())

    # detect whether the state has changed and thus need to flush the code
    # up to that point before processing a node
    sources = []
    cell_source = []
    prev_type = red[0].type
    # concat_state = [False]

    for node in red[1:]:
        cur_type = node.type

        # ignore blank lines
        if cur_type == 'endl':
            continue

        if should_concat(prev_type, cur_type):
            cell_source.append(node.dumps())
        else:
            sources.append('\n'.join(cell_source))
            cell_source = [node.dumps()]

        prev_type = cur_type

    # last cell, special handling
    cell_source = []
    # the value of the IfNode
    node_value = node.value[0].value
    # just include all the argparse lines
    for line in node_value:
        cell_source.append(line.dumps())
    sources.append('\n'.join(cell_source))

    # build cells and notebook
    cells = [new_code_cell(source=source) for source in sources if source]
    notebook = new_notebook(cells=cells)

    # output
    with open(os.path.join(path, ipynb_filename), 'w') as f:
        nbformat.write(notebook, f)
def notebook(setup) -> NotebookNode: """Generates a Jupyter notebook""" welcome = textwrap.dedent(""" # Welcome This notebook was generated by the eWaterCycle experiment launcher. """) cells = [ new_markdown_cell(welcome), new_code_cell('import pandas as pd'), new_code_cell("""from hymuse.community.{0}.interface import {0} from ewatercycle import parameter_fetcher """.format(setup['model'])), new_code_cell( textwrap.dedent("""# Construct model model = {0}(hostname="localhost") # Setup model based on region, # downloads input files of the region from HydroShare and configure parameters with their paths parameterset = parameter_fetcher(region='{1}', target_model={0}) model.parameters.reset_from_memento(parameterset) print model.outlets # outlets (set of measurement stations) print model.grid # grid with attributes print model.subsurface_grid # ?? print model.state # full state, collection of all grid, sets, not implemented in AMUSE/OMUSE currently""" .format(setup['model'], setup['region']))), new_code_cell( textwrap.dedent("""# Store outlet of each step water_levels = [] # Run model in daily steps steps = pd.date_range(start='{0}', end='{1}', freq='D') for step in steps: model.evolve_model(step) outlets.append(model.outlets[0].water_level) write_set_to_file(model.grid,filename, 'netcdf')""".format( setup['period']['start'], setup['period']['end']))), new_code_cell( textwrap.dedent("""import matplotlib.pyplot as plt plt.ion()""")), new_code_cell( textwrap.dedent( """# Plot water level at first outlet for each model step plt.plot(steps, water_level)""")), ] return new_notebook(cells=cells, metadata=PY3_META)
def tester(import_list_dict):
    # http://nbviewer.ipython.org/gist/fperez/9716279
    for import_dict in import_list_dict:
        code = import_dict['code']
        target = import_dict['targets']
        nb = v4.new_notebook()
        cells = [v4.new_code_cell(code)]
        nb['cells'].extend(cells)
        fname = tempfile.NamedTemporaryFile(suffix='.ipynb').name
        with open(fname, 'w') as f:
            f.write(v4.writes(nb))
        # parse the notebook!
        assert target == depfinder.notebook_path_to_dependencies(fname)
        os.remove(fname)
def test_job_put_get(self):
    """ test job put and get """
    om = self.om
    # create a notebook
    cells = []
    code = "print 'hello'"
    cells.append(v4.new_code_cell(source=code))
    notebook = v4.new_notebook(cells=cells)
    # put the notebook
    meta = om.jobs.put(notebook, 'testjob')
    self.assertEqual(meta.name, 'testjob.ipynb')
    # read it back and see what's in it
    notebook2 = om.jobs.get('testjob')
    self.assertDictEqual(notebook2, notebook)
def build_notebook(self):
    """Build a reveal slides notebook in memory for use with tests.
    Overrides base in PreprocessorTestsBase"""
    outputs = [
        nbformat.new_output(output_type='display_data',
                            data={'image/svg+xml': self.simple_svg})
    ]

    cells = [
        nbformat.new_code_cell(source="", execution_count=1, outputs=outputs)
    ]

    return nbformat.new_notebook(cells=cells)
def _empty_notebook(self, path, kernel=None):
    nb = new_notebook()
    if kernel is not None:
        nb.metadata.kernelspec = {
            "display_name": "kernel",
            "language": kernel,
            "name": kernel
        }
    full_dest = os.path.abspath(path)
    if not os.path.exists(os.path.dirname(full_dest)):
        os.makedirs(os.path.dirname(full_dest))
    if os.path.exists(full_dest):
        remove(full_dest)
    with io.open(full_dest, mode='w', encoding='utf-8') as f:
        write_nb(nb, f, 4)