def test_save(self):
    """Save a modified notebook, verify on disk and via the API, then rename it."""
    resp = self.nb_api.read('a.ipynb', 'foo')
    nbcontent = json.loads(resp.text)['content']
    nb = to_notebook_json(nbcontent)

    # Replace the worksheets with a single one holding a heading cell.
    sheet = new_worksheet()
    nb.worksheets = [sheet]
    sheet.cells.append(new_heading_cell(u'Created by test ³'))

    nbmodel = {'name': 'a.ipynb', 'path': 'foo', 'content': nb}
    resp = self.nb_api.save('a.ipynb', path='foo', body=json.dumps(nbmodel))

    # The file on disk should now contain the new heading cell.
    nbfile = pjoin(self.notebook_dir.name, 'foo', 'a.ipynb')
    with io.open(nbfile, 'r', encoding='utf-8') as f:
        newnb = read(f, format='ipynb')
    self.assertEqual(newnb.worksheets[0].cells[0].source,
                     u'Created by test ³')

    # Reading back through the API should agree with the file contents.
    nbcontent = self.nb_api.read('a.ipynb', 'foo').json()['content']
    newnb = to_notebook_json(nbcontent)
    self.assertEqual(newnb.worksheets[0].cells[0].source,
                     u'Created by test ³')

    # Save and rename
    nbmodel = {'name': 'a2.ipynb', 'path': 'foo/bar', 'content': nb}
    resp = self.nb_api.save('a.ipynb', path='foo', body=json.dumps(nbmodel))
    saved = resp.json()
    self.assertEqual(saved['name'], 'a2.ipynb')
    self.assertEqual(saved['path'], 'foo/bar')
    assert os.path.isfile(
        pjoin(self.notebook_dir.name, 'foo', 'bar', 'a2.ipynb'))
    assert not os.path.isfile(
        pjoin(self.notebook_dir.name, 'foo', 'a.ipynb'))
    with assert_http_error(404):
        self.nb_api.read('a.ipynb', 'foo')
def test_save(self):
    """Round-trip a notebook through the save API, then save it under a new name/path."""
    heading = u'Created by test ³'

    resp = self.nb_api.read('a.ipynb', 'foo')
    nb = to_notebook_json(json.loads(resp.text)['content'])

    # Swap in a fresh worksheet containing only a heading cell.
    ws = new_worksheet()
    nb.worksheets = [ws]
    ws.cells.append(new_heading_cell(heading))

    body = json.dumps({'name': 'a.ipynb', 'path': 'foo', 'content': nb})
    resp = self.nb_api.save('a.ipynb', path='foo', body=body)

    # Verify the change hit the file on disk ...
    nbfile = pjoin(self.notebook_dir.name, 'foo', 'a.ipynb')
    with io.open(nbfile, 'r', encoding='utf-8') as f:
        newnb = read(f, format='ipynb')
    self.assertEqual(newnb.worksheets[0].cells[0].source, heading)

    # ... and is visible through the read API as well.
    nbcontent = self.nb_api.read('a.ipynb', 'foo').json()['content']
    newnb = to_notebook_json(nbcontent)
    self.assertEqual(newnb.worksheets[0].cells[0].source, heading)

    # Save and rename
    body = json.dumps({'name': 'a2.ipynb', 'path': 'foo/bar', 'content': nb})
    resp = self.nb_api.save('a.ipynb', path='foo', body=body)
    saved = resp.json()
    self.assertEqual(saved['name'], 'a2.ipynb')
    self.assertEqual(saved['path'], 'foo/bar')
    assert os.path.isfile(
        pjoin(self.notebook_dir.name, 'foo', 'bar', 'a2.ipynb'))
    assert not os.path.isfile(
        pjoin(self.notebook_dir.name, 'foo', 'a.ipynb'))
    with assert_http_error(404):
        self.nb_api.read('a.ipynb', 'foo')
def process_header(self, header):
    """Process a header from pandoc json"""
    # Pandoc headers arrive as (level, attributes, inline-content).
    level, attrs, contents = header
    cell = nbf.new_heading_cell(self.process_inline(contents),
                                metadata=META_SLIDE,
                                level=level)
    self._add_cell(cell)
def test_checkpoints(self):
    """Exercise create / list / restore / delete of notebook checkpoints."""
    resp = self.api.read('a.ipynb', 'foo')

    # Create a checkpoint of the pristine notebook.
    r = self.api.new_checkpoint('a.ipynb', 'foo')
    self.assertEqual(r.status_code, 201)
    cp1 = r.json()
    self.assertEqual(set(cp1), {'id', 'last_modified'})
    self.assertEqual(r.headers['Location'].split('/')[-1], cp1['id'])

    # Modify it
    nb = to_notebook_json(json.loads(resp.text)['content'])
    ws = new_worksheet()
    nb.worksheets = [ws]
    hcell = new_heading_cell('Created by test')
    ws.cells.append(hcell)

    # Save
    nbmodel = {
        'name': 'a.ipynb',
        'path': 'foo',
        'content': nb,
        'type': 'notebook',
    }
    resp = self.api.save('a.ipynb', path='foo', body=json.dumps(nbmodel))

    # List checkpoints
    cps = self.api.get_checkpoints('a.ipynb', 'foo').json()
    self.assertEqual(cps, [cp1])

    nbcontent = self.api.read('a.ipynb', 'foo').json()['content']
    nb = to_notebook_json(nbcontent)
    self.assertEqual(nb.worksheets[0].cells[0].source, 'Created by test')

    # Restore cp1: the modification above must be rolled back.
    r = self.api.restore_checkpoint('a.ipynb', 'foo', cp1['id'])
    self.assertEqual(r.status_code, 204)
    nbcontent = self.api.read('a.ipynb', 'foo').json()['content']
    nb = to_notebook_json(nbcontent)
    self.assertEqual(nb.worksheets, [])

    # Delete cp1: the checkpoint list becomes empty again.
    r = self.api.delete_checkpoint('a.ipynb', 'foo', cp1['id'])
    self.assertEqual(r.status_code, 204)
    cps = self.api.get_checkpoints('a.ipynb', 'foo').json()
    self.assertEqual(cps, [])
def test_contents_manager(self):
    "make sure ContentsManager returns right files (ipynb, bin, txt)."
    nbdir = self.notebook_dir.name
    base = self.base_url()

    # Build a small notebook fixture: one heading cell, one code cell
    # with a stream output.
    nb = new_notebook(name='testnb')
    ws = new_worksheet()
    nb.worksheets = [ws]
    ws.cells.append(new_heading_cell(u'Created by test ³'))
    cc1 = new_code_cell(input=u'print(2*6)')
    cc1.outputs.append(new_output(output_text=u'12', output_type='stream'))
    ws.cells.append(cc1)

    with io.open(pjoin(nbdir, 'testnb.ipynb'), 'w',
                 encoding='utf-8') as f:
        write(nb, f, format='ipynb')

    # NOTE: the with-statement closes the files; the explicit f.close()
    # calls that previously followed these blocks were redundant no-ops
    # and have been removed.
    with io.open(pjoin(nbdir, 'test.bin'), 'wb') as f:
        f.write(b'\xff' + os.urandom(5))

    with io.open(pjoin(nbdir, 'test.txt'), 'w') as f:
        f.write(u'foobar')

    # Notebook files are served as (valid) JSON text.
    r = requests.get(url_path_join(base, 'files', 'testnb.ipynb'))
    self.assertEqual(r.status_code, 200)
    self.assertIn('print(2*6)', r.text)
    json.loads(r.text)

    # Binary files come back as octet-stream with the bytes intact.
    r = requests.get(url_path_join(base, 'files', 'test.bin'))
    self.assertEqual(r.status_code, 200)
    self.assertEqual(r.headers['content-type'], 'application/octet-stream')
    self.assertEqual(r.content[:1], b'\xff')
    self.assertEqual(len(r.content), 6)

    # Text files come back as text/plain.
    r = requests.get(url_path_join(base, 'files', 'test.txt'))
    self.assertEqual(r.status_code, 200)
    self.assertEqual(r.headers['content-type'], 'text/plain')
    self.assertEqual(r.text, 'foobar')
def setUp(self):
    """Create foo/testnb.ipynb (heading + code cell with text/png outputs) and an API client."""
    nbdir = self.notebook_dir.name
    foo_dir = pjoin(nbdir, 'foo')
    if not os.path.isdir(foo_dir):
        os.mkdir(foo_dir)

    # Assemble the fixture notebook.
    nb = new_notebook(name='testnb')
    ws = new_worksheet()
    nb.worksheets = [ws]
    ws.cells.append(new_heading_cell(u'Created by test ³'))

    code_cell = new_code_cell(input=u'print(2*6)')
    code_cell.outputs.append(new_output(output_text=u'12'))
    code_cell.outputs.append(
        new_output(output_png=png_green_pixel, output_type='pyout'))
    ws.cells.append(code_cell)

    with io.open(pjoin(foo_dir, 'testnb.ipynb'), 'w',
                 encoding='utf-8') as f:
        write(nb, f, format='ipynb')

    self.nbconvert_api = NbconvertAPI(self.base_url())
def test_checkpoints(self):
    """Checkpoint lifecycle: create, modify+save, list, restore, delete."""
    resp = self.nb_api.read('a.ipynb', 'foo')

    # Create a checkpoint before touching the notebook.
    r = self.nb_api.new_checkpoint('a.ipynb', 'foo')
    self.assertEqual(r.status_code, 201)
    cp1 = r.json()
    self.assertEqual(set(cp1), {'id', 'last_modified'})
    self.assertEqual(r.headers['Location'].split('/')[-1], cp1['id'])

    # Modify it
    nb = to_notebook_json(json.loads(resp.text)['content'])
    sheet = new_worksheet()
    nb.worksheets = [sheet]
    sheet.cells.append(new_heading_cell('Created by test'))

    # Save
    nbmodel = {'name': 'a.ipynb', 'path': 'foo', 'content': nb}
    resp = self.nb_api.save('a.ipynb', path='foo', body=json.dumps(nbmodel))

    # List checkpoints
    cps = self.nb_api.get_checkpoints('a.ipynb', 'foo').json()
    self.assertEqual(cps, [cp1])

    nbcontent = self.nb_api.read('a.ipynb', 'foo').json()['content']
    nb = to_notebook_json(nbcontent)
    self.assertEqual(nb.worksheets[0].cells[0].source, 'Created by test')

    # Restore cp1: the saved modification must be rolled back.
    r = self.nb_api.restore_checkpoint('a.ipynb', 'foo', cp1['id'])
    self.assertEqual(r.status_code, 204)
    nbcontent = self.nb_api.read('a.ipynb', 'foo').json()['content']
    nb = to_notebook_json(nbcontent)
    self.assertEqual(nb.worksheets, [])

    # Delete cp1: no checkpoints remain.
    r = self.nb_api.delete_checkpoint('a.ipynb', 'foo', cp1['id'])
    self.assertEqual(r.status_code, 204)
    cps = self.nb_api.get_checkpoints('a.ipynb', 'foo').json()
    self.assertEqual(cps, [])
def setUp(self):
    """Write the foo/testnb.ipynb fixture and build the nbconvert API client."""
    nbdir = self.notebook_dir.name
    if not os.path.isdir(pjoin(nbdir, 'foo')):
        os.mkdir(pjoin(nbdir, 'foo'))

    # Fixture notebook: a heading cell plus a code cell carrying both a
    # text output and a png ('pyout') output.
    nb = new_notebook(name='testnb')
    ws = new_worksheet()
    nb.worksheets = [ws]
    ws.cells.append(new_heading_cell(u'Created by test ³'))

    cell = new_code_cell(input=u'print(2*6)')
    cell.outputs.append(new_output(output_text=u'12'))
    cell.outputs.append(
        new_output(output_png=png_green_pixel, output_type='pyout'))
    ws.cells.append(cell)

    nbpath = pjoin(nbdir, 'foo', 'testnb.ipynb')
    with io.open(nbpath, 'w', encoding='utf-8') as f:
        write(nb, f, format='ipynb')

    self.nbconvert_api = NbconvertAPI(self.base_url())
def visit_title(self, node):
    """Emit the node's text as a heading cell, clamping the level to at most 6."""
    # Heading levels run 1..6; cap section_level at 5 before the +1 offset.
    level = min(self.section_level, 5) + 1
    self.add_cell(
        nbformat.new_heading_cell(source=node.astext(), level=level))
def process_header(self, header):
    """Process a header from pandoc json"""
    # A pandoc header is a (level, attributes, inline-content) triple.
    level, attrs, contents = header
    text = self.process_inline(contents)
    heading = nbf.new_heading_cell(text, metadata=META_SLIDE, level=level)
    self._add_cell(heading)
def write_notebook(pseudopath, with_eos=False, tmpfile=None):
    """
    Write an ipython notebook to pseudopath.
    By default, the notebook is created in the same directory
    as pseudopath but with the extension `ipynb` unless `tmpfile` is set to True.
    In the later case, a temporay file is created.

    Args:
        pseudopath: Path to the pseudopotential file.
        with_eos: True if EOS plots are wanted.
        tmpfile: If not None, the notebook is written to a temporary file
            instead of next to pseudopath.

    Returns:
        The path to the ipython notebook.

    See http://nbviewer.ipython.org/gist/fperez/9716279
    """
    from IPython.nbformat import current as nbf
    # from IPython.nbformat import v3 as nbf
    # import IPython.nbformat as nbf

    nb = nbf.new_notebook()

    cells = [
        nbf.new_heading_cell("This is an auto-generated notebook for %s" %
                             os.path.basename(pseudopath)),
        nbf.new_code_cell("""\
from __future__ import print_function
%matplotlib inline
import mpld3
from mpld3 import plugins as plugs
plugs.DEFAULT_PLUGINS = [plugs.Reset(), plugs.Zoom(), plugs.BoxZoom(), plugs.MousePosition()]
mpld3.enable_notebook()
import seaborn as sns
#sns.set(style="dark", palette="Set2")
sns.set(style='ticks', palette='Set2')"""),

        nbf.new_code_cell("""\
# Construct the pseudo object and get the DojoReport
from pymatgen.io.abinitio.pseudos import Pseudo
pseudo = Pseudo.from_file('%s')
report = pseudo.dojo_report""" % os.path.abspath(pseudopath)),

        nbf.new_heading_cell("ONCVPSP Input File:"),
        nbf.new_code_cell("""\
input_file = pseudo.filepath.replace(".psp8", ".in")
%cat $input_file"""),

        nbf.new_code_cell("""\
# Get data from the output file
from pseudo_dojo.ppcodes.oncvpsp import OncvOutputParser, PseudoGenDataPlotter
onc_parser = OncvOutputParser(pseudo.filepath.replace(".psp8", ".out"))
# Parse the file and build the plotter
onc_parser.scan()
plotter = onc_parser.make_plotter()"""),

        nbf.new_heading_cell("AE and PS radial wavefunctions $\phi(r)$:"),
        nbf.new_code_cell("fig = plotter.plot_radial_wfs(show=False)"),

        nbf.new_heading_cell("Arctan of the logarithmic derivatives:"),
        nbf.new_code_cell("fig = plotter.plot_atan_logders(show=False)"),

        nbf.new_heading_cell("Convergence in $G$-space estimated by ONCVPSP:"),
        nbf.new_code_cell("fig = plotter.plot_ene_vs_ecut(show=False)"),

        nbf.new_heading_cell("Projectors:"),
        nbf.new_code_cell("fig = plotter.plot_projectors(show=False)"),

        nbf.new_heading_cell("Core-Valence-Model charge densities:"),
        nbf.new_code_cell("fig = plotter.plot_densities(show=False)"),

        nbf.new_heading_cell("Local potential and $l$-dependent potentials:"),
        nbf.new_code_cell("fig = plotter.plot_potentials(show=False)"),

        # nbf.new_heading_cell("1-st order derivative of $v_l$ and $v_{loc}$ computed via finite differences:"),
        # nbf.new_code_cell("""fig = plotter.plot_der_potentials(order=1, show=False)"""),
        # nbf.new_heading_cell("2-nd order derivative of $v_l$ and $v_{loc}$ computed via finite differences:"),
        # nbf.new_code_cell("""fig = plotter.plot_der_potentials(order=2, show=False)"""),

        nbf.new_heading_cell("Model core charge and form factors computed by ABINIT"),
        nbf.new_code_cell("""\
with pseudo.open_pspsfile() as psps:
    psps.plot()"""),

        nbf.new_heading_cell("Convergence of the total energy:"),
        nbf.new_code_cell("""\
# Convergence of the total energy (computed from the deltafactor runs with Wien2K equilibrium volume)
fig = report.plot_etotal_vs_ecut(show=False)"""),

        nbf.new_heading_cell("Convergence of the deltafactor results:"),
        nbf.new_code_cell("""fig = report.plot_deltafactor_convergence(what=("dfact_meV", "dfactprime_meV"), show=False)"""),

        nbf.new_heading_cell("Convergence of $\Delta v_0$, $\Delta b_0$, and $\Delta b_1$ (deltafactor tests)"),
        nbf.new_code_cell("""\
# Here we plot the difference wrt Wien2k results.
fig = report.plot_deltafactor_convergence(what=("-dfact_meV", "-dfactprime_meV"), show=False)"""),

        nbf.new_heading_cell("deltafactor EOS for the different cutoff energies:"),
        nbf.new_code_cell("fig = report.plot_deltafactor_eos(show=False)"),

        nbf.new_heading_cell("Convergence of the GBRV lattice parameters:"),
        nbf.new_code_cell("fig = report.plot_gbrv_convergence(show=False)"),

        nbf.new_heading_cell("Convergence of phonon frequencies at $\Gamma$:"),
        nbf.new_code_cell("fig = report.plot_phonon_convergence(show=False)"),

        # nbf.new_heading_cell("Comparison with the other pseudos in this table"),
        # nbf.new_code_cell("""\
        # from pseudo_dojo import get_pseudos
        # pseudos = get_pseudos(".")
        # if len(pseudos) > 1:
        #     pseudos.dojo_compare()"""),
    ]

    if with_eos:
        # Add EOS plots.
        # BUGFIX: `cells` is a list; the original code called cells.update(),
        # which raised AttributeError whenever with_eos was True.
        cells.extend([
            nbf.new_heading_cell("GBRV EOS for the FCC structure:"),
            nbf.new_code_cell("""fig = report.plot_gbrv_eos(struct_type="fcc", show=False)"""),
            nbf.new_heading_cell("GBRV EOS for the BCC structure:"),
            nbf.new_code_cell("""fig = report.plot_gbrv_eos(struct_type="bcc", show=False)"""),
        ])

    # Now that we have the cells, we can make a worksheet with them and add it to the notebook:
    nb["worksheets"].append(nbf.new_worksheet(cells=cells))

    # Next, we write it to a file on disk that we can then open as a new notebook.
    # Note: This should be as easy as: nbf.write(nb, fname), but the current api is
    # a little more verbose and needs a real file-like object.
    if tmpfile is None:
        root, ext = os.path.splitext(pseudopath)
        nbpath = root + ".ipynb"
    else:
        import tempfile
        _, nbpath = tempfile.mkstemp(suffix=".ipynb", text=True)

    with open(nbpath, "wt") as f:
        nbf.write(nb, f, "ipynb")

    return nbpath
def visit_title(self, node):
    """Append a heading cell built from the title node's text."""
    cell = nbformat.new_heading_cell(source=node.astext())
    self.add_cell(cell)