def diff_code(self, revision1, revision2):
    """Diff this notebook between two VCS revisions and register the result.

    Loads the notebook at each revision, computes a structural diff with
    ``notebook_diff`` and adds the diff notebook to the global ``app``
    registry under a descriptive placeholder name.
    """
    nb1 = self._read_notebook_at(revision1)
    nb2 = self._read_notebook_at(revision2)
    result_nb = notebook_diff(nb1, nb2)
    filename_placeholder = "{}-{}: {} and {}".format(
        self.keeper.project_name, self.notebook, revision1, revision2)
    app.add_notebook(result_nb, filename_placeholder)
    # print() call form is valid on both Python 2 and 3 (the original used
    # a Python 2 print statement, a syntax error under Python 3).
    print("This diff have number {}".format(len(app.notebooks) - 1))

def _read_notebook_at(self, revision):
    """Check out *revision* and parse this wrapper's notebook file."""
    self.keeper._checkout(revision)
    path = os.path.join(
        self.keeper.work_dir,
        self.keeper.project_name,
        self.keeper.project_config['internal-path'],
        self.notebook,
        "notebook.ipynb")
    # The original opened the file without closing it; the context manager
    # fixes the handle leak.
    with open(path, "r") as f:
        return current.reads(f.read(), "ipynb")
def test_running_renamed():
    "running a renamed notebook file renames its internal metadata"
    newname = prefix + 'renamed_notebook'
    shutil.copy('simple_notebook.ipynb', newname + '.ipynb')
    # `file()` was removed in Python 3; `open()` works on both versions.
    with open(newname + '.ipynb') as nbf:
        nb = reads(nbf.read(), 'ipynb')
    # Before running, the embedded name must not match the new filename.
    npt.assert_(nb.metadata.name != newname)
    os.system('nbrun -q ' + newname + '.ipynb')
    with open(newname + '.ipynb') as nbf:
        nb = reads(nbf.read(), 'ipynb')
    # Running the notebook should have rewritten its metadata name.
    npt.assert_(nb.metadata.name == newname)
def test_running_renamed():
    "running a renamed notebook file renames its internal metadata"
    newname = prefix + "renamed_notebook"
    shutil.copy("simple_notebook.ipynb", newname + ".ipynb")
    # `file()` was removed in Python 3; `open()` works on both versions.
    with open(newname + ".ipynb") as nbf:
        nb = reads(nbf.read(), "ipynb")
    # Before running, the embedded name must not match the new filename.
    npt.assert_(nb.metadata.name != newname)
    os.system("nbrun -q " + newname + ".ipynb")
    with open(newname + ".ipynb") as nbf:
        nb = reads(nbf.read(), "ipynb")
    # Running the notebook should have rewritten its metadata name.
    npt.assert_(nb.metadata.name == newname)
def _parse_notebook(self, s):
    """Parse notebook JSON source *s* into a notebook node.

    Raises a 500 HTTPError when the payload is not readable notebook JSON.
    """
    try:
        # v1 and v2 and json in the .ipynb files.
        nb = current.reads(s, u'json')
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        raise web.HTTPError(500, u'Unreadable JSON notebook.')
    return nb
def process(self, request, filename, db_session):
    """Build a file-download response containing the merged notebook.

    Reads the posted notebook JSON, normalizes its name, re-serializes it
    with IPython's current formatter, and returns it as an attachment.
    """
    # format for notebook.
    format = u'json'
    data = request.form['download_data']
    try:
        # read notebook and format it.
        nb = current.reads(data.decode('utf-8'), format)
    except Exception:
        # Narrowed from a bare `except:`; still a best-effort message.
        return "Unable to save notebook. Invalid JSON data"
    # if notebook has a name we use it else use a generic name
    try:
        name = nb.metadata.name
    except AttributeError:
        name = "mergedNotebook"
    nb.metadata.name = name
    name = normalize('NFC', nb.metadata.name)
    # uses ipython's current ipynb formatting.
    notebook_formatted = current.writes(nb, format)
    # make a file download response
    response = make_response(notebook_formatted)
    header = "attachment; filename=mergedNotebook.ipynb"
    response.headers["Content-Type"] = "text/plain"
    response.headers["Content-Disposition"] = header
    return response
def run_and_save(src_notebook, dst_notebook, verbose=False, **kwargs):
    """Execute a notebook and write the executed copy to a new file.

    Params
    ------
    src_notebook : file path of the source notebook
    dst_notebook : file path of the location to save the executed notebook
    verbose : set to true for printed status messages
    kwargs : passed to `run_notebook`
    """
    def _status(message):
        # Centralize the verbosity check instead of repeating it.
        if verbose:
            print(message)

    _status("Running %s" % src_notebook)
    with open(src_notebook) as src:
        notebook = reads(src.read(), 'json')
    n_errors = run_notebook(notebook, **kwargs)
    _status("\tNumber of errors: %d" % n_errors)
    _status("\tSaving to destination %s" % dst_notebook)
    with io.open(dst_notebook, 'w', encoding='utf8') as dst:
        write(notebook, dst, 'json')
    _status("\tDone!")
    return n_errors
def save_new_notebook(self, data, name=None, format=u'json'):
    """Save a new notebook and return its notebook_id.

    If a name is passed in, it overrides any values in the notebook data
    and the value in the data is updated to use that value.
    """
    if format not in self.allowed_formats:
        raise web.HTTPError(415, u'Invalid notebook format: %s' % format)
    try:
        nb = current.reads(data.decode('utf-8'), format)
    except Exception:
        # Narrowed from a bare `except:`.
        raise web.HTTPError(400, u'Invalid JSON data')
    if name is None:
        try:
            name = nb.metadata.name
        except AttributeError:
            raise web.HTTPError(400, u'Missing notebook name')
    nb.metadata.name = name
    path = os.path.join(self.notebook_dir, name + self.filename_ext)
    # FIXME(review): `backend` was hard-coded to None in the original, so
    # the call below raises AttributeError.  The backend must be resolved
    # from the manager — confirm the intended lookup with the author.
    # (A leftover Python 2 debug `print` statement was also removed here.)
    backend = None
    nbo = backend.new_notebook_object(path)
    notebook_id = self.new_notebook_id(nbo)
    self.save_notebook_object(notebook_id, nb)
    return notebook_id
def collect(self):
    """Yield an IPyNbCell for each runnable code cell in the notebook."""
    with self.fspath.open() as f:
        self.nb = reads(f.read(), 'json')

    # Prefixes that exclude a code cell from collection: cell magics and
    # cells tagged with the ignore-output comment.
    skip_prefixes = ('%%',
                     '# PYTEST_VALIDATE_IGNORE_OUTPUT',
                     '#PYTEST_VALIDATE_IGNORE_OUTPUT')

    cell_num = 0
    # Currently there is only 1 worksheet (newer IPython versions are
    # expected to drop the worksheet level entirely).
    for worksheet in self.nb.worksheets:
        for cell in worksheet.cells:
            # Text/heading cells are never tested.
            if cell.cell_type != 'code':
                continue
            # NOTE: This actually skips execution, which probably isn't
            # what we want!  It is typically helpful to execute the cell
            # (to make sure that at least the code doesn't fail) but then
            # discard the result.
            if cell.input.startswith(skip_prefixes):
                # Skipped cells are not counted.
                continue
            yield IPyNbCell(self.name, self, cell_num, cell)
            # Update 'code' cell count.
            cell_num += 1
def collect(self):
    """Yield an IPyNbCell for each runnable code cell in the notebook."""
    with self.fspath.open() as f:
        self.nb = reads(f.read(), 'json')

    def _is_ignored(source):
        # Cell magics and ignore-output tagged cells are not collected.
        # NOTE: This actually skips execution, which probably isn't what
        # we want!  It is typically helpful to execute the cell (to make
        # sure that at least the code doesn't fail) but then discard the
        # result.
        return (source.startswith('%%')
                or source.startswith(r'# PYTEST_VALIDATE_IGNORE_OUTPUT')
                or source.startswith(r'#PYTEST_VALIDATE_IGNORE_OUTPUT'))

    counter = 0
    # Currently there is only 1 worksheet (it seems newer IPython versions
    # will drop the worksheet level).
    for worksheet in self.nb.worksheets:
        for cell in worksheet.cells:
            if cell.cell_type != 'code':
                # Only code cells are tested.
                continue
            if _is_ignored(cell.input):
                # Skipped cells are not counted.
                continue
            yield IPyNbCell(self.name, self, counter, cell)
            counter += 1
def get_notebook(self, name, path='', content=True):
    """Takes a path and name for a notebook and returns its model.

    Parameters
    ----------
    name : str
        the name of the notebook
    path : str
        the URL path that describes the relative path for the notebook

    Returns
    -------
    model : dict
        the notebook model. If contents=True, returns the 'contents' dict
        in the model as well.
    """
    if not self.notebook_exists(name=name, path=path):
        raise web.HTTPError(404, u'Notebook does not exist: %s' % name)
    os_path = self._get_os_path(name, path)
    key = self.bucket.get_key(os_path)
    # The S3 key exposes no separate creation stamp, so both timestamp
    # fields use last_modified.
    model = {
        'name': name,
        'path': path,
        'last_modified': key.last_modified,
        'created': key.last_modified,
        'type': 'notebook',
    }
    if content:
        nb = current.reads(key.get_contents_as_string(), u'json')
        self.mark_trusted_cells(nb, name, path)
        model['content'] = nb
    return model
def load(ipynb, format='json'):
    """Read the notebook file at *ipynb* and return the parsed node."""
    with open(ipynb, 'r') as handle:
        return reads(handle.read(), 'json')
def __iter__(self):
    """Yield (path, parsed node) for every non-generated .ipynb file."""
    for entry in os.listdir(self.notebook_dir):
        # Skip non-notebooks and anything marked as generated output.
        if not entry.endswith('.ipynb') or 'generated' in entry:
            continue
        full_path = os.path.join(self.notebook_dir, entry)
        with open(full_path, 'r') as handle:
            yield full_path, reads(handle.read(), 'json')
def load_notebook(self, filename):
    """Best-effort load: return the parsed notebook, or None on failure."""
    try:
        with open(filename) as f:
            nb = reads(f.read(), 'json')
    except Exception:
        # Narrowed from a bare `except:`; still deliberately best-effort,
        # but no longer traps KeyboardInterrupt/SystemExit.
        return None
    else:
        return nb
def save_new_notebook(self, data, name=None, format=u'json'):
    """Save a new notebook and return its notebook_id.

    Only the JSON format is supported by the Django-backed manager.
    """
    if format != 'json':
        raise Exception('Only supporting JSON in Django backed notebook')
    n = Notebook()
    n.id = str(uuid.uuid4())
    # Parse once; both branches need the notebook node (the original
    # called current.reads in each branch).
    nb = current.reads(data.decode('utf-8'), format)
    if name is not None:
        # An explicit name wins and is written back into the stored data.
        n.name = name
        nb.metadata.name = name
        data = current.writes(nb, format)
    else:
        n.name = nb.metadata.name
    n.content = data
    n.save(force_insert=True)
    self._archive(n)
    return n.id
def read_notebook_object(self, notebook_id):
    """Get the Notebook representation of a notebook by notebook_id."""
    doc = self.collection.find_one({'_id': notebook_id})
    if doc is None:
        # BUG FIX: the original format string was u'Notebook % not found',
        # which raises ValueError on interpolation; '%s' was intended.
        raise web.HTTPError(500, u'Notebook %s not found' % notebook_id)
    # Convert from MongoDB doc to plain JSON and then convert to notebook format
    jsonnb = dumps(doc['ipynb'])
    nb = current.reads(jsonnb, u'json')
    last_modified = dateutil.parser.parse(doc['created'])
    return last_modified, nb
def test_nooutput(nb_path):
    """Ensure that no cells have output."""
    # Inspired by gist.github.com/minrk/3719849
    with open(nb_path) as fh:
        notebook = current.reads(fh.read(), 'json')
    code_cells = (cell
                  for ws in notebook.worksheets
                  for cell in ws.cells
                  if cell.cell_type == 'code')
    for cell in code_cells:
        assert cell.outputs == []
def collect(self):
    """Yield an IPyNbCell for every code cell, numbered in order."""
    with self.fspath.open() as f:
        self.notebook_folder = self.fspath.dirname
        self.nb = reads(f.read(), 'json')
    self.runner = NotebookRunner(self.nb)
    # enumerate replaces the manual cell counter.
    for cell_num, cell in enumerate(self.runner.iter_code_cells()):
        yield IPyNbCell(self.name, self, cell_num, cell)
def collect(self):
    """Yield an IPyNbCell per code cell across all worksheets."""
    with self.fspath.open() as f:
        self.nb = reads(f.read(), 'json')
    code_cells = (cell
                  for ws in self.nb.worksheets
                  for cell in ws.cells
                  if cell.cell_type == "code")
    # enumerate replaces the manual counter.
    for cell_num, cell in enumerate(code_cells):
        yield IPyNbCell(self.name, self, cell_num, cell)
def test_notebooks():
    """Run every .ipynb under ./notebooks and fail on execution errors."""
    notebooks_path = 'notebooks'
    # os.path.join is portable; the original concatenated '/' by hand.
    notebooks_filenames = [os.path.join(notebooks_path, name)
                           for name in os.listdir(notebooks_path)
                           if name.endswith('.ipynb')]
    for ipynb in notebooks_filenames:
        print("testing %s" % ipynb)
        with open(ipynb) as f:
            nb = reads(f.read(), 'json')
        run_notebook(nb)
def save_notebook(self, notebook_id, data, name=None, format=u'json'):
    """Save an existing notebook by notebook_id."""
    if format != 'json':
        raise Exception('Only supporting JSON in Django backed notebook')
    n = Notebook.objects.get(id=notebook_id)
    # Idiomatic truthiness test (was `n.archive == True`).
    if n.archive:
        raise Exception('Cannot update archived copy')
    # update copy — parse once; both branches need the notebook node.
    nb = current.reads(data.decode('utf-8'), format)
    if name is not None:
        # An explicit name wins and is written back into the stored data.
        n.name = name
        nb.metadata.name = name
        data = current.writes(nb, format)
    else:
        n.name = nb.metadata.name
    n.content = data
    n.save()
    self._archive(n)
    return n.id
def get_notebook_object(self, path):
    """Return (last_modified, notebook node) for the gist file at *path*."""
    last_modified = self.gist.updated_at
    filename = os.path.basename(path)
    content = self.get_notebook(filename)
    # v1 and v2 and json in the .ipynb files.  The original wrapped this
    # in `try: ... except: raise`, which is a no-op and has been removed.
    nb = current.reads(content, u'json')
    # Always use the filename as the notebook name.
    nb.metadata.name = filename
    return last_modified, nb
def read_notebook_object_from_path(self, path):
    """read a notebook object from a path"""
    stat_info = os.stat(path)
    last_modified = datetime.datetime.utcfromtimestamp(stat_info.st_mtime)
    with open(path, 'r') as handle:
        raw = handle.read()
    try:
        # v1 and v2 and json in the .ipynb files.
        notebook = current.reads(raw, u'json')
    except Exception as e:
        raise web.HTTPError(500, u'Unreadable JSON notebook: %s' % e)
    return last_modified, notebook
def read_notebook_object_from_path(self, path):
    """read a notebook object from a path"""
    last_modified = datetime.datetime.utcfromtimestamp(os.stat(path).st_mtime)
    with open(path, 'r') as fh:
        source = fh.read()
    try:
        # v1 and v2 and json in the .ipynb files.
        nb = current.reads(source, u'json')
    except Exception as e:
        raise web.HTTPError(500, u'Unreadable JSON notebook: %s' % e)
    return last_modified, nb
def test_notebooks():
    """Execute each notebook in ./notebooks; errors surface via run_notebook."""
    notebooks_path = 'notebooks'
    # os.path.join instead of manual '/' concatenation (portable paths).
    notebooks_filenames = [os.path.join(notebooks_path, name)
                           for name in os.listdir(notebooks_path)
                           if name.endswith('.ipynb')]
    for ipynb in notebooks_filenames:
        print("testing %s" % ipynb)
        with open(ipynb) as f:
            nb = reads(f.read(), 'json')
        run_notebook(nb)
def read_notebook_object_from_path(self, path):
    """read a notebook object from a path"""
    mtime = os.stat(path).st_mtime
    last_modified = tz.utcfromtimestamp(mtime)
    with open(path, 'r') as handle:
        source = handle.read()
    try:
        # v1 and v2 and json in the .ipynb files.
        nb = current.reads(source, 'json')
    except ValueError as e:
        msg = "Unreadable Notebook: %s" % e
        raise web.HTTPError(400, msg, reason=msg)
    return last_modified, nb
def get_notebook_object(self, path):
    """Return (last_modified, notebook node) for the gist at *path*."""
    gist = self._get_gist_by_path(path)
    last_modified = gist.updated_at
    content = self.get_notebook(gist)
    # v1 and v2 and json in the .ipynb files.  The original guarded this
    # with `try: ... except: raise`, a no-op that has been dropped.
    nb = current.reads(content, u'json')
    # Always use the filename as the notebook name.
    name = self._gist_name(gist)
    nb.metadata.name = name
    return last_modified, nb
def save_notebook(self, notebook_id, data, name=None, format=u'json'):
    """Save an existing notebook by notebook_id."""
    if format not in self.allowed_formats:
        raise web.HTTPError(415)
    try:
        nb = current.reads(data, format)
    except Exception:
        # Narrowed from a bare `except:`.
        raise web.HTTPError(400)
    if name is not None:
        nb.metadata.name = name
    self.save_notebook_object(notebook_id, nb)
def read_notebook_object_from_path(self, path):
    """read a notebook object from a path"""
    last_modified = tz.utcfromtimestamp(os.stat(path).st_mtime)
    with open(path, 'r') as handle:
        contents = handle.read()
    try:
        # v1 and v2 and json in the .ipynb files.
        nb = current.reads(contents, u'json')
    except ValueError as e:
        raise RuntimeError(u"Unreadable Notebook: %s" % e)
    return last_modified, nb
def save_notebook(self, notebook_id, data, name=None, format=u'json'):
    """Save an existing notebook by notebook_id."""
    if format not in self.allowed_formats:
        raise web.HTTPError(415, u'Invalid notebook format: %s' % format)
    try:
        nb = current.reads(data.decode('utf-8'), format)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are not converted into a 400.
        raise web.HTTPError(400, u'Invalid JSON data')
    if name is not None:
        nb.metadata.name = name
    self.save_notebook_object(notebook_id, nb)
def save_notebook(self, notebook_id, data, name=None, format=u'json'):
    """Save an existing notebook by notebook_id."""
    if format not in self.allowed_formats:
        raise web.HTTPError(415, u'Invalid notebook format: %s' % format)
    try:
        nb = current.reads(data.decode('utf-8'), format)
    except Exception:
        # Narrowed from a bare `except:`.
        raise web.HTTPError(400, u'Invalid JSON data')
    if name is not None:
        nb.metadata.name = name
    self.write_notebook_object(nb, notebook_id)
def autosave_notebook(self, notebook_id, data, client_id, name=None, format=u'json'):
    """Autosave an existing notebook by notebook_id via its backend."""
    if format not in self.allowed_formats:
        raise web.HTTPError(415, u'Invalid notebook format: %s' % format)
    try:
        nb = current.reads(data.decode('utf-8'), format)
    except Exception:
        # Narrowed from a bare `except:`.
        raise web.HTTPError(400, u'Invalid JSON data')
    nbo = self.mapping[notebook_id]
    backend = nbo.backend
    backend.autosave_notebook(nb, nbo, client_id)
def run_nb_offline(nb_path):
    """ Read notebook from filepath and execute it; report errors in code cells.
    """
    if not os.path.isfile(nb_path):
        raise Exception('Invalid path: %s' % nb_path)
    with open(nb_path) as f:
        nb = reads(f.read(), 'json')
    logging.info("Running notebook %s" % nb.metadata.name)
    km = KernelManager()
    # Discard kernel stderr chatter.
    km.start_kernel(stderr=open(os.devnull, 'w'))
    try:
        kc = km.client()
    except AttributeError:
        # 0.13 — older IPython: the manager doubles as the client.
        kc = km
    kc.start_channels()
    shell = kc.shell_channel
    # simple ping: confirm the kernel answers before running cells.
    shell.execute("pass")
    shell.get_msg()
    cells = 0
    failures = 0
    for ws in nb.worksheets:
        for cell in ws.cells:
            if cell.cell_type != 'code':
                #ONLY RUN CODE CELLS
                continue
            shell.execute(cell.input)
            # wait for finish, maximum TIMEOUT
            reply = shell.get_msg(timeout=MAX_TIMEOUT)['content']
            if reply['status'] == 'error':
                failures += 1
                logging.info("\nNotebook FAILURE:")
                logging.info(cell.input)
                logging.info('-----')
                logging.info('raised:')
                logging.info('\n'.join(reply['traceback']))
            cells += 1
            # sys.stdout.write('.')
    logging.info("Finished running notebook")
    logging.info(" ran %3i cells" % cells)
    if failures:
        logging.warning(" %3i cells raised exceptions" % failures)
    # Tear down channels before the kernel itself.
    kc.stop_channels()
    km.shutdown_kernel()
    del km
def write_notebook(self, result, output='parsed-output.ipynb'):
    """Write *result* (an iterable of code strings) to an .ipynb file.

    Based on a StackOverflow question and answer:
    http://stackoverflow.com/questions/17972273/is-there-a-ipython-notebook-api
    """
    # Start from an empty notebook parsed from the py format.
    notebook = current.reads('', format='py')
    # One code cell per result element (comprehension instead of map()).
    notebook['worksheets'][0]['cells'] = [current.new_code_cell(item)
                                          for item in result]
    # Persist as ipynb JSON.
    with io.open(output, 'w', encoding='utf-8') as f:
        current.write(notebook, f, format='ipynb')
def get_notebook(self, notebook_id, format=u'json'):
    """Get the representation of a notebook in format by notebook_id."""
    if format != 'json':
        raise Exception('Only supporting JSON in Django backed notebook')
    n = Notebook.objects.get(id=notebook_id)
    # we want more informative names for archived notebooks
    nb = current.reads(n.content, format)
    nb.metadata.name = DjangoNotebookManager._name(n)
    # Only JSON reaches this point (guard above), so always serialize with
    # split_lines=False: don't split lines for sending over the wire,
    # because it should match the Python in-memory format.  (The original
    # re-tested `format == 'json'` redundantly to build kwargs.)
    n.content = current.writes(nb, format, split_lines=False)
    return n.updated_on, DjangoNotebookManager._name(n), n.content
def get_notebook_object(self, path):
    """Return (last_modified, notebook node) for the file at *path*."""
    if not os.path.isfile(path):
        raise web.HTTPError(404, u'Notebook does not exist')
    info = os.stat(path)
    last_modified = datetime.datetime.utcfromtimestamp(info.st_mtime)
    with open(path, 'r') as f:
        s = f.read()
    try:
        # v1 and v2 and json in the .ipynb files.
        nb = current.reads(s, u'json')
    except Exception:
        # Narrowed from a bare `except:`.
        raise web.HTTPError(500, u'Unreadable JSON notebook.')
    # Always use the filename as the notebook name.
    nb.metadata.name = os.path.splitext(os.path.basename(path))[0]
    return last_modified, nb
def run_nb_offline(nb_path):
    """ Read notebook from filepath and execute it; report errors in code cells. """
    if not os.path.isfile(nb_path):
        raise Exception("Invalid path: %s" % nb_path)
    with open(nb_path) as f:
        nb = reads(f.read(), "json")
    logging.info("Running notebook %s" % nb.metadata.name)
    km = KernelManager()
    # Discard kernel stderr chatter.
    km.start_kernel(stderr=open(os.devnull, "w"))
    try:
        kc = km.client()
    except AttributeError:
        # 0.13 — older IPython: the manager doubles as the client.
        kc = km
    kc.start_channels()
    shell = kc.shell_channel
    # simple ping: confirm the kernel answers before running cells.
    shell.execute("pass")
    shell.get_msg()
    cells = 0
    failures = 0
    for ws in nb.worksheets:
        for cell in ws.cells:
            if cell.cell_type != "code":
                # ONLY RUN CODE CELLS
                continue
            shell.execute(cell.input)
            # wait for finish, maximum TIMEOUT
            reply = shell.get_msg(timeout=MAX_TIMEOUT)["content"]
            if reply["status"] == "error":
                failures += 1
                logging.info("\nNotebook FAILURE:")
                logging.info(cell.input)
                logging.info("-----")
                logging.info("raised:")
                logging.info("\n".join(reply["traceback"]))
            cells += 1
            # sys.stdout.write('.')
    logging.info("Finished running notebook")
    logging.info(" ran %3i cells" % cells)
    if failures:
        logging.warning(" %3i cells raised exceptions" % failures)
    # Tear down channels before the kernel itself.
    kc.stop_channels()
    km.shutdown_kernel()
    del km
def get_notebook_object(self, notebook_id):
    """Get the NotebookNode representation of a notebook by notebook_id."""
    path = self.find_path(notebook_id)
    if not os.path.isfile(path):
        raise web.HTTPError(404, u'Notebook does not exist: %s' % notebook_id)
    info = os.stat(path)
    last_modified = datetime.datetime.utcfromtimestamp(info.st_mtime)
    with open(path, 'r') as f:
        s = f.read()
    try:
        # v1 and v2 and json in the .ipynb files.
        nb = current.reads(s, u'json')
    except Exception:
        # Narrowed from a bare `except:`.
        raise web.HTTPError(500, u'Unreadable JSON notebook.')
    if 'name' not in nb:
        # Fall back to the file name (up to the first dot).
        nb.name = os.path.split(path)[-1].split(u'.')[0]
    return last_modified, nb
def read_notebook_object(self, notebook_id):
    """Get the object representation of a notebook by notebook_id."""
    if not self.notebook_exists(notebook_id):
        raise web.HTTPError(404, NB_DNEXIST_ERR.format(notebook_id))
    try:
        obj = self.container.get_object(notebook_id)
        # Read in the entire notebook file into s
        s = obj.get()
    except Exception:
        # Narrowed from a bare `except:`.
        raise web.HTTPError(500, 'Notebook cannot be read.')
    try:
        nb = current.reads(s, 'json')
    except Exception:
        # Narrowed from a bare `except:`.
        raise web.HTTPError(500, 'Unreadable JSON notebook.')
    last_modified = utcnow()
    return last_modified, nb
def get_notebook_object(self, notebook_id):
    """Get the NotebookNode representation of a notebook by notebook_id."""
    path = self.find_path(notebook_id)
    if not os.path.isfile(path):
        raise web.HTTPError(404, u'Notebook does not exist: %s' % notebook_id)
    info = os.stat(path)
    last_modified = datetime.datetime.utcfromtimestamp(info.st_mtime)
    with open(path, 'r') as f:
        s = f.read()
    try:
        # v1 and v2 and json in the .ipynb files.
        nb = current.reads(s, u'json')
    except Exception:
        # Narrowed from a bare `except:`.
        raise web.HTTPError(500, u'Unreadable JSON notebook.')
    if 'name' not in nb:
        # Default the name from the file name (up to the first dot).
        nb.name = os.path.split(path)[-1].split(u'.')[0]
    return last_modified, nb
def read_notebook_object(self, notebook_id):
    """Get the object representation of a notebook by notebook_id."""
    if not self.notebook_exists(notebook_id):
        raise web.HTTPError(404, u'Notebook does not exist: %s' % notebook_id)
    try:
        s = self.blob_service.get_blob(self.container, notebook_id)
    except Exception:
        # Narrowed from a bare `except:`.
        raise web.HTTPError(500, u'Notebook cannot be read.')
    try:
        # v1 and v2 and json in the .ipynb files.
        nb = current.reads(s, u'json')
    except Exception:
        # Narrowed from a bare `except:`.
        raise web.HTTPError(500, u'Unreadable JSON notebook.')
    # Todo: The last modified should actually be saved in the notebook document.
    # We are just using the current datetime until that is implemented.
    last_modified = tz.utcnow()
    return last_modified, nb
def read_notebook_object(self, notebook_id):
    """Get the object representation of a notebook by notebook_id (S3)."""
    if not self.notebook_exists(notebook_id):
        raise web.HTTPError(404, u'Notebook does not exist: %s' % notebook_id)
    try:
        key = self.bucket.get_key(self.s3_prefix + notebook_id)
        s = key.get_contents_as_string()
    except Exception:
        # Narrowed from a bare `except:`.
        raise web.HTTPError(500, u'Notebook cannot be read.')
    try:
        # v1 and v2 and json in the .ipynb files.
        nb = current.reads(s, u'json')
    except Exception:
        # Narrowed from a bare `except:`.
        raise web.HTTPError(500, u'Unreadable JSON notebook.')
    # Todo: The last modified should actually be saved in the notebook document.
    # We are just using the current datetime until that is implemented.
    last_modified = datetime.datetime.utcnow()
    return last_modified, nb
def read_notebook_object(self, notebook_id):
    """Get the object representation of a notebook by notebook_id (Shock)."""
    if not self.notebook_exists(notebook_id):
        raise web.HTTPError(404, u'Notebook does not exist: %s' % notebook_id)
    try:
        node_path = '/%s?download' % self.shock_map[notebook_id]['id']
        node_data = self._get_shock_node(node_path, 'data')
    except Exception:
        # Narrowed from a bare `except:`.
        raise web.HTTPError(500, u'Notebook cannot be read')
    try:
        # v1 and v2 and json in the .ipynb files.
        nb = current.reads(node_data, u'json')
    except Exception:
        # Narrowed from a bare `except:`.
        raise web.HTTPError(500, u'Unreadable JSON notebook.\n%s' % node_data)
    dt = self.shock_map[notebook_id]['attributes']['created']
    # Parse the stored creation stamp; fall back to "now" when missing.
    last_modified = dateutil.parser.parse(dt) if dt else datetime.datetime.utcnow().isoformat()
    return last_modified, nb
def get_notebook(self, name, path='', content=True):
    """Takes a path and name for a notebook and returns its model.

    Parameters
    ----------
    name : str
        the name of the notebook
    path : str
        the URL path that describes the relative path for the notebook

    Returns
    -------
    model : dict
        the notebook model. If contents=True, returns the 'contents' dict
        in the model as well.
    """
    path = path.strip('/')
    if not self.notebook_exists(name=name, path=path):
        raise web.HTTPError(404, u'Notebook does not exist: %s' % name)
    gist = self._get_gist(name, path)
    # Create the notebook model.
    model = {
        'name': name,
        'path': path,
        'last_modified': gist.updated_at,
        'created': gist.created_at,
        'type': 'notebook',
    }
    if content:
        raw = gist.notebook_content
        try:
            nb = current.reads(raw, u'json')
        except Exception as e:
            raise web.HTTPError(400, u"Unreadable Notebook: %s %s %s" % (path, name, e))
        self.mark_trusted_cells(nb, path, name)
        # add gist id if public.
        if gist.public:
            nb['metadata']['gist_id'] = gist.id
        model['content'] = nb
    return model
def read_notebook_object(self, notebook_id):
    """Get the object representation of a notebook by notebook_id (S3 Key)."""
    if not self.notebook_exists(notebook_id):
        raise web.HTTPError(404, u'Notebook does not exist: %s' % notebook_id)
    try:
        # v1 and v2 and json in the .ipynb files.
        bucket = self.s3_conn.get_bucket(self.__bucket_name)
        k = Key(bucket)
        k.key = notebook_id
        data = k.get_contents_as_string()
        #self.log.info("downloaded contents: %s" % (data,))
    except Exception:
        # Narrowed from a bare `except:`.
        raise web.HTTPError(500, u'Couldn\'t pull out of s3.')
    try:
        nb = current.reads(data, u'json')
    except Exception:
        # Narrowed from a bare `except:`.
        raise web.HTTPError(500, u'Unreadable JSON notebook.')
    # Todo: The last modified should actually be saved in the notebook document.
    # We are just using the current datetime until that is implemented.
    last_modified = datetime.datetime.utcnow()
    return last_modified, nb
def save_new_notebook(self, data, name=None, format=u'json'):
    """Save a new notebook and return its notebook_id.

    If a name is passed in, it overrides any values in the notebook data
    and the value in the data is updated to use that value.
    """
    if format not in self.allowed_formats:
        raise web.HTTPError(415, u'Invalid notebook format: %s' % format)
    try:
        nb = current.reads(data.decode('utf-8'), format)
    except Exception:
        # Narrowed from a bare `except:`.
        raise web.HTTPError(400, u'Invalid JSON data')
    if name is None:
        try:
            name = nb.metadata.name
        except AttributeError:
            raise web.HTTPError(400, u'Missing notebook name')
    nb.metadata.name = name
    notebook_id = self.write_notebook_object(nb)
    return notebook_id
def _process_launchers(self, lvals):
    """ Saves the launch specifiers together in an IPython notebook for
    convenient viewing. Only offered as an option if IPython is available. """
    from IPython.nbformat import current
    notebook_dir = os.environ.get('LANCET_NB_DIR', None)
    notebook_dir = notebook_dir if notebook_dir else os.getcwd()
    if self.input_options(['y', 'N'], 'Save IPython notebook?', default='n') == 'y':
        print('Notebook directory ($LANCET_NB_DIR): %s' % notebook_dir)
        isdir = False
        # Prompt until the user supplies a filename in an existing directory.
        while not isdir:
            fname = raw_input('Filename: ').replace(' ', '_')
            fname = fname if fname.endswith('.ipynb') else fname + '.ipynb'
            nb_path = os.path.abspath(os.path.join(notebook_dir, fname))
            isdir = os.path.isdir(os.path.split(nb_path)[0])
            if not isdir:
                print('Invalid directory %s' % os.path.split(nb_path)[0])
        # Cell markers understood by the py -> ipynb reader.
        ccell = '\n# <codecell>\n'
        mcell = '\n# <markdowncell>\n'
        header = ['# -*- coding: utf-8 -*-', '# <nbformat>3.0</nbformat>']
        prelude = ['from lancet import *', '%load_ext lancet.ipython']
        header_str = '\n'.join(header) + ccell + ccell.join(prelude)
        # One code cell per launch specifier repr...
        html_reprs = [
            ccell + '(%r)' % lval[0].arg_specifier for lval in lvals
        ]
        # ...each preceded by a markdown heading naming the launch index.
        zipped = [(mcell + '# #### Launch %d' % i, r)
                  for (i, r) in enumerate(html_reprs)]
        body_str = ''.join([val for pair in zipped for val in pair])
        node = current.reads(header_str + body_str, 'py')
        current.write(node, open(nb_path, 'w'), 'ipynb')
        print("Saved to %s " % nb_path)
continue failed = False for out, ref in zip(outs, cell.outputs): if not compare_outputs(out, ref): failed = True if failed: failures += 1 else: successes += 1 sys.stdout.write('.') print print "tested notebook %s" % nb.metadata.name print " %3i cells successfully replicated" % successes if failures: print " %3i cells mismatched output" % failures if errors: print " %3i cells failed to complete" % errors kc.stop_channels() km.shutdown_kernel() del km if __name__ == '__main__': for ipynb in sys.argv[1:]: print "testing %s" % ipynb with open(ipynb) as f: nb = reads(f.read(), 'json') test_notebook(nb)
def load_notebook(nb_path):
    """Parse and return the notebook stored at *nb_path*."""
    with open(nb_path) as handle:
        return current.reads(handle.read(), 'json')
def read_notebook(fname):
    """Read and parse the notebook file *fname*, returning the node."""
    with open(fname) as fh:
        # fh.read() replaces ''.join(fh.readlines()): same content,
        # without building an intermediate list of lines.
        notebook = nbformat.reads(fh.read(), format='ipynb')
    return notebook
# Convert every .py file under ./furtherFormulas/ into an .ipynb notebook,
# inserting a linkable markdown heading before each function definition
# (except in the three animation scripts, which stay plain code cells).
inputDirectoryPath = '/furtherFormulas/'
currentDirectory = os.getcwd()
pythonSuffix = '.py'
outputText = ''
inputDirectory = os.listdir(currentDirectory + inputDirectoryPath)
# Scripts whose defs should NOT get markdown anchors.
excludedScripts = ('3dAnimScatterPlotHdf5.py', '3dBarWTauRAnim.py', '3dBarChartAnim.py')
for fileName in inputDirectory:
    if fileName.endswith(pythonSuffix) and fileName != '__init__.py':
        outputText = '# <codecell>\r\n'
        outputFile = (currentDirectory + inputDirectoryPath +
                      fileName[:-len(pythonSuffix)] + '.ipynb')
        # Context manager replaces the manual open/close pair.
        with open(currentDirectory + inputDirectoryPath + fileName, "r") as f:
            lines = f.readlines()
        for line in lines:
            match = re.match(r'.*def (.*)\(.*:.*', line)
            if match is not None and fileName not in excludedScripts:
                # Anchor + styled heading so each def is linkable in the notebook.
                outputText += ('\r\n# <markdowncell>\r\n<a id=\'' + match.group(1) + '\'></a>' +
                               '<div style=\'font-size:1.7em;text-decoration:underline;font-weight:bold\'>' +
                               match.group(1) +
                               '</div>\r\n# <codecell>\r\n')
            outputText += line
        nb = nbf.reads(outputText, 'py')
        # print() call form (original used Python 2 print statements).
        print('out', outputFile)
        # The original leaked the output handle; close it deterministically.
        with open(outputFile, 'w') as out_handle:
            nbf.write(nb, out_handle, 'ipynb')
print('done')