def run_notebook(nb):
    """Execute every code cell of *nb* in a fresh kernel, updating it in place.

    Cell outputs and prompt numbers are written back into the notebook
    object; one '.' is printed per executed cell and a summary at the end.
    """
    km = KernelManager()
    # Silence kernel stderr.  NOTE(review): the devnull handle is never
    # closed explicitly -- it lives for the kernel's lifetime.
    km.start_kernel(stderr=open(os.devnull, 'w'))
    kc = km.client()
    kc.start_channels()
    try:
        kc.wait_for_ready()
    except AttributeError:
        # IPython < 3: no wait_for_ready(); poll kernel_info until the reply
        # arrives, then drain whatever startup noise landed on IOPub.
        kc.kernel_info()
        while True:
            msg = kc.get_shell_msg(block=True, timeout=30)
            if msg['msg_type'] == 'kernel_info_reply':
                break
        # Flush IOPub channel
        while True:
            try:
                msg = kc.get_iopub_msg(block=True, timeout=0.2)
            except Empty:
                break
    # simple ping:
    kc.execute("pass")
    kc.get_shell_msg()
    cells = 0
    failures = 0
    if hasattr(nb, 'worksheets'):
        # nobody uses more than 1 worksheet
        ws = nb.worksheets[0]
    else:
        # no more worksheet level in new format
        ws = nb
    for cell in ws.cells:
        if cell.cell_type != 'code':
            continue
        # run_cell() is defined elsewhere in this module; it returns the
        # produced outputs plus a failure flag for this cell.
        outputs, failed = run_cell(kc, cell)
        cell.outputs = outputs
        cell['prompt_number'] = cells
        failures += failed
        cells += 1
        sys.stdout.write('.')
        sys.stdout.flush()
    print()
    print("ran %3i cells" % cells)
    if failures:
        print(" %3i cells raised exceptions" % failures)
    kc.stop_channels()
    km.shutdown_kernel()
    del km
def run_notebook(nb, cell_timeout=60):
    """Execute every code cell of *nb* in a fresh kernel.

    Parameters
    ----------
    nb : notebook node
        Notebook whose ``cells`` are executed in order.
    cell_timeout : int, optional
        Seconds to wait for each cell's execute_reply before treating the
        cell as timed out (default 60).

    Returns
    -------
    (cells, failures) : tuple of int
        Number of code cells executed and number that raised.
    """
    km = KernelManager()
    km.start_kernel(stderr=open(os.devnull, 'w'))
    kc = km.client()
    kc.start_channels()
    cells = failures = 0
    try:
        for cell in nb.cells:
            if cell.cell_type != 'code':
                continue
            kc.execute(cell.source)
            # Wait to finish.  BUG FIX: the original called get_shell_msg()
            # with no timeout, which blocks forever -- the Empty handler
            # below was unreachable dead code.
            try:
                reply = kc.get_shell_msg(timeout=cell_timeout)['content']
            except Empty:
                reply = {'status': 'error',
                         'traceback': ["Cell execution timed out!"]}
            if reply['status'] == 'error':
                failures += 1
                print("\nFAILURE:")
                print(cell.source)
                print('-----')
                print("Raised:")
                print('\n'.join(reply['traceback']))
            cells += 1
            sys.stdout.write('.')
            sys.stdout.flush()
    finally:
        # Always reap the kernel, even if a cell raised unexpectedly.
        kc.stop_channels()
        km.shutdown_kernel()
    return cells, failures
def run_notebook(nb):
    """Execute all code cells of *nb* and report failures.

    Returns the number of cells whose execution raised an exception.
    """
    manager = KernelManager()
    manager.start_kernel(stderr=open(os.devnull, 'w'))
    client = manager.client()
    client.start_channels()
    # Warm-up ping so the kernel is responsive before real cells run.
    client.execute("pass")
    client.get_shell_msg()
    cells = failures = 0
    code_cells = (cell for ws in nb.worksheets
                  for cell in ws.cells
                  if cell.cell_type == 'code')
    for cell in code_cells:
        client.execute(cell.input)
        # Block for the execute_reply, at most 20 seconds per cell.
        reply = client.get_shell_msg(timeout=20)['content']
        if reply['status'] == 'error':
            failures += 1
            print("\nFAILURE:")
            print(cell.input)
            print('-----')
            print("raised:")
            print('\n'.join(reply['traceback']))
        cells += 1
        sys.stdout.write('.')
    print(" ran %3i cells" % cells)
    if failures:
        print(" %3i cells raised exceptions" % failures)
    client.stop_channels()
    manager.shutdown_kernel()
    del manager
    return failures
class GroovyMagics(Magics):
    """IPython cell magic that runs cells in a separate 'groovy' kernel and
    republishes its IOPub output on the host kernel's IOPub socket."""

    _execution_count = 1

    def stop_kernel(self):
        # Registered with atexit in start(); force-kills the groovy kernel.
        self.kc.stop_channels()
        self.km.shutdown_kernel(now=True)

    def __init__(self, shell):
        super(GroovyMagics, self).__init__(shell)
        # Kernel manager is created lazily on first cell execution.
        self.km = None

    def start(self):
        """Launch the groovy kernel and wait until it is ready."""
        self.km = KernelManager()
        self.km.kernel_name = 'groovy'
        self.km.start_kernel()
        atexit.register(self.stop_kernel)
        self.kc = self.km.client()
        self.kc.start_channels()
        try:
            self.kc.wait_for_ready()
            print("Groovy started successfully\n")
        except AttributeError:
            # Older jupyter_client without wait_for_ready().
            self._wait_for_ready_backport()

    def run_cell(self, line, code):
        """Execute *code* in the groovy kernel and forward its output."""
        if not self.km:
            self.start()
        self.kc.execute(code, allow_stdin=True)
        reply = self.kc.get_shell_msg()
        while True:
            try:
                msg = self.kc.get_iopub_msg(timeout=1)
                if msg['msg_type'] == 'status':
                    if msg['content']['execution_state'] == 'idle':
                        # Kernel went idle: the cell is done.
                        break
            except Empty:
                print("empty ?!")
                raise
            # Re-publish the groovy kernel's IOPub message as if it came
            # from the host kernel, preserving type/content/metadata.
            self.shell.kernel.session.send(
                self.shell.kernel.iopub_socket,
                msg['msg_type'],
                msg['content'],
                metadata=msg['metadata'],
                parent=self.shell.kernel._parent_header,
                ident=msg.get('comm_id'),
                buffers=msg['buffers'],
            )

    @cell_magic
    def groovy(self, line, cell):
        # Entry point for `%%groovy` cells.
        return self.run_cell(line, cell)
def get_client(cf, profile=None):
    """Return a :class:`KernelClient` connected to an already-running kernel.

    Usage:
    >>> kc = get_client('kernel-143a2687-f294-42b1-bdcb-6f1cc2f4cc87.json', 'dale')
    >>> data = kc.execute("'123'")
    >>> data
    {u'text/plain': u'123'}
    """
    path = find_connection_file(cf, profile=profile)
    manager = KernelManager(connection_file=path)
    manager.load_connection_file()
    return KernelClient(manager.client())
def start_new_kernel(startup_timeout=60, kernel_name='python', **kwargs):
    """Launch a fresh kernel and return its (manager, client) pair.

    Raises RuntimeError (re-raised from wait_for_ready) if the kernel does
    not become ready within *startup_timeout* seconds.
    """
    manager = KernelManager(kernel_name=kernel_name,
                            kernel_spec_manager=NbvalKernelspecManager())
    manager.start_kernel(**kwargs)
    client = manager.client()
    client.start_channels()
    try:
        client.wait_for_ready(timeout=startup_timeout)
    except RuntimeError:
        # Kernel never became ready: tear everything down before re-raising.
        client.stop_channels()
        manager.shutdown_kernel()
        raise
    return manager, client
def start_new_kernel(startup_timeout=60, kernel_name='python', spykernel=False, **kwargs):
    """Launch a fresh kernel and return its (manager, client) pair.

    When *spykernel* is true, the manager's kernel spec is swapped for a
    SpyderKernelSpec before the kernel process starts.
    """
    manager = KernelManager(kernel_name=kernel_name)
    if spykernel:
        manager._kernel_spec = SpyderKernelSpec()
    manager.start_kernel(**kwargs)
    client = manager.client()
    client.start_channels()
    try:
        client.wait_for_ready(timeout=startup_timeout)
    except RuntimeError:
        # Never became ready -- clean up the half-started kernel first.
        client.stop_channels()
        manager.shutdown_kernel()
        raise
    return manager, client
def start_new_kernel(startup_timeout=60, kernel_name='python', **kwargs):
    """Launch a fresh kernel and return its (manager, client) pair,
    logging the attempt and any startup failure."""
    logger.debug('Starting new kernel: "%s"' % kernel_name)
    manager = KernelManager(kernel_name=kernel_name,
                            kernel_spec_manager=NbvalKernelspecManager())
    manager.start_kernel(**kwargs)
    client = manager.client()
    client.start_channels()
    try:
        client.wait_for_ready(timeout=startup_timeout)
    except RuntimeError:
        logger.exception('Failure starting kernel "%s"', kernel_name)
        # Clean up the half-started kernel before propagating.
        client.stop_channels()
        manager.shutdown_kernel()
        raise
    return manager, client
class JVMKernelMagic:
    """Owns a KernelManager/client pair for a JVM-based kernel and forwards
    raw protocol messages to it."""

    def __init__(self, kernel_name, context):
        self.km = None           # KernelManager, created in start()
        self.kc = None           # blocking client for km
        self.comms = []          # NOTE(review): appears unused here -- confirm
        self.kernel_name = kernel_name
        self.context = context   # handed to the kernel via extra_arguments
        self.start()

    def start(self):
        """Launch the kernel and block until it reports ready."""
        self.km = KernelManager()
        self.km.kernel_name = self.kernel_name
        self.km.start_kernel(extra_arguments=[self.context])
        self.kc = self.km.client()
        self.kc.start_channels()
        self.kc.wait_for_ready()

    def stop_kernel(self):
        # Force-kill: now=True skips the polite shutdown request.
        self.kc.stop_channels()
        self.km.shutdown_kernel(now=True)

    def run_cell(self, code):
        # start() already ran in __init__; this guard only matters if the
        # manager was torn down externally.
        if not self.km:
            self.start()
        self.kc.execute(code, allow_stdin=True)

    def get_shell_msg(self):
        # Blocking read of the next shell-channel reply.
        return self.kc.get_shell_msg()

    def get_iopub_msg(self):
        # Non-fatal poll: returns None when IOPub is quiet for 1 second.
        try:
            msg = self.kc.get_iopub_msg(timeout=1)
            return msg
        except Empty:
            return None

    def pass_msg(self, msg_raw):
        """Re-wrap a raw JSON message with this client's session and send it
        on the shell channel."""
        msg_json = json.loads(msg_raw)
        content = msg_json['content']
        msg_type = msg_json['header']['msg_type']
        msg = self.kc.session.msg(msg_type, content)
        self.kc.shell_channel.send(msg)
        return None
def test_feature(kernel_name, feature):
    """Run one protocol-feature test against a live kernel.

    Loads ``provided.json`` (the request to send) and
    ``expected.schema.json`` (a JSON schema for the reply) from the
    *feature* directory, sends the request on the shell channel and
    validates the response.  Raises jsonschema.ValidationError on mismatch.
    """
    print("...{}".format(feature))
    manager = KernelManager(kernel_name=kernel_name)
    manager.start_kernel()
    client = manager.client()
    try:
        with open(join(feature, "provided.json")) as fp:
            provided = json.load(fp)
        with open(join(feature, "expected.schema.json")) as fp:
            expected = json.load(fp)
        msg = client.session.msg(provided['header']['msg_type'],
                                 provided['content'])
        client.shell_channel.send(msg)
        response = client.shell_channel.get_msg()
        jsonschema.validate(instance=response, schema=expected)
    finally:
        # BUG FIX: the original never shut the kernel down, leaking one
        # kernel process per feature (and all of them on a failed test).
        client.stop_channels()
        manager.shutdown_kernel()
def run_notebook(notebook):
    """Run the notebook.

    Executes every code cell in place via ``run_cell`` (defined elsewhere
    in this module), numbering prompts from 1 per worksheet.
    """
    kernel_manager = KernelManager()
    kernel_manager.start_kernel(stderr=open(os.devnull, 'w'))
    kernel_client = kernel_manager.client()
    kernel_client.start_channels()
    try:
        for sheet in notebook.worksheets:
            for (prompt_number, cell) in enumerate(sheet.cells, 1):
                if cell.cell_type != "code":
                    continue
                cell.outputs = run_cell(kernel_client, cell)
                cell.prompt_number = prompt_number
                # Mirror the prompt number onto a pyout output if present.
                if cell.outputs and cell.outputs[0]['output_type'] == 'pyout':
                    cell.outputs[0]["prompt_number"] = prompt_number
    finally:
        # BUG FIX: channels were never stopped (leaking channel threads)
        # and an exception in any cell leaked the kernel process entirely.
        kernel_client.stop_channels()
        kernel_manager.shutdown_kernel()
def km_from_string(s=''):
    """create kernel manager from IPKernelApp string
    such as '--shell=47378 --iopub=39859 --stdin=36778 --hb=52668' for IPython 0.11
    or just 'kernel-12345.json' for IPython 0.12

    Sets the module globals ``km``, ``kc`` and ``send`` as a side effect and
    returns the KernelManager (or None on connection failure).
    """
    try:
        import IPython
    except ImportError:
        raise ImportError("Could not find IPython. " + _install_instructions)
    # Import compatibility shims: prefer modern traitlets/jupyter_client,
    # fall back through successively older IPython layouts.
    try:
        from traitlets.config.loader import KeyValueConfigLoader
    except ImportError:
        # IPython <= 3.0
        from IPython.config.loader import KeyValueConfigLoader
    try:
        from jupyter_client.manager import KernelManager
        from jupyter_client.connect import find_connection_file
    except ImportError:
        # IPython <= 3.0
        try:
            from IPython.kernel import (
                KernelManager,
                find_connection_file,
            )
        except ImportError:
            # IPython < 1.0
            from IPython.zmq.blockingkernelmanager import BlockingKernelManager as KernelManager
            from IPython.zmq.kernelapp import kernel_aliases
            try:
                from IPython.lib.kernel import find_connection_file
            except ImportError:
                # < 0.12, no find_connection_file
                pass

    global km, kc, send

    s = s.replace('--existing', '')

    if 'connection_file' in KernelManager.class_trait_names():
        # 0.12 uses files instead of a collection of ports
        # include default IPython search path
        # filefind also allows for absolute paths, in which case the search
        # is ignored
        try:
            # XXX: the following approach will be brittle, depending on what
            # connection strings will end up looking like in the future, and
            # whether or not they are allowed to have spaces. I'll have to sync
            # up with the IPython team to address these issues -pi
            if '--profile' in s:
                k,p = s.split('--profile')
                k = k.lstrip().rstrip() # kernel part of the string
                p = p.lstrip().rstrip() # profile part of the string
                fullpath = find_connection_file(k,p)
            else:
                s = s.lstrip().rstrip();
                if(len(s) == 0):
                    # empty argument: connect to the most recent kernel
                    fullpath = find_connection_file()
                else:
                    fullpath = find_connection_file(s.lstrip().rstrip())
        except IOError as e:
            echo(":IPython " + s + " failed", "Info")
            echo("^-- failed '" + s + "' not found", "Error")
            return
        km = KernelManager(connection_file = fullpath)
        km.load_connection_file()
    else:
        # Pre-0.12: parse the explicit '--shell=... --iopub=...' port list.
        if s == '':
            echo(":IPython 0.11 requires the full connection string")
            return
        loader = KeyValueConfigLoader(s.split(), aliases=kernel_aliases)
        cfg = loader.load_config()['KernelApp']
        try:
            km = KernelManager(
                shell_address=(ip, cfg['shell_port']),
                sub_address=(ip, cfg['iopub_port']),
                stdin_address=(ip, cfg['stdin_port']),
                hb_address=(ip, cfg['hb_port']))
        except KeyError as e:
            echo(":IPython " +s + " failed", "Info")
            echo("^-- failed --"+e.message.replace('_port','')+" not specified", "Error")
            return

    try:
        kc = km.client()
    except AttributeError:
        # 0.13: the manager doubles as the client
        kc = km
    kc.start_channels()
    try:
        send = kc.execute
    except AttributeError:
        # < 3.0
        send = kc.shell_channel.execute

    #XXX: backwards compatibility for IPython < 0.13
    try:
        import inspect
        sc = kc.shell_channel
        num_oinfo_args = len(inspect.getargspec(sc.object_info).args)
        if num_oinfo_args == 2:
            # patch the object_info method which used to only take one argument
            klass = sc.__class__
            klass._oinfo_orig = klass.object_info
            klass.object_info = lambda s,x,y: s._oinfo_orig(x)
    except:
        pass

    #XXX: backwards compatibility for IPython < 1.0
    if not hasattr(kc, 'iopub_channel'):
        kc.iopub_channel = kc.sub_channel

    # now that we're connect to an ipython kernel, activate completion
    # machinery, but do so only for the local buffer if the user added the
    # following line the vimrc:
    #   let g:ipy_completefunc = 'local'
    vim.command("""
        if g:ipy_completefunc == 'global'
            set completefunc=CompleteIPython
        elseif g:ipy_completefunc == 'local'
            setl completefunc=CompleteIPython
        endif
        """)
    # also activate GUI doc balloons if in gvim
    vim.command("""
        if has('balloon_eval')
            set bexpr=IPythonBalloonExpr()
        endif
        """)
    set_pid()
    return km
def km_from_string(s=''):
    """create kernel manager from IPKernelApp string
    such as '--shell=47378 --iopub=39859 --stdin=36778 --hb=52668' for IPython 0.11
    or just 'kernel-12345.json' for IPython 0.12

    Sets the module globals ``km``, ``kc`` and ``send`` as a side effect and
    returns the KernelManager (or None on connection failure).
    """
    try:
        import IPython
    except ImportError:
        raise ImportError("Could not find IPython. " + _install_instructions)
    # Modified by BoPeng
    #
    #from IPython.config.loader import KeyValueConfigLoader
    from traitlets.config.loader import KeyValueConfigLoader
    try:
        # Updated by Bo Peng for module names
        from jupyter_client.manager import KernelManager
        from jupyter_client import find_connection_file
    except ImportError:
        # IPython < 1.0
        from IPython.zmq.blockingkernelmanager import BlockingKernelManager as KernelManager
        from IPython.zmq.kernelapp import kernel_aliases
        try:
            from IPython.lib.kernel import find_connection_file
        except ImportError:
            # < 0.12, no find_connection_file
            pass

    global km, kc, send

    s = s.replace('--existing', '')

    if 'connection_file' in KernelManager.class_trait_names():
        # 0.12 uses files instead of a collection of ports
        # include default IPython search path
        # filefind also allows for absolute paths, in which case the search
        # is ignored
        try:
            # XXX: the following approach will be brittle, depending on what
            # connection strings will end up looking like in the future, and
            # whether or not they are allowed to have spaces. I'll have to sync
            # up with the IPython team to address these issues -pi
            if '--profile' in s:
                k,p = s.split('--profile')
                k = k.lstrip().rstrip() # kernel part of the string
                p = p.lstrip().rstrip() # profile part of the string
                fullpath = find_connection_file(k,p)
            else:
                fullpath = find_connection_file(s.lstrip().rstrip())
        except IOError as e:
            echo(":IPython " + s + " failed", "Info")
            echo("^-- failed '" + s + "' not found", "Error")
            return
        km = KernelManager(connection_file = fullpath)
        km.load_connection_file()
    else:
        # Pre-0.12: parse the explicit '--shell=... --iopub=...' port list.
        if s == '':
            echo(":IPython 0.11 requires the full connection string")
            return
        loader = KeyValueConfigLoader(s.split(), aliases=kernel_aliases)
        cfg = loader.load_config()['KernelApp']
        try:
            km = KernelManager(
                shell_address=(ip, cfg['shell_port']),
                sub_address=(ip, cfg['iopub_port']),
                stdin_address=(ip, cfg['stdin_port']),
                hb_address=(ip, cfg['hb_port']))
        except KeyError as e:
            echo(":IPython " +s + " failed", "Info")
            echo("^-- failed --"+e.message.replace('_port','')+" not specified", "Error")
            return

    try:
        kc = km.client()
    except AttributeError:
        # 0.13: the manager doubles as the client
        kc = km
    kc.start_channels()
    try:
        send = kc.execute
    except AttributeError:
        # < 3.0
        send = kc.shell_channel.execute

    #XXX: backwards compatibility for IPython < 0.13
    try:
        import inspect
        sc = kc.shell_channel
        num_oinfo_args = len(inspect.getargspec(sc.object_info).args)
        if num_oinfo_args == 2:
            # patch the object_info method which used to only take one argument
            klass = sc.__class__
            klass._oinfo_orig = klass.object_info
            klass.object_info = lambda s,x,y: s._oinfo_orig(x)
    except Exception:
        pass

    #XXX: backwards compatibility for IPython < 1.0
    if not hasattr(kc, 'iopub_channel'):
        kc.iopub_channel = kc.sub_channel

    # now that we're connect to an ipython kernel, activate completion
    # machinery, but do so only for the local buffer if the user added the
    # following line the vimrc:
    #   let g:ipy_completefunc = 'local'
    vim.command("""
        if g:ipy_completefunc == 'global'
            set completefunc=CompleteIPython
        elseif g:ipy_completefunc == 'local'
            setl completefunc=CompleteIPython
        endif
        """)
    # also activate GUI doc balloons if in gvim
    vim.command("""
        if has('balloon_eval')
            set bexpr=IPythonBalloonExpr()
        endif
        """)
    set_pid()
    return km
class IPyKernel(object):
    """
    A simple wrapper class to run cells in an IPython Notebook.

    Notes
    -----
    - Use `with` construct to properly instantiate
    - IPython 3.0.0+ is assumed for this version
    """

    def __init__(self, nb_version=4, extra_arguments=None):
        # default timeout time is 60 seconds
        self.default_timeout = 60
        if extra_arguments is None:
            extra_arguments = []
        self.extra_arguments = extra_arguments
        self.nb_version = nb_version

    def __enter__(self):
        """Start the kernel, drain startup noise from IOPub, and return self."""
        self.km = KernelManager()
        self.km.start_kernel(
            extra_arguments=self.extra_arguments,
            stderr=open(os.devnull, 'w')
        )
        self.kc = self.km.client()
        self.kc.start_channels()
        self.iopub = self.kc.iopub_channel
        self.shell = self.kc.shell_channel
        # run %pylab inline, because some notebooks assume this
        # even though they shouldn't
        self.shell.send("pass")
        self.shell.get_msg()
        while True:
            try:
                self.iopub.get_msg(timeout=0.05)
            except Exception as e:
                # NOTE(review): matches the queue-empty exception by repr so it
                # works regardless of which Empty class the channel raises --
                # fragile; confirm against the installed jupyter_client.
                if repr(e) == 'Empty()':
                    break
                # we got a real error so raise it
                raise
        self.cmd_list = []   # (msg_id, source) per executed command
        self.msg_list = {}   # msg_id -> queued IOPub messages
        return self

    def clear(self):
        # Discard any pending IOPub messages.
        self.iopub.get_msgs()

    def execute(self, cmd):
        """Send *cmd* to the kernel; return the execution's msg_id."""
        uid = self.kc.execute(cmd)
        self.cmd_list.append((uid, cmd))
        return uid

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.kc.stop_channels()
        self.km.shutdown_kernel()
        del self.msg_list
        del self.cmd_list
        del self.km

    def listen(self, uid, use_timeout=None):
        """Return the next IOPub message belonging to execution *uid*.

        Messages for other executions are parked in ``msg_list`` until their
        own listen() call picks them up.
        """
        if use_timeout is None:
            use_timeout = self.default_timeout
        while True:
            if uid in self.msg_list and len(self.msg_list[uid]) > 0:
                return self.msg_list[uid].pop(0)
            msg = self.iopub.get_msg(timeout=use_timeout)
            if 'msg_id' in msg['parent_header']:
                msg_uid = msg['parent_header']['msg_id']
                if msg_uid not in self.msg_list:
                    self.msg_list[msg_uid] = []
                self.msg_list[msg_uid].append(msg)

    def run(self, cell, use_timeout=None):
        """
        Run a notebook cell in the IPythonKernel

        Parameters
        ----------
        cell : IPython.notebook.Cell
            the cell to be run
        use_timeout : int or None (default)
            the time in seconds after which a cell is stopped and assumed to
            have timed out. If set to None the value in `default_timeout`
            is used

        Returns
        -------
        list of ex_cell_outputs
            a list of NotebookNodes of the returned types. This is similar
            to the list of outputs generated when a cell is run
        """
        # BUG FIX: the original tested `if timeout is not None`, but no name
        # `timeout` exists in this scope (the parameter is `use_timeout`),
        # so every call raised NameError.
        if use_timeout is None:
            use_timeout = self.default_timeout
        if hasattr(cell, 'source'):
            uid = self.execute(cell.source)
        else:
            raise AttributeError('No source/input key')
        outs = []
        stdout_cells = {}
        while True:
            msg = self.listen(uid, use_timeout)
            msg_type = msg['msg_type']
            if msg_type == 'execute_input':
                continue
            elif msg_type == 'clear_output':
                outs = []
                continue
            elif msg_type == 'status':
                if msg['content']['execution_state'] == 'idle':
                    # we are done with the cell, let's compare
                    break
                continue
            out_cell = nbformat.NotebookNode(output_type=msg_type)
            content = msg['content']
            if msg_type == 'stream':
                name = content['name']
                if name not in stdout_cells:
                    out_cell.name = name
                    out_cell.text = content['text']
                    stdout_cells[name] = out_cell
                    outs.append(out_cell)
                else:
                    # we already have a stdout cell, so append to it
                    stdout_cells[name].text += content['text']
            elif msg_type in ('display_data', 'execute_result'):
                if hasattr(content, 'execution_count'):
                    out_cell['execution_count'] = content['execution_count']
                else:
                    out_cell['execution_count'] = None
                out_cell['data'] = content['data']
                out_cell['metadata'] = content['metadata']
                outs.append(out_cell)
            elif msg_type == 'error':
                out_cell.ename = content['ename']
                out_cell.evalue = content['evalue']
                out_cell.traceback = content['traceback']
                outs.append(out_cell)
            elif msg_type.startswith('comm_'):
                # messages used to initialize, close and unpdate widgets
                # we will ignore these and hope for the best
                pass
            else:
                tv.warning("Unhandled iopub msg of type `%s`" % msg_type)
        return outs

    def get_commands(self, cell):
        """
        Extract potential commands from the first line of a cell

        if a code cell starts with the hashbang `#!` it can be followed
        by a comma separated list of commands. Each command can be

        1. a single key `skip`, or
        2. a key/value pair separated by a colon `timeout:[int]`

        Parameters
        ----------
        cell : a NotebookCell
            the cell to be examined

        Returns
        -------
        dict
            a dict of key/value pairs. For a single command the value is `True`
        """
        commands = {}
        source = cell.source
        if source is not None:
            lines = source.splitlines()
            if len(lines) > 0:
                n_line = 0
                line = lines[n_line].strip()
                while line.startswith('#!') or len(line) == 0:
                    txt = line[2:].strip()
                    parts = txt.split(',')
                    for part in parts:
                        subparts = part.split(':')
                        if len(subparts) == 1:
                            commands[subparts[0].strip().lower()] = True
                        elif len(subparts) == 2:
                            commands[subparts[0].strip().lower()] = subparts[1]
                    n_line += 1
                    # BUG FIX: the original indexed lines[n_line] without a
                    # bounds check, raising IndexError when every line of the
                    # cell was a directive or blank.
                    if n_line >= len(lines):
                        break
                    line = lines[n_line]
        return commands

    def is_empty_cell(self, cell):
        """
        Check if a cell has no code

        Parameters
        ----------
        cell : a NotebookCell
            the cell to be examined

        Returns
        -------
        bool
            True if the cell has no code, False otherwise
        """
        return not bool(cell.source)
class IPyKernel(object):
    """
    A simple wrapper class to run cells in an IPython Notebook.

    Notes
    -----
    - Use `with` construct to properly instantiate
    - IPython 3.0.0+ is assumed for this version
    - NOTE(review): this variant is Python 2 code (`print` statement,
      `basestring`) -- confirm the interpreter before reuse
    """
    def __init__(self, nb_version=4, extra_arguments=None):
        # default timeout time is 60 seconds
        self.default_timeout = 60
        if extra_arguments is None:
            extra_arguments = []
        self.extra_arguments = extra_arguments
        self.nb_version = nb_version

    def __enter__(self):
        # Start the kernel and drain startup noise from the IOPub channel.
        self.km = KernelManager()
        self.km.start_kernel(
            extra_arguments=self.extra_arguments,
            stderr=open(os.devnull, 'w')
        )
        self.kc = self.km.client()
        self.kc.start_channels()
        self.iopub = self.kc.iopub_channel
        self.shell = self.kc.shell_channel
        # run %pylab inline, because some notebooks assume this
        # even though they shouldn't
        self.shell.send("pass")
        self.shell.get_msg()
        while True:
            try:
                self.iopub.get_msg(timeout=0.05)
            except Empty:
                break
        return self

    def execute(self, source):
        # Fire-and-forget execution: consume the reply and discard any
        # IOPub output the command produced.
        self.kc.execute(source + '\n')
        self.shell.get_msg(timeout=0.05)
        while True:
            try:
                msg = self.iopub.get_msg(timeout=0.05)
            except Empty:
                break

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.kc.stop_channels()
        self.km.shutdown_kernel()
        del self.km

    def run(self, cell, timeout=None):
        """
        Run a notebook cell in the IPythonKernel

        Parameters
        ----------
        cell : IPython.notebook.Cell
            the cell to be run
        timeout : int or None (default)
            the time in seconds after which a cell is stopped and assumed to
            have timed out. If set to None the value in `default_timeout`
            is used

        Returns
        -------
        list of outs
            a list of NotebookNodes of the returned types. This is similar
            to the list of outputs generated when a cell is run
        """
        use_timeout = self.default_timeout
        if timeout is not None:
            use_timeout = timeout
        # Old (v3) notebooks store code in `input`, new (v4) in `source`.
        if hasattr(cell, 'input'):
            self.kc.execute(cell.input)
        elif hasattr(cell, 'source'):
            self.kc.execute(cell.source)
        else:
            raise AttributeError('No source/input key')
        self.shell.get_msg(timeout=use_timeout)
        outs = []
        stdout_cells = {}
        while True:
            try:
                msg = self.iopub.get_msg(timeout=1.00)
            except Empty:
                break
            msg_type = msg['msg_type']
            if msg_type in ('pyin', 'execute_input'):
                continue
            elif msg_type == 'clear_output':
                outs = []
                continue
            elif msg_type == 'status':
                if msg['content']['execution_state'] == 'idle':
                    # we are done with the cell, let's compare
                    break
                continue
            content = msg['content']
            out = nbformat.NotebookNode(output_type=msg_type)
            if msg_type == 'stream':
                name = content['name']
                if name not in stdout_cells:
                    out.name = name
                    out.text = content['text']
                    stdout_cells[name] = out
                    outs.append(out)
                else:
                    # we already have a stdout cell, so append to it
                    stdout_cells[name].text += content['text']
            elif msg_type in ('display_data', 'pyout', 'execute_result'):
                if hasattr(content, 'execution_count'):
                    out['execution_count'] = content['execution_count']
                else:
                    out['execution_count'] = None
                out['data'] = content['data']
                out['metadata'] = content['metadata']
                outs.append(out)
            elif msg_type == 'error':
                out.ename = content['ename']
                out.evalue = content['evalue']
                out.traceback = content['traceback']
                outs.append(out)
            elif msg_type.startswith('comm_'):
                # messages used to initialize, close and unpdate widgets
                # we will ignore these and hope for the best
                pass
            else:
                print "unhandled iopub msg:", msg_type
        return outs

    @staticmethod
    def sanitize(s):
        """sanitize a string for comparison.

        fix universal newlines, strip trailing newlines, and normalize likely
        random values (memory addresses and UUIDs)

        Parameters
        ----------
        s : str
            string to be sanitized, i.e. remove UUIDs, Hex-addresses,
            unnecessary newlines
        """
        if not isinstance(s, basestring):
            return s
        # normalize newline:
        s = s.replace('\r\n', '\n')
        # ignore trailing newlines (but not space)
        s = s.rstrip('\n')
        # normalize hex addresses:
        s = re.sub(r'0x[a-f0-9]+', '0xFFFFFFFF', s)
        # normalize UUIDs:
        s = re.sub(r'[a-f0-9]{8}(\-[a-f0-9]{4}){3}\-[a-f0-9]{12}', 'U-U-I-D', s)
        # fix problem with
        return s

    def compare_outputs(
            self, test, ref,
            skip_compare=('traceback', 'latex', 'execution_count')
    ):
        """
        Compare two lists of `NotebookNode`s

        Parameters
        ----------
        test : list of `NotebookNode`
            the list of be tested generated by the kernel
        ref : list of `NotebookNode`
            the reference list read from the notebook
        skip_compare : list of str
            a list of strings that name node types that are not to be tested

        Returns
        -------
        bool
            is True if both lists are different
        list of diff
            a list of diff (str) the represent the differences
        """
        diff = False
        diff_list = []
        # print ref.keys(), test.keys()
        if self.nb_version == 4:
            for key in ref:
                if key not in test:
                    return True, ["missing key: %s != %s" % (test.keys(), ref.keys())]
                elif key not in skip_compare:
                    if key == 'data':
                        # compare each mime-type payload separately
                        for data_key in test[key]:
                            my_diff = self.do_diff(
                                data_key, test[key], ref[key])
                            if my_diff is not None:
                                diff_list += my_diff
                                diff = True
                    else:
                        # can this happen?
                        my_diff = self.do_diff(key, test, ref)
                        if my_diff is not None:
                            diff_list += my_diff
                            diff = True
        return diff, diff_list

    def do_diff(self, key, test_cell, ref_cell):
        """
        Compare the key of two dicts

        Parameters
        ----------
        key : string
            the key to be compared
        test_cell : dict
            a dict with `key` as a key of string value
        ref_cell : dict
            a dict with `key` as a key of string value

        Returns
        -------
        list of diff (str)
            a list of diff representing the differences
        """
        if hasattr(ref_cell, key):
            s1 = self.sanitize(ref_cell[key])
        else:
            s1 = ''
        if hasattr(test_cell, key):
            s2 = self.sanitize(test_cell[key])
        else:
            s2 = ''
        if key in ['image/png', 'image/svg', 'image/svg+xml']:
            # binary-ish payloads: report size mismatch only, not a text diff
            if s1 != s2:
                return ['>>> diff in %s (size new : %d vs size old : %d )'
                        % (key, len(s1), len(s2))]
        else:
            if s1 != s2:
                expected = s1.splitlines(1)
                actual = s2.splitlines(1)
                diff = difflib.ndiff(expected, actual)
                return ['>>> diff in ' + key] + list(diff)
        return None

    def get_commands(self, cell):
        """
        Extract potential commands from the first line of a cell

        if a code cell starts with the hashbang `#!` it can be followed
        by a comma separated list of commands. Each command can be

        1. a single key `skip`, or
        2. a key/value pair separated by a colon `timeout:[int]`

        Parameters
        ----------
        cell : a NotebookCell
            the cell to be examined

        Returns
        -------
        dict
            a dict of key/value pairs. For a single command the value is `True`
        """
        commands = {}
        source = self.get_source(cell)
        if source is not None:
            lines = source.splitlines()
            if len(lines) > 0:
                n_line = 0
                line = lines[n_line].strip()
                # NOTE(review): runs past the end of `lines` (IndexError) when
                # every line is a directive or blank -- confirm callers never
                # pass such cells.
                while line.startswith('#!') or len(line) == 0:
                    txt = line[2:].strip()
                    parts = txt.split(',')
                    for part in parts:
                        subparts = part.split(':')
                        if len(subparts) == 1:
                            commands[subparts[0].strip().lower()] = True
                        elif len(subparts) == 2:
                            commands[subparts[0].strip().lower()] = subparts[1]
                    n_line += 1
                    line = lines[n_line]
        return commands

    def get_source(self, cell):
        """
        get the source code of a cell

        Notes
        -----
        This is legacy of IPython 2/3 conversion.

        Parameters
        ----------
        cell : a NotebookCell
            the cell to be examined

        Returns
        -------
        string
            the source code
        """
        if cell.cell_type == 'code':
            # v3 notebooks use `input`, v4 use `source`
            if hasattr(cell, 'input'):
                return cell.input
            elif hasattr(cell, 'source'):
                return cell.source
            else:
                return None

    def is_empty_cell(self, cell):
        """
        Check if a cell has no code

        Parameters
        ----------
        cell : a NotebookCell
            the cell to be examined

        Returns
        -------
        bool
            True if the cell has no code, False otherwise
        """
        source = self.get_source(cell)
        return source is None or source == ''
class KernelMagics(Magics):
    """Magics that proxy cell execution to a secondary Jupyter kernel and
    relay its IOPub traffic (including comm messages) to the front-end."""

    _execution_count = 1

    def stop_kernel(self):
        """Tear down client channels and force-kill the managed kernel."""
        self.kc.stop_channels()
        self.km.shutdown_kernel(now=True)

    def __init__(self, shell):
        super(KernelMagics, self).__init__(shell)
        self.km = None
        self.kc = None
        self.comms = []  # comm_ids observed on IOPub, used to route messages

    def start(self, kernel_name):
        """Start the named kernel and block until it is ready."""
        self.km = KernelManager()
        self.km.kernel_name = kernel_name
        self.km.start_kernel(extra_arguments=[self._context_base64()])
        atexit.register(self.stop_kernel)
        self.kc = self.km.client()
        self.kc.start_channels()
        try:
            self.kc.wait_for_ready()
            print("{} started successfully\n".format(kernel_name.capitalize()))
        except AttributeError:
            # older jupyter_client without wait_for_ready()
            self._wait_for_ready_backport()

    def run_cell(self, line, code):
        """Execute *code* in the managed kernel and forward its output."""
        # NOTE(review): start() requires a kernel_name argument, so this
        # no-arg fallback would raise TypeError -- confirm callers always
        # invoke start() explicitly before run_cell().
        if not self.km:
            self.start()
        self.kc.execute(code, allow_stdin=True)
        reply = self.kc.get_shell_msg()  # consume the execute_reply
        self._handle_iopub_messages()

    def _handle_iopub_messages(self):
        """Drain IOPub, remembering comm_ids and re-publishing each message
        on the host kernel's IOPub socket."""
        while True:
            try:
                msg = self.kc.get_iopub_msg(timeout=1)
            except Empty:
                break
            comm_id = msg['content'].get('comm_id')
            if comm_id and comm_id not in self.comms:
                self.comms.append(comm_id)
            self.shell.kernel.session.send(
                self.shell.kernel.iopub_socket,
                msg['msg_type'],
                msg['content'],
                metadata=msg['metadata'],
                parent=self.shell.kernel._parent_header,
                ident=msg.get('comm_id'),
                buffers=msg['buffers'],
            )

    def pass_message(self, msg_raw):
        """Forward a comm message from the front-end to the managed kernel."""
        comm_id = msg_raw['content'].get('comm_id')
        if comm_id in self.comms:
            content = msg_raw['content']
            msg = self.kc.session.msg(msg_raw['msg_type'], content)
            self.kc.shell_channel.send(msg)
            self._handle_iopub_messages()
        else:
            self.log.warn("No such comm: %s", comm_id)
            if self.log.isEnabledFor(logging.DEBUG):
                # don't create the list of keys if debug messages aren't enabled
                # BUG FIX: self.comms is a list, so the original
                # `self.comms.keys()` raised AttributeError here.
                self.log.debug("Current comms: %s", list(self.comms))

    def _context_base64(self):
        """JSON context (autotranslation port + session id), base64-encoded
        for safe transport on the kernel command line."""
        context_json = json.dumps({
            'port': os.environ["BEAKERX_AUTOTRANSLATION_PORT"],
            'contextId': get_ipython().kernel.session.session,
        })
        return base64.b64encode(context_json.encode('utf-8')).decode()
class KernelMagics(Magics):
    """Magics that proxy cell execution to a secondary Jupyter kernel and
    relay its IOPub traffic (including comm messages) to the front-end."""

    _execution_count = 1

    def stop_kernel(self):
        """Tear down client channels and force-kill the managed kernel."""
        self.kc.stop_channels()
        self.km.shutdown_kernel(now=True)

    def __init__(self, shell):
        super(KernelMagics, self).__init__(shell)
        self.km = None
        self.kc = None
        self.comms = []  # comm_ids observed on IOPub, used to route messages

    def start(self, kernel_name):
        """Start the named kernel and block until it is ready."""
        self.km = KernelManager()
        self.km.kernel_name = kernel_name
        self.km.start_kernel(extra_arguments=[self._context_base64()])
        atexit.register(self.stop_kernel)
        self.kc = self.km.client()
        self.kc.start_channels()
        try:
            self.kc.wait_for_ready()
            print("{} started successfully\n".format(kernel_name.capitalize()))
        except AttributeError:
            # older jupyter_client without wait_for_ready()
            self._wait_for_ready_backport()

    def run_cell(self, line, code):
        """Execute *code* in the managed kernel and forward its output."""
        # NOTE(review): start() requires a kernel_name argument, so this
        # no-arg fallback would raise TypeError -- confirm callers always
        # invoke start() explicitly before run_cell().
        if not self.km:
            self.start()
        self.kc.execute(code, allow_stdin=True)
        reply = self.kc.get_shell_msg()  # consume the execute_reply
        self._handle_iopub_messages()

    def _handle_iopub_messages(self):
        """Drain IOPub, remembering comm_ids and re-publishing each message
        on the host kernel's IOPub socket."""
        while True:
            try:
                msg = self.kc.get_iopub_msg(timeout=1)
            except Empty:
                break
            comm_id = msg['content'].get('comm_id')
            if comm_id and comm_id not in self.comms:
                self.comms.append(comm_id)
            self.shell.kernel.session.send(
                self.shell.kernel.iopub_socket,
                msg['msg_type'],
                msg['content'],
                metadata=msg['metadata'],
                parent=self.shell.kernel._parent_header,
                ident=msg.get('comm_id'),
                buffers=msg['buffers'],
            )

    def pass_message(self, msg_raw):
        """Forward a comm message from the front-end to the managed kernel."""
        comm_id = msg_raw['content'].get('comm_id')
        if comm_id in self.comms:
            content = msg_raw['content']
            msg = self.kc.session.msg(msg_raw['msg_type'], content)
            self.kc.shell_channel.send(msg)
            self._handle_iopub_messages()
        else:
            self.log.warn("No such comm: %s", comm_id)
            if self.log.isEnabledFor(logging.DEBUG):
                # don't create the list of keys if debug messages aren't enabled
                # BUG FIX: self.comms is a list, so the original
                # `self.comms.keys()` raised AttributeError here.
                self.log.debug("Current comms: %s", list(self.comms))

    def _context_base64(self):
        """JSON context (autotranslation port + session id), base64-encoded
        for safe transport on the kernel command line."""
        context_json = json.dumps({
            'port': os.environ["BEAKERX_AUTOTRANSLATION_PORT"],
            'contextId': get_ipython().kernel.session.session,
        })
        return base64.b64encode(context_json.encode('utf-8')).decode()
class NotebookRunner(object):
    """Run all code cells of a notebook in a freshly started kernel and
    store their outputs back into the notebook in place."""

    # The kernel communicates with mime-types while the notebook
    # uses short labels for different cell types. We'll use this to
    # map from kernel types to notebook format types.
    MIME_MAP = {
        'image/jpeg': 'jpeg',
        'image/png': 'png',
        'text/plain': 'text',
        'text/html': 'html',
        'text/latex': 'latex',
        'application/javascript': 'html',
        'image/svg+xml': 'svg',
    }

    def __init__(self, nb, pylab=False, mpl_inline=False, working_dir=None):
        """Start a kernel (optionally with inline pylab/matplotlib) and
        connect a client.

        Parameters
        ----------
        nb : notebook node to run
        pylab : bool, pass --pylab=inline to the kernel (deprecated)
        mpl_inline : bool, pass --matplotlib=inline to the kernel (deprecated)
        working_dir : str or None, directory to start the kernel in
        """
        self.km = KernelManager()
        args = []
        if pylab:
            args.append('--pylab=inline')
            logging.warn('--pylab is deprecated and will be removed in a '
                         'future version')
        elif mpl_inline:
            args.append('--matplotlib=inline')
            logging.warn('--matplotlib is deprecated and will be removed in a '
                         'future version')
        # Start the kernel from working_dir but restore our own cwd afterwards.
        cwd = os.getcwd()
        if working_dir:
            os.chdir(working_dir)
        self.km.start_kernel(extra_arguments=args)
        os.chdir(cwd)
        if platform.system() == 'Darwin':
            # There is sometimes a race condition where the first
            # execute command hits the kernel before it's ready.
            # It appears to happen only on Darwin (Mac OS) and an
            # easy (but clumsy) way to mitigate it is to sleep
            # for a second.
            sleep(1)
        self.kc = self.km.client()
        self.kc.start_channels()
        try:
            self.kc.wait_for_ready()
        except AttributeError:
            # IPython < 3
            self._wait_for_ready_backport()
        self.nb = nb

    def __del__(self):
        # Best-effort cleanup; relying on __del__ means shutdown timing
        # depends on garbage collection.
        self.kc.stop_channels()
        self.km.shutdown_kernel(now=True)

    def run_cell(self, cell, cidx):
        '''
        Run a notebook cell and update the output of that cell in-place.

        Raises NoDataFound or NotebookError if the cell raised an
        uncaught exception in the kernel.
        '''
        logging.debug('running cell {}'.format(cidx))
        # logging.debug(u'cell.input {}'.format(cell.input))
        self.kc.execute(cell.source)
        reply = self.kc.get_shell_msg()
        status = reply['content']['status']
        max_mem = system_memory_used()
        logging.info(' memory used: {}'.format(sizeof_fmt(max_mem)))
        if status == 'error':
            traceback_text = 'Cell raised uncaught exception: \n' + \
                '\n'.join(reply['content']['traceback'])
            traceback_text = remove_ansicolor(traceback_text)
            if 'NoDataFound' not in traceback_text:
                logging.error(traceback_text)
        else:
            logging.debug('run_cell ok')
        # Collect IOPub output until the kernel reports idle again.
        outs = list()
        while True:
            try:
                msg = self.kc.get_iopub_msg(timeout=1)
                if msg['msg_type'] == 'status':
                    if msg['content']['execution_state'] == 'idle':
                        break
            except Empty:
                # execution state should return to idle before the queue
                # becomes empty, if it doesn't, something bad has happened
                logging.error("empty exception")
                raise
            content = msg['content']
            msg_type = msg['msg_type']
            # IPython 3.0.0-dev writes pyerr/pyout in the notebook format but
            # uses error/execute_result in the message spec. This does the
            # translation needed for tests to pass with IPython 3.0.0-dev
            notebook3_format_conversions = {
                'error': 'pyerr',
                'execute_result': 'pyout'
            }
            msg_type = notebook3_format_conversions.get(msg_type, msg_type)
            out = NotebookNode(output_type=msg_type)
            #if 'execution_count' in content:
            #cell['prompt_number'] = content['execution_count']
            #out.prompt_number = content['execution_count']
            if msg_type in ('status', 'pyin', 'execute_input'):
                # bookkeeping messages: nothing to record as output
                continue
            elif msg_type == 'stream':
                out.stream = content['name']
                # 'text' in modern msg spec, 'data' in older versions
                if 'text' in content:
                    out.text = content['text']
                else:
                    out.text = content['data']
                # print(out.text, end='')
            elif msg_type in ('display_data', 'pyout'):
                for mime, data in content['data'].items():
                    try:
                        attr = self.MIME_MAP[mime]
                    except KeyError:
                        logging.error("unhandled mime")
                        raise NotImplementedError('unhandled mime type: %s' % mime)
                    setattr(out, attr, data)
            elif msg_type == 'pyerr':
                out.ename = content['ename']
                out.evalue = content['evalue']
                out.traceback = content['traceback']
            elif msg_type == 'clear_output':
                # discard everything collected so far, per the message's intent
                outs = list()
                continue
            else:
                logging.error("unhandled iopub")
                raise NotImplementedError('unhandled iopub message: %s' % msg_type)
            outs.append(out)
        # NOTE: Ver 4 format still have 'pyout', Why?
        cell['outputs'] = upgrade_outputs(outs)
        logging.debug("status: {}".format(status))
        if status == 'error':
            # traceback_text was set in the status == 'error' branch above
            if 'NoDataFound' in traceback_text:
                raise NoDataFound(traceback_text.split('\n')[-1])
            else:
                logging.debug(u"NotebookError raised")
                raise NotebookError(traceback_text)

    def iter_code_cells(self):
        '''
        Iterate over the notebook cells containing code.
        '''
        for cell in self.nb['cells']:
            if cell.cell_type == 'code':
                yield cell

    def iter_cells(self):
        '''
        Iterate over the notebook cells.
        '''
        for cell in self.nb['cells']:
            yield cell

    def clear_outputs(self):
        """Remove stored outputs from every cell that has them."""
        for cell in self.iter_cells():
            if 'outputs' in cell:
                cell['outputs'] = []

    @property
    def cellcnt(self):
        # total number of cells (all types, not just code)
        return len(self.nb['cells'])

    def run_notebook(self, memory_used=None, progress_cb=None,
                     skip_exceptions=False):
        '''
        Run all the cells of a notebook in order and update
        the outputs in-place.

        If ``skip_exceptions`` is set, then if exceptions occur in a cell, the
        subsequent cells are run (by default, the notebook execution stops).
        '''
        cur = 0
        if memory_used is not None:
            # caller-supplied list: append a memory sample before the first
            # cell and after each successfully executed cell
            memory_used.append(system_memory_used())
        for cell in self.iter_code_cells():
            cur += 1
            try:
                if progress_cb is not None:
                    progress_cb(cur)
                self.run_cell(cell, cur)
                if memory_used is not None:
                    memory_used.append(system_memory_used())
            except NotebookError:
                if not skip_exceptions:
                    raise
class BufferedKernelBase(Kernel):
    """Base class for a proxy kernel that wraps another Jupyter kernel.

    Shell requests received by this kernel are forwarded to a wrapped
    kernel; replies and IOPub traffic are relayed back, while stream
    output is buffered to log files (optionally summarized, masked, and
    shipped to a fluentd collector).

    NOTE(review): threads/parent_headers are class-level mutable dicts,
    shared across all instances of subclasses — presumably only one
    instance exists per process; confirm.
    """

    # Shell message types for which the handler blocks until the wrapped
    # kernel replies.
    blocking_msg_types = [
        'execute_request', 'history_request', 'complete_request',
        'inspect_request', 'kernel_info_request', 'comm_info_request',
        'shutdown_request'
    ]
    # Channels relayed verbatim by ChannelReaderThread (name kept as-is;
    # 'channles' is an existing typo preserved for compatibility).
    proxy_channles = ['iopub', 'stdin']
    threads = {}              # channel name -> ChannelReaderThread
    parent_headers = {}       # forwarded msg_id -> original parent header
    idle_event = Event()      # set when the wrapped kernel reports idle
    idle_parent_header = None
    keyboard_interrupt = False
    execute_request_msg_id = None
    log_file_object = None

    # Jupyter data directory (resolved lazily below).
    data_dir = Unicode()

    @default('data_dir')
    def _data_dir_default(self):
        app = None
        try:
            if JupyterApp.initialized():
                app = JupyterApp.instance()
        except MultipleInstanceError:
            pass
        if app is None:
            # create an app, without the global instance
            app = JupyterApp()
            app.initialize(argv=[])
        return app.data_dir

    server_signature_file = Unicode(
        help="""The file where the server signature is stored."""
    ).tag(config=True)

    @default('server_signature_file')
    def _server_signature_file_default(self):
        # environment override wins; empty string when no data dir exists
        if 'lc_nblineage_server_signature_path' in os.environ:
            return os.environ['lc_nblineage_server_signature_path']
        if not self.data_dir:
            return ''
        return os.path.join(self.data_dir, 'server_signature')

    # Candidate locations of the keyword-pattern file (notebook dir first).
    keyword_pattern_file_paths = List()

    @default('keyword_pattern_file_paths')
    def _keyword_pattern_file_paths_default(self):
        return [
            os.path.join(self.get_notebook_path(), IPYTHON_DEFAULT_PATTERN_FILE),
            os.path.join(os.path.expanduser('~/'), IPYTHON_DEFAULT_PATTERN_FILE)
        ]

    # Candidate log output directories, first writable one is used.
    log_dirs = List()

    @default('log_dirs')
    def _log_dirs_default(self):
        return [
            os.path.join(self.get_notebook_path(), '.log'),
            os.path.expanduser('~/.log')
        ]

    # Candidate config file locations (notebook dir first, then home).
    configfile_paths = List()

    @default('configfile_paths')
    def _configfile_paths_default(self):
        return [
            os.path.join(self.get_notebook_path(), '.lc_wrapper'),
            os.path.join(os.path.expanduser('~/'), '.lc_wrapper')
        ]

    def __init__(self, **kwargs):
        """Set up the optional fluentd sender, install the shell-message
        proxy handlers, and start the wrapped kernel."""
        Kernel.__init__(self, **kwargs)
        if 'lc_wrapper_fluentd_host' in os.environ:
            fluentd_host = os.environ['lc_wrapper_fluentd_host']
            fluentd_port = int(os.environ.get('lc_wrapper_fluentd_port', '24224'))
            fluentd_tag = os.environ.get('lc_wrapper_fluentd_tag', 'lc_wrapper')
            self.sender = sender.FluentSender(fluentd_tag, host=fluentd_host, port=fluentd_port)
            self.log.info('lc_wrapper: Enabled fluent logger: host=%s, port=%s, tag=%s',
                          fluentd_host, fluentd_port, fluentd_tag)
        else:
            self.sender = None
        self._init_message_handler()
        self.start_ipython_kernel()

    def _init_message_handler(self):
        """Replace this kernel's shell handlers with a proxy that forwards
        each request to the wrapped kernel and relays the reply."""
        def handler(self, stream, ident, parent):
            # Generic shell handler: forward `parent` to the wrapped kernel.
            self.log.debug("Received shell message: %s", str(parent))
            msg_type = parent['msg_type']
            content = parent['content']
            self._hook_request_msg(parent)
            self.idle_event.clear()
            self.keyboard_interrupt = False
            # Re-create the message with the wrapped kernel's session so its
            # signature is valid for that kernel.
            msg = self.kc.session.msg(msg_type, content)
            msgid = msg['header']['msg_id']
            self.log.debug("save parent_header: %s => %s", msgid, str(parent['header']))
            self.parent_headers[msgid] = parent['header']
            self.kc.shell_channel.send(msg)
            reply_msg = None
            if msg_type in self.blocking_msg_types:
                while True:
                    try:
                        reply_msg = self.kc._recv_reply(msgid, timeout=None)
                        break
                    except KeyboardInterrupt:
                        self.log.debug("KeyboardInterrupt", exc_info=True)
                        # propagate SIGINT to wrapped kernel
                        self.km.interrupt_kernel()
                        self.keyboard_interrupt = True
                        # this timer fire when the ipython kernel didnot
                        # interrupt within 5.0 sec.
                        self.timer = Timer(5.0, self.close_files)
                        self.log.debug('>>>>> close files: timer fired')
                        self.timer.start()
                reply_msg_content = self._hook_reply_msg(reply_msg)
                self.log.debug('reply: %s', reply_msg)
                # Relay the (possibly modified) reply to the frontend with the
                # original parent/ident so routing is preserved.
                reply_msg = self.session.send(stream, reply_msg['msg_type'],
                                              reply_msg_content, parent, ident,
                                              header=reply_msg['header'],
                                              metadata=reply_msg['metadata'],
                                              buffers=reply_msg['buffers'])
                self._post_send_reply_msg(parent, reply_msg)
                self._wait_for_idle(msgid)
                self._post_wait_for_idle(parent, reply_msg)

        # Install `handler` for every shell message type except the two the
        # base Kernel must keep handling itself.
        for msg_type in self.msg_types:
            if msg_type == 'kernel_info_request':
                continue
            if msg_type == 'shutdown_request':
                continue
            self.log.debug('override shell message handler: msg_type=%s', msg_type)
            if PY3:
                setattr(self, msg_type, MethodType(handler, self))
            else:
                setattr(self, msg_type, MethodType(handler, self, type(self)))
            self.shell_handlers[msg_type] = getattr(self, msg_type)
        # Comm messages are proxied through the same handler as well.
        comm_msg_types = ['comm_open', 'comm_msg', 'comm_close']
        for msg_type in comm_msg_types:
            self.log.debug('init shell comm message handler: msg_type=%s', msg_type)
            if PY3:
                setattr(self, msg_type, MethodType(handler, self))
            else:
                setattr(self, msg_type, MethodType(handler, self, type(self)))
            self.shell_handlers[msg_type] = getattr(self, msg_type)

    def start_ipython_kernel(self):
        """Start the wrapped kernel, connect a blocking client, launch the
        channel-relay threads, and pick the log directory."""
        # NOTE(review): called with no argument, yet the base
        # _get_wrapped_kernel_name below is declared with a `km` parameter —
        # subclasses presumably override it with a zero-arg version; confirm.
        kernel_name = self._get_wrapped_kernel_name()
        self.km = KernelManager(kernel_name=kernel_name,
                                client_class='jupyter_client.blocking.BlockingKernelClient')
        self.log.debug('kernel_manager: %s', str(self.km))
        self.log.info('start wrapped kernel: %s', kernel_name)
        self.km.start_kernel()
        self.kc = self.km.client()
        self.log.debug('kernel_client: %s', str(self.kc))
        self.log.debug('start_channels')
        self.kc.start_channels()
        try:
            self.log.debug('wait for ready of wrapped kernel')
            self.kc.wait_for_ready(timeout=None)
        except RuntimeError:
            self.kc.stop_channels()
            self.km.shutdown_kernel()
            raise
        # One relay thread per proxied channel (iopub, stdin).
        for channel in self.proxy_channles:
            stream = getattr(self, channel + '_socket')
            thread = ChannelReaderThread(self, self.kc, stream, self.session, channel)
            thread.start()
            self.threads[channel] = thread
        # First writable candidate wins.
        for log_dir in self.log_dirs:
            if self._is_writable_dir(log_dir):
                self.log_path = log_dir
                break
        self.log.debug('log output directory: %s', self.log_path)
        if self._find_default_keyword_pattern_file() is None:
            self.log.info('default keyword pattern file "%s" not found',
                          IPYTHON_DEFAULT_PATTERN_FILE)
            try:
                self._generate_default_keyword_pattern_file()
            except Exception as e:
                self.log.exception("failed to generate default keyword pattern file: %s", e)
        self.exec_info = None
        self.notebook_path = self.get_notebook_path()
        self.log.debug('notebook_path: %s', self.notebook_path)

    def _is_writable_dir(self, path):
        """Return True if *path* exists (or can be created) and is writable,
        probing with a temporary directory that is removed afterwards."""
        temp_dir = None
        try:
            if not os.path.exists(path):
                os.makedirs(path)
            temp_dir = tempfile.mkdtemp(dir=path)
            return True
        except (OSError, IOError) as e:
            self.log.debug("_is_writable_dir: %s", e)
            return False
        finally:
            if temp_dir is not None:
                os.rmdir(temp_dir)

    def _get_wrapped_kernel_name(self, km):
        # Subclasses must return the kernel-spec name to wrap.
        raise NotImplementedError()

    def _remove_parent_header(self, msg_id):
        """Drop the stored parent header for *msg_id*, if any."""
        if msg_id in self.parent_headers:
            parent_header = self.parent_headers[msg_id]
            self.log.debug("remove parent_header: %s => %s", msg_id, str(parent_header))
            del self.parent_headers[msg_id]

    def _hook_request_msg(self, parent):
        """Pre-forwarding hook: only execute_request needs special handling."""
        msg_type = parent['msg_type']
        if msg_type == 'execute_request':
            self._hook_execute_request_msg(parent)

    def _hook_execute_request_msg(self, parent):
        """Prepare logging/summarizing state for an execute_request and
        rewrite its code (strips the `!!` summarize marker if present)."""
        try:
            content = parent[u'content']
            code = py3compat.cast_unicode_py2(content[u'code'])
            silent = content[u'silent']
            allow_stdin = content.get('allow_stdin', False)
        except:
            self.log.error("Got bad msg: ")
            self.log.error("%s", parent)
            return
        self.execute_request_msg_id = parent['header']['msg_id']
        if not silent:
            self.execution_count += 1
        # Per-cell log history keyed by the cell meme uuid, when available.
        cell_full_id = self._get_cell_id(parent)
        if cell_full_id is not None:
            cell_uuid, _ = self._parse_cell_id(cell_full_id)
            self.log_history_file_path = os.path.join(self.log_path,
                                                      cell_uuid,
                                                      cell_uuid + u'.json')
        else:
            self.log_history_file_path = None
        self.log_history_id = cell_full_id
        self.log_history_data = self._read_log_history_file()
        notebook_data = self._get_notebook_data(parent)
        self.exec_info = ExecutionInfo(code, self.get_server_signature(), notebook_data)
        if not silent:
            env = self._get_config()
            self.summarize_on, new_code = self.is_summarize_on(code, env)
            self._init_default_config()
            self._start_log()
            if self.summarize_on:
                self._start_summarize()
            self._load_env(env)
            if not self.log_history_id is None:
                meme = {'lc_cell_meme': {'current': self.log_history_id}}
                self.log_buff_append(u'{}\n----\n'.format(json.dumps(meme)))
            self.log_buff_append(u'{}\n----\n'.format(code))  # code
            self._log_buff_flush()
            self.log_buff_append(self.exec_info.to_logfile_header() + u'----\n')
            # forward the rewritten code to the wrapped kernel
            content[u'code'] = new_code
            self._allow_stdin = allow_stdin

    def _hook_reply_msg(self, reply_msg):
        """Return the (possibly rewritten) content to relay for a reply."""
        if reply_msg['msg_type'] == 'execute_reply':
            return self._hook_execute_reply_msg(reply_msg)
        return reply_msg['content']

    def _hook_execute_reply_msg(self, reply):
        """Patch an execute_reply with our execution count and log path."""
        if hasattr(self, "timer"):
            self.timer.cancel()
            self.log.debug('>>>>> close files: timer cancelled')
        content = reply['content']
        content['execution_count'] = self.execution_count
        content['lc_wrapper'] = {
            'log_path': self.file_full_path
        }
        self.exec_info.execute_reply_status = content['status']
        return content

    def _post_send_reply_msg(self, parent, reply_msg):
        """After relaying a reply: honor stop_on_error by aborting queued
        requests when an execute_request failed."""
        msg_type = parent['msg_type']
        if msg_type == 'execute_request':
            content = parent['content']
            silent = content['silent']
            stop_on_error = content.get('stop_on_error', True)
            if not silent and reply_msg['content']['status'] == u'error' and stop_on_error:
                self._abort_queues()

    def _post_wait_for_idle(self, parent, reply_msg):
        """After the wrapped kernel went idle: flush the remaining buffered
        stdout for a finished execute_request."""
        if reply_msg is None:
            return
        if reply_msg['msg_type'] == 'execute_reply':
            self.log.debug('flushing stdout stream')
            self._send_last_stdout_stream_text()
            self.log.debug('flushed stdout stream')
            self.execute_request_msg_id = None

    def _hook_iopub_msg(self, parent_header, msg):
        """Rewrite msg_ids in relayed IOPub content; route output belonging
        to the in-flight execute_request through _output_hook."""
        msg_id = parent_header['msg_id']
        content = msg['content']
        # replace msg_id in the content
        self._replace_msg_id(msg_id, msg['parent_header']['msg_id'], content)
        if self.execute_request_msg_id == msg_id:
            return self._output_hook(msg)
        return content

    def _replace_msg_id(self, msg_id, wrapped_msg_id, content):
        """Recursively replace the wrapped kernel's msg_id with ours."""
        for k, v in content.items():
            if isinstance(v, dict):
                self._replace_msg_id(msg_id, wrapped_msg_id, v)
            elif v == wrapped_msg_id:
                content[k] = msg_id
                self.log.debug('replace msg_id in content: %s => %s',
                               wrapped_msg_id, msg_id)

    def _write_log(self, msg):
        """Append *msg* to the open log file and track its size."""
        if not msg is None:
            self.log_file_object.write(msg)
            self.exec_info.file_size = self.log_file_object.tell()

    def open_log_file(self, path):
        """Open a new timestamped log file under *path*/YYYYMMDD/."""
        self.log.debug('>>>>> open_log_file')
        now = datetime.now(dateutil.tz.tzlocal())
        path = os.path.join(path, now.strftime("%Y%m%d"))
        if not os.path.exists(path):
            os.makedirs(path)
        # millisecond suffix keeps names unique within one second
        file_name = now.strftime("%Y%m%d-%H%M%S") + "-%04d" % (now.microsecond // 1000)
        self.file_full_path = os.path.join(path, file_name + u'.log')
        self.exec_info.log_path = self.file_full_path
        self.log_file_object = io.open(self.file_full_path, "a", encoding='utf-8')
        self.log.debug(self.file_full_path)
        self.log.debug(self.log_file_object)

    def close_log_file(self):
        """Close the current log file (if open) and ship it to fluentd."""
        self.log.debug('>>>>> close_log_file')
        if self.log_file_object is None:
            self.log.debug('>>>>> close_log_file: not executed because self.log_file_object is None')
            return
        if not self.log_file_object.closed:
            self.log.debug('>>>>> log file closed')
            self.log_file_object.close()
            self.send_fluent_log()
        else:
            self.log.debug('>>>>> close_log_file: not executed because self.log_file_object is already closed')
        self.log.debug('close_log_file: self.log_file_object = None')
        self.log_file_object = None

    def send_fluent_log(self):
        """Emit the finished log file's contents through the fluentd sender."""
        if self.sender is None:
            return
        self.log.debug('>>>>> send_fluent_log')
        record = {}
        with io.open(self.exec_info.log_path, 'r') as f:
            record['log'] = f.read()
        self.sender.emit(None, record)
        # NOTE(review): 3 placeholders but 4 arguments passed — the last
        # (server signature) is never rendered; likely a format-string bug.
        self.log.info('lc_wrapper: send_fluent_log: cell_meme=%s, uid=%s, gid=%s',
                      self.log_history_id, os.getuid(), os.getgid(),
                      self.get_server_signature())

    def get_server_signature(self):
        """Return the stored server signature, or None if absent."""
        if os.path.exists(self.server_signature_file):
            with io.open(self.server_signature_file, 'r') as f:
                return f.read()
        else:
            return None

    def _wait_for_idle(self, msg_id):
        """Block until the wrapped kernel reports idle for *msg_id*."""
        self.log.debug('waiting for idle: msg_id=%s', msg_id)
        while True:
            self.idle_event.wait()
            if self.idle_parent_header['msg_id'] != msg_id:
                # idle for some other request — keep waiting
                self.log.warn('unexpected idle message received: expected msg_id=%s, received msg_id=%s',
                              msg_id, self.idle_parent_header['msg_id'])
                continue
            self.log.debug('idle: msg_id=%s', msg_id)
            return

    def get_notebook_path(self):
        # the kernel's working directory is assumed to be the notebook dir
        return getcwd()

    def _find_config_file(self):
        """Return the first existing .lc_wrapper config path, or None."""
        for path in self.configfile_paths:
            if os.path.exists(path):
                return path
        return None

    def _get_config(self):
        """Return configuration as a dict: KEY=VALUE lines from the config
        file, overridden by the process environment."""
        env = os.environ
        config_path = self._find_config_file()
        if config_path is None:
            return env
        line_pattern = re.compile(r'(\S+)=(".*?"|\S+)')
        config = {}
        with io.open(config_path, 'r', encoding='utf-8') as f:
            for l in f.readlines():
                l = l.strip()
                if len(l) == 0 or l.startswith('#'):
                    continue
                m = line_pattern.match(l)
                if m:
                    config[m.group(1)] = m.group(2)
                else:
                    self.log.warning('Unexpected line: {} at {}'.format(l, config_path))
        # environment variables take precedence over file entries
        for k, v in env.items():
            config[k] = v
        return config

    def send_clear_content_msg(self):
        """Send a clear_output(wait=True) on our IOPub socket."""
        clear_content = {'wait': True}
        self.session.send(self.iopub_socket, 'clear_output', clear_content,
                          self._parent_header, ident=None, buffers=None,
                          track=False, header=None, metadata=None)

    def _load_env(self, env):
        """Load summarize limits, masking patterns, and keyword patterns
        from the configuration in *env*."""
        # lc_wrapper format: start:header:exec:footer (all optional ints)
        summarize = env.get(SUMMARIZE_KEY, '')
        self.log.debug("lc_wrapper = " + summarize)
        summarize_pattern = re.compile(r'^([0-9]*):([0-9]*):([0-9]*):([0-9]*)$')
        summarize_params = summarize_pattern.match(summarize)
        if summarize_params is not None and len(summarize_params.group(1)) != 0:
            self.summarize_start_lines = int(summarize_params.group(1))
        if summarize_params is not None and len(summarize_params.group(2)) != 0:
            self.summarize_header_lines = int(summarize_params.group(2))
        if summarize_params is not None and len(summarize_params.group(3)) != 0:
            self.summarize_exec_lines = int(summarize_params.group(3))
        if summarize_params is not None and len(summarize_params.group(4)) != 0:
            self.summarize_footer_lines = int(summarize_params.group(4))
        # keep enough head room that header + footer always fit
        self.summarize_start_lines = max(self.summarize_start_lines,
                                         self.summarize_header_lines + \
                                         self.summarize_footer_lines + 1)
        self.log_history_data = self._read_log_history_file()
        if MASKING_KEY in env:
            self.masking_pattern = re.compile(env.get(MASKING_KEY))
        else:
            self.masking_pattern = None
        if LOG_MASKING_KEY in env:
            self.log_mask = env.get(LOG_MASKING_KEY)
        else:
            self.log_mask = 'on'
        self.repatter = []
        text = env.get(IGNORE_SUMMARIZE_KEY, 'file:default')
        if text is None or len(text) == 0:
            pass
        elif 'file:' in text:
            # NOTE(review): rfind('find:')+6 looks wrong — presumably this was
            # meant to strip the 'file:' prefix (rfind('file:')+5); as written
            # rfind returns -1 so this slices from index 5. Verify intent.
            file_name = text[text.rfind('find:')+6:].strip()
            if file_name == 'default':
                file_path = self._find_default_keyword_pattern_file()
            else:
                file_path = os.path.join(self.notebook_path, file_name)
            if file_path is None:
                self.keyword_buff_append(u'error : {} Not found'.format(IPYTHON_DEFAULT_PATTERN_FILE))
                self.log.warning('lc_wrapper_regex: %s Not found', IPYTHON_DEFAULT_PATTERN_FILE)
            elif os.path.exists(file_path):
                try:
                    patterns = self._read_keyword_pattern_file(file_path)
                    for ptxt in patterns:
                        self.repatter.append(re.compile(ptxt))
                except Exception as e:
                    self.keyword_buff_append(u'error : ' + str(e))
                    self.log.exception("lc_wrapper_regex: %s", e)
            else:
                self.keyword_buff_append(u'error : {} Not found'.format(file_path))
                self.log.warning('lc_wrapper_regex: %s Not found', file_path)
        else:
            # the value itself is a regex
            try:
                self.repatter.append(re.compile(text))
            except Exception as e:
                self.keyword_buff_append(u'error : ' + str(e))
                self.log.exception("lc_wrapper_regex: %s", e)

    def _find_default_keyword_pattern_file(self):
        """Return the first existing default pattern file path, or None."""
        for path in self.keyword_pattern_file_paths:
            if os.path.exists(path):
                return path
        return None

    def _read_keyword_pattern_file(self, filename):
        """Read non-empty pattern lines from *filename*."""
        with open(filename, 'r') as file:
            patterns = file.readlines()
            patterns = [x.strip() for x in patterns if len(x.strip()) > 0]
            self.log.debug('patterns :')
            for patt in patterns:
                self.log.debug(patt)
        return patterns

    def _generate_default_keyword_pattern_file(self):
        """Write the built-in default pattern file to the first writable
        candidate location; re-raise the last error if all fail."""
        error = None
        self.log.info('generate default keyword pattern file')
        for path in self.keyword_pattern_file_paths:
            if not os.path.exists(path):
                try:
                    with open(path, 'w') as f:
                        f.write(IPYTHON_DEFAULT_PATTERN)
                    self.log.info('generated default keyword pattern file: %s', path)
                    return
                except Exception as e:
                    self.log.debug('_generate_default_keyword_pattern_file: %s', str(e))
                    error = e
        if error is not None:
            raise error

    def is_summarize_on(self, code, env):
        """Return (summarize?, code-without-marker).

        A leading ``!!`` in the cell turns summarizing on; the
        FORCE_SUMMARIZE_KEY env value ('on'/'off') overrides it either way.
        """
        force = None
        if FORCE_SUMMARIZE_KEY in env:
            force_text = env[FORCE_SUMMARIZE_KEY].strip().lower()
            if force_text == 'on':
                force = True
            elif force_text == 'off':
                force = False
        regx = r'^\s*!!'
        m = re.match(regx, code, re.M)
        if m:
            return (force if force is not None else True, code[m.end():])
        else:
            return (force if force is not None else False, code)

    def _log_buff_flush(self, force=False):
        """Write buffered log lines out; unforced flushes only happen once
        the buffer exceeds 100 entries."""
        if force or len(self.log_buff) > 100:
            self._write_log(u''.join(self.log_buff))
            del self.log_buff[:]

    def log_buff_append(self, text=None):
        """Append text (or a list of texts) to the log buffer, unless
        messages are currently blocked."""
        if self.block_messages:
            return
        if not text is None:
            if isinstance(text, list):
                self.log_buff.extend(text)
            else:
                self.log_buff.append(text)

    def keyword_buff_append(self, text, highlight=True):
        """Record matched-keyword lines, optionally wrapped in ANSI red."""
        if isinstance(text, list):
            self.keyword_buff.extend([u'\033[0;31m{}\033[0m'.format(t) if highlight else t
                                      for t in text])
        else:
            self.keyword_buff.append(u'\033[0;31m{}\033[0m'.format(text) if highlight else text)

    def display_keyword_buff(self):
        """Render at most 2*header_lines matched-keyword lines for display,
        with a truncation notice when there are more."""
        if len(self.keyword_buff) == 0:
            return ''
        stream_text = u'...\n'
        stream_text += u'\n'.join(self.keyword_buff[:self.summarize_header_lines * 2]) + '\n'
        if len(self.keyword_buff) <= self.summarize_header_lines * 2:
            return stream_text
        msg = u'Matched lines exceed maximum number of view ({})' \
            .format(self.summarize_header_lines * 2)
        stream_text += u'\033[0;31m{}\033[0m\n'.format(msg)
        return stream_text

    def highlight_keywords(self, text):
        """Return *text* with every pattern match wrapped in ANSI red, or
        None when no pattern matches. On overlapping matches at the same
        position, the longest keyword wins."""
        matched = [p.search(text) for p in self.repatter]
        matched = [m for m in matched if m is not None]
        if len(matched) == 0:
            return None
        remain = text
        result = None
        while len(matched) > 0:
            left = min([m.start() for m in matched])
            if result is None:
                result = remain[:left]
            else:
                result += remain[:left]
            keywords = [m.group() for m in matched if m.start() == left]
            keyword = sorted(keywords, key=lambda s: len(s))[-1]
            result += u'\033[0;31m{}\033[0m'.format(keyword)
            remain = remain[left + len(keyword):]
            matched = [p.search(remain) for p in self.repatter]
            matched = [m for m in matched if m is not None]
        return result + remain

    def _read_log_history_file(self):
        """Load the per-cell log history JSON, or [] when unavailable."""
        if self.log_history_file_path is not None and \
           os.path.exists(self.log_history_file_path):
            with open(self.log_history_file_path, 'r') as f:
                data = json.load(f)
            return data
        else:
            return []

    def _write_log_history_file(self, data):
        """Append the current execution record to the history file and
        symlink the raw log next to it. One-shot: clears the path after."""
        if self.log_history_file_path is None:
            self.log.debug('Skipped to save log history')
            return
        data.append(self.exec_info.to_log())
        pathdir = os.path.dirname(self.log_history_file_path)
        if not os.path.exists(pathdir):
            os.makedirs(pathdir)
        log_full_dir, log_filename = os.path.split(self.file_full_path)
        log_full_dir, log_dirname = os.path.split(log_full_dir)
        os.symlink(os.path.join('..', log_dirname, log_filename),
                   os.path.join(pathdir, os.path.basename(self.file_full_path)))
        with open(self.log_history_file_path, 'w') as f:
            json.dump(data, f)
        self.log.debug('Log history saved: {}'.format(self.log_history_file_path))
        self.log_history_file_path = None

    def close_files(self):
        """Finalize the log: write footer + results, flush, close, and
        persist the log history entry."""
        self.log.debug('>>>>> close_files')
        if self.log_file_object is not None:
            self.exec_info.finished(len(self.keyword_buff))
            self.log_buff_append(u'\n----\n{}----\n'.format(self.exec_info.to_logfile_footer()))
            for result in self.result_files:
                self.log_buff_append(u'result: {}\n'.format(result))
            self.log_buff_append(u'execute_reply_status: {}\n'.format(self.exec_info.execute_reply_status))
            self.block_messages = True
            self._log_buff_flush(force=True)
            self.close_log_file()
            # save log file path
            self._write_log_history_file(self.log_history_data)

    def _init_default_config(self):
        # defaults; may be overridden by _load_env
        self.summarize_start_lines = 50
        self.summarize_header_lines = 20
        self.summarize_exec_lines = 1
        self.summarize_footer_lines = 20

    def _start_summarize(self):
        # reset per-execution summarize state
        self.count = 0
        self.summarize_header_buff = []
        self.summarize_last_buff = []

    def _start_log(self):
        # reset per-execution logging state and open a fresh log file
        self.block_messages = False
        self.log_buff = []
        self.keyword_buff = []
        self.result_files = []
        self.file_full_path = None
        self.log_file_object = None
        self.open_log_file(self.log_path)

    def _store_result(self, result):
        """Pickle an output message next to the log file so it can be
        re-sent after a clear_output."""
        if self.file_full_path is None:
            self.log.error('Log file already closed. Skip to store results')
            return
        log_dir, log_name = os.path.split(self.file_full_path)
        log_name_body, _ = os.path.splitext(log_name)
        result_file = os.path.join(log_dir,
                                   u'{}-{}.pkl'.format(log_name_body, len(self.result_files)))
        with open(result_file, 'wb') as f:
            pickle.dump(result, f)
        self.result_files.append(result_file)

    def _store_last_lines(self, content_text_list):
        # save the last few lines
        lines = max(self.summarize_footer_lines, self.summarize_start_lines)
        if len(content_text_list) < lines:
            if len(content_text_list) + len(self.summarize_last_buff) > lines:
                # evict the oldest lines to make room
                del self.summarize_last_buff[:len(content_text_list)]
            self.summarize_last_buff.extend(content_text_list)
        else:
            del self.summarize_last_buff[:]
            self.summarize_last_buff.extend(content_text_list[-lines:])

    def _output_hook(self, msg=None):
        """Process an IOPub message for the in-flight execute: log/mask
        streams, store display/error results, and return the content
        (possibly summarized) to relay to the frontend."""
        msg_type = msg['header']['msg_type']
        content = msg['content']
        if msg_type == 'stream':
            if 'ExecutionResult' in content['text']:
                return content
            else:
                masked_text = self._mask_lines(content['text'])
                if self.log_mask == 'on':
                    self.log_buff_append(masked_text)
                else :
                    self.log_buff_append(content['text'])
                self._log_buff_flush()
                content['text'] = masked_text
                content_text_list = content['text'].splitlines(False)    # with LF
                # save the stderr messages
                if content['name'] == 'stderr':
                    self.keyword_buff_append(content_text_list)
                # save the sentences the keyword matched
                elif not self.repatter is None and len(self.repatter) > 0:
                    for text in content_text_list:
                        matched = self.highlight_keywords(text)
                        if matched is not None:
                            self.keyword_buff_append(matched, highlight=False)
                if self.summarize_on:
                    return self._summarize_stream_output(msg, content, content_text_list)
                return content
        elif msg_type in ('display_data', 'execute_result'):
            execute_result = content.copy()
            execute_result['execution_count'] = self.execution_count
            self._store_result({'msg_type': msg_type, 'content': execute_result})
            return execute_result
        elif msg_type == 'error':
            error_result = content.copy()
            error_result['execution_count'] = self.execution_count
            # store before masking when masking is off, after when on
            if self.log_mask != 'on':
                self._store_result({'msg_type': msg_type, 'content': error_result})
            for i in range(len(error_result['traceback'])):
                error_result['traceback'][i] = self._mask_lines(error_result['traceback'][i])
            error_result['evalue'] = self._mask_lines(error_result['evalue'])
            if self.log_mask == 'on':
                self._store_result({'msg_type': msg_type, 'content': error_result})
            return error_result
        return content

    def _summarize_stream_output(self, msg, content, lines):
        """Pass stream output through until summarize_start_lines is hit,
        then replace the frontend view with a header/keywords/tail digest."""
        # save the first few lines
        if len(self.summarize_header_buff) < self.summarize_header_lines:
            self.summarize_header_buff.extend(lines)
        self._store_last_lines(lines)
        if self.count < self.summarize_start_lines:
            self.count += len(lines)
            stream_content = {'name': content['name'], 'text': content['text']}
        else:
            self._log_buff_flush()
            self.send_clear_content_msg()
            stream_text = u''
            stream_text += self.exec_info.to_stream() + u'----\n'
            stream_text += u'{}\n'.format('\n'.join(self.summarize_header_buff[:self.summarize_header_lines]))
            stream_text += self.display_keyword_buff()
            stream_text += u'...\n'
            stream_text += u'{}'.format('\n'.join(lines[:self.summarize_exec_lines]))
            stream_content = {'name': 'stdout', 'text': stream_text}
        return stream_content

    def _send_last_stdout_stream_text(self):
        """Finalize the log files and, when summarizing, emit the final
        digest for the just-finished execution."""
        self.log.debug('_flush_stdout_stream')
        self.close_files()
        if self.summarize_on:
            self._send_last_summarized_stdout_stream_text()
        self.result_files = []

    def _send_last_summarized_stdout_stream_text(self):
        """Replace the cell's displayed output with the final summary and
        re-send any stored display/error results."""
        self.send_clear_content_msg()
        stream_text = u''
        stream_text += self.exec_info.to_stream(len(self.log_history_data)) + u'----\n'
        if self.count < self.summarize_start_lines:
            # short output: show everything we buffered
            stream_text += u'\n'.join(self.summarize_last_buff)
        else:
            stream_text += u'{}\n'.format('\n'.join(self.summarize_header_buff[:self.summarize_header_lines]))
            stream_text += self.display_keyword_buff()
            stream_text += u'...\n'
            stream_text += u'{}'.format('\n'.join(self.summarize_last_buff[-self.summarize_footer_lines:]))
        stream_content = {'name': 'stdout', 'text': stream_text}
        self.send_response(self.iopub_socket, 'stream', stream_content)
        # Send exeuction result again because last result can be cleared
        for resultf in self.result_files:
            with open(resultf, 'rb') as f:
                result = pickle.load(f)
            self.session.send(self.iopub_socket, result['msg_type'], result['content'],
                              self._parent_header, ident=None, buffers=None,
                              track=False, header=None, metadata=None)

    def _mask_lines(self, string):
        """Replace every masking-pattern match in *string* with asterisks
        of the same length; no-op when no pattern is configured."""
        if not hasattr(self, 'masking_pattern'):
            return string
        elif self.masking_pattern is None:
            return string
        else:
            pattern = self.masking_pattern
            def asterisks_repl(match):
                return len(match[0]) * '*'
            return re.sub(pattern, asterisks_repl, string)

    def _get_cell_id(self, parent):
        """Extract content.lc_cell_data.lc_cell_meme.current, or None."""
        if 'content' not in parent:
            return None
        content = parent['content']
        if 'lc_cell_data' not in content:
            return None
        lc_cell_data = content['lc_cell_data']
        if 'lc_cell_meme' not in lc_cell_data:
            return None
        lc_cell_meme = lc_cell_data['lc_cell_meme']
        if 'current' not in lc_cell_meme:
            return None
        return lc_cell_meme['current']

    def _parse_cell_id(self, cell_id):
        # split a meme id into (uuid, branch-suffix); uuids have 5 '-' parts
        parts = cell_id.split('-')
        return '-'.join(parts[:5]), '-'.join(parts[5:])

    def _get_notebook_data(self, parent):
        """Extract content.lc_notebook_data, or None."""
        if 'content' not in parent:
            return None
        content = parent['content']
        if 'lc_notebook_data' not in content:
            return None
        return content['lc_notebook_data']

    def do_shutdown(self, restart):
        """Shut down: close logs, flush the fluentd sender, stop the
        wrapped kernel and the relay threads."""
        self.log.debug('>>>>> do_shutdown')
        self.close_files()
        if self.sender is not None:
            self.log.debug('close fluent logger sender')
            self.sender.close()
        self.log.info('stopping wrapped kernel')
        if hasattr(self, "km"):
            self.km.shutdown_kernel(restart=restart)
        for channel, thread in self.threads.items():
            self.log.info('stopping %s ChannelReaderThread', channel)
            thread.stop()
        return {'status': 'ok', 'restart': restart}
class GroovyMagics(Magics):
    """Cell magic ``%%groovy`` that runs cells in a managed Groovy kernel.

    The Groovy kernel is started lazily on first use; comm ids seen on its
    IOPub channel are tracked so comm traffic can be routed back via
    :meth:`pass_message`.
    """

    _execution_count = 1

    def stop_kernel(self):
        """Stop the client channels and shut the Groovy kernel down."""
        self.kc.stop_channels()
        self.km.shutdown_kernel(now=True)

    def __init__(self, shell):
        super(GroovyMagics, self).__init__(shell)
        self.km = None      # KernelManager for the Groovy kernel
        self.kc = None      # blocking client connected to it
        self.comms = []     # comm_ids seen on the Groovy kernel's IOPub

    def start(self):
        """Start the Groovy kernel and connect a client to it.

        Registers :meth:`stop_kernel` with atexit so the child kernel does
        not outlive this process.
        """
        self.km = KernelManager()
        self.km.kernel_name = 'groovy'
        self.km.start_kernel()
        atexit.register(self.stop_kernel)
        self.kc = self.km.client()
        self.kc.start_channels()
        try:
            self.kc.wait_for_ready()
            print("Groovy started successfully\n")
        except AttributeError:
            # jupyter_client too old to have wait_for_ready()
            self._wait_for_ready_backport()

    def run_cell(self, line, code):
        """Execute *code* in the Groovy kernel and forward its IOPub output."""
        if not self.km:
            self.start()
        self.kc.execute(code, allow_stdin=True)
        # Wait for the execute_reply before draining IOPub.
        reply = self.kc.get_shell_msg()
        self._handle_iopub_messages()

    def _handle_iopub_messages(self):
        """Drain the Groovy kernel's IOPub channel, re-emitting each message
        on this kernel's own IOPub socket and recording any new comm ids."""
        while True:
            try:
                msg = self.kc.get_iopub_msg(timeout=1)
            except Empty:
                break
            comm_id = msg['content'].get('comm_id')
            if comm_id and comm_id not in self.comms:
                self.comms.append(comm_id)
            # NOTE(review): msg.get('comm_id') looks up a top-level key, while
            # the comm_id above lives in msg['content'] — ident is presumably
            # always None here; verify against the message spec.
            self.shell.kernel.session.send(self.shell.kernel.iopub_socket,
                                           msg['msg_type'],
                                           msg['content'],
                                           metadata=msg['metadata'],
                                           parent=self.shell.kernel._parent_header,
                                           ident=msg.get('comm_id'),
                                           buffers=msg['buffers'],
                                           )

    def pass_message(self, msg_raw):
        """Forward a comm message to the Groovy kernel if its comm_id is known."""
        comm_id = msg_raw['content'].get('comm_id')
        if comm_id in self.comms:
            content = msg_raw['content']
            msg = self.kc.session.msg(msg_raw['msg_type'], content)
            self.kc.shell_channel.send(msg)
            self._handle_iopub_messages()
        else:
            # logger.warn is a deprecated alias; use warning().
            self.log.warning("No such comm: %s", comm_id)
            if self.log.isEnabledFor(logging.DEBUG):
                # don't build the list if debug messages aren't enabled.
                # BUG FIX: self.comms is a list, not a dict — calling .keys()
                # raised AttributeError whenever this branch was hit.
                self.log.debug("Current comms: %s", list(self.comms))

    @cell_magic
    def groovy(self, line, cell):
        """``%%groovy`` — run the cell body in the Groovy kernel."""
        return self.run_cell(line, cell)
class IPyKernel(object):
    """
    A simple wrapper class to run cells in an IPython Notebook.

    Notes
    -----
    - Use `with` construct to properly instantiate
    - IPython 3.0.0+ is assumed for this version
    """

    def __init__(self, nb_version=4, extra_arguments=None):
        # default timeout time is 60 seconds
        self.default_timeout = 60
        if extra_arguments is None:
            extra_arguments = []
        self.extra_arguments = extra_arguments
        self.nb_version = nb_version

    def __enter__(self):
        """Start a kernel, open its channels, flush startup iopub traffic."""
        self.km = KernelManager()
        self.km.start_kernel(extra_arguments=self.extra_arguments,
                             stderr=open(os.devnull, 'w'))
        self.kc = self.km.client()
        self.kc.start_channels()
        self.iopub = self.kc.iopub_channel
        self.shell = self.kc.shell_channel
        # Ping the kernel with a no-op so we know it is responsive before
        # running real cells.
        self.shell.send("pass")
        self.shell.get_msg()
        # Drain any iopub messages left over from kernel startup.
        while True:
            try:
                self.iopub.get_msg(timeout=0.05)
            except Empty:
                # BUG FIX: the original caught Exception and compared
                # repr(e) == 'Empty()', which is fragile; catch the
                # queue.Empty exception type directly instead.
                break
        # (uid, cmd) pairs for every executed command
        self.cmd_list = []
        # iopub messages buffered per parent msg_id (see listen())
        self.msg_list = {}
        return self

    def clear(self):
        """Discard all queued iopub messages.

        NOTE(review): get_msgs() is the IPython 2-era channel API; newer
        jupyter_client channels only expose get_msg() — confirm against the
        pinned dependency version.
        """
        self.iopub.get_msgs()

    def execute(self, cmd):
        """Submit *cmd* for execution and record its message id."""
        uid = self.kc.execute(cmd)
        self.cmd_list.append((uid, cmd))
        return uid

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.kc.stop_channels()
        self.km.shutdown_kernel()
        del self.msg_list
        del self.cmd_list
        del self.km

    def listen(self, uid, use_timeout=None):
        """Return the next iopub message whose parent is request *uid*.

        Messages belonging to other requests are buffered in self.msg_list
        until their own uid is listened for.
        """
        if use_timeout is None:
            use_timeout = self.default_timeout
        while True:
            if uid in self.msg_list and len(self.msg_list[uid]) > 0:
                return self.msg_list[uid].pop(0)
            msg = self.iopub.get_msg(timeout=use_timeout)
            if 'msg_id' in msg['parent_header']:
                msg_uid = msg['parent_header']['msg_id']
                if msg_uid not in self.msg_list:
                    self.msg_list[msg_uid] = []
                self.msg_list[msg_uid].append(msg)

    def run(self, cell, use_timeout=None):
        """ Run a notebook cell in the IPythonKernel

        Parameters
        ----------
        cell : IPython.notebook.Cell
            the cell to be run
        use_timeout : int or None (default)
            the time in seconds after which a cell is stopped and assumed to
            have timed out. If set to None the value in `default_timeout`
            is used

        Returns
        -------
        list of ex_cell_outputs
            a list of NotebookNodes of the returned types. This is
            similar to the list of outputs generated when a cell is run
        """
        # BUG FIX: the original tested an undefined name `timeout`
        # (NameError) and its then-branch was the no-op
        # `use_timeout = use_timeout`.
        if use_timeout is None:
            use_timeout = self.default_timeout
        if hasattr(cell, 'source'):
            uid = self.execute(cell.source)
        else:
            raise AttributeError('No source/input key')
        outs = []
        # one aggregated stream cell per stream name (stdout/stderr)
        stdout_cells = {}
        while True:
            msg = self.listen(uid, use_timeout)
            msg_type = msg['msg_type']
            if msg_type == 'execute_input':
                continue
            elif msg_type == 'clear_output':
                outs = []
                continue
            elif msg_type == 'status':
                if msg['content']['execution_state'] == 'idle':
                    # we are done with the cell, let's compare
                    break
                continue
            out_cell = nbformat.NotebookNode(output_type=msg_type)
            content = msg['content']
            if msg_type == 'stream':
                name = content['name']
                if name not in stdout_cells:
                    out_cell.name = name
                    out_cell.text = content['text']
                    stdout_cells[name] = out_cell
                    outs.append(out_cell)
                else:
                    # we already have a stream cell for this name: append
                    stdout_cells[name].text += content['text']
            elif msg_type in ('display_data', 'execute_result'):
                if hasattr(content, 'execution_count'):
                    out_cell['execution_count'] = content['execution_count']
                else:
                    out_cell['execution_count'] = None
                out_cell['data'] = content['data']
                out_cell['metadata'] = content['metadata']
                outs.append(out_cell)
            elif msg_type == 'error':
                out_cell.ename = content['ename']
                out_cell.evalue = content['evalue']
                out_cell.traceback = content['traceback']
                outs.append(out_cell)
            elif msg_type.startswith('comm_'):
                # messages used to initialize, close and update widgets;
                # we will ignore these and hope for the best
                pass
            else:
                # BUG FIX: `tv` was an undefined name here (NameError).
                print("Unhandled iopub msg of type `%s`" % msg_type)
        return outs

    def get_commands(self, cell):
        """ Extract potential commands from the first line of a cell

        if a code cell starts with the hashbang `#!` it can be followed
        by a comma separated list of commands. Each command can be

        1. a single key `skip`, or
        2. a key/value pair separated by a colon `timeout:[int]`

        Parameters
        ----------
        cell : a NotebookCell
            the cell to be examined

        Returns
        -------
        dict
            a dict of key/value pairs. For a single command the value is
            `True`
        """
        commands = {}
        source = cell.source
        if source is not None:
            lines = source.splitlines()
            if len(lines) > 0:
                n_line = 0
                line = lines[n_line].strip()
                while line.startswith('#!') or len(line) == 0:
                    if line.startswith('#!'):
                        # BUG FIX: blank lines previously fell through the
                        # parser and registered a bogus '' command; only
                        # parse genuine hashbang lines.
                        txt = line[2:].strip()
                        parts = txt.split(',')
                        for part in parts:
                            subparts = part.split(':')
                            if len(subparts) == 1:
                                commands[subparts[0].strip().lower()] = True
                            elif len(subparts) == 2:
                                commands[subparts[0].strip().lower()] = subparts[1]
                    n_line += 1
                    if n_line >= len(lines):
                        # BUG FIX: original indexed past the end of `lines`
                        # (IndexError) when every line was hashbang/blank.
                        break
                    line = lines[n_line]
        return commands

    def is_empty_cell(self, cell):
        """ Check if a cell has no code

        Parameters
        ----------
        cell : a NotebookCell
            the cell to be examined

        Returns
        -------
        bool
            True if the cell has no code, False otherwise
        """
        return not bool(cell.source)
class GroovyMagics(Magics):
    """Cell magic that runs code on a Groovy kernel started eagerly in
    __init__ and torn down in __del__."""

    _execution_count = 1

    def __init__(self, shell):
        super(GroovyMagics, self).__init__(shell)
        self.km = KernelManager()
        self.km.kernel_name = 'groovy'
        self.km.start_kernel()
        self.kc = self.km.client()
        self.kc.start_channels()
        try:
            self.kc.wait_for_ready()
            print("Groovy started successfully\n")
        except AttributeError:
            # IPython < 3 clients have no wait_for_ready()
            self._wait_for_ready_backport()

    def __del__(self):
        # BUG FIX: if __init__ failed part-way (e.g. the 'groovy' kernelspec
        # is missing), self.kc / self.km were never assigned and the original
        # __del__ raised AttributeError during garbage collection.
        kc = getattr(self, 'kc', None)
        if kc is not None:
            kc.stop_channels()
        km = getattr(self, 'km', None)
        if km is not None:
            km.shutdown_kernel(now=True)

    def run_cell(self, line, code):
        """Execute *code* on the Groovy kernel, echoing results and
        collecting non-result outputs until the kernel goes idle."""
        self.kc.execute(code, allow_stdin=True)
        reply = self.kc.get_shell_msg()
        status = reply['content']['status']
        outs = list()
        while True:
            try:
                msg = self.kc.get_iopub_msg(timeout=1)
                if msg['msg_type'] == 'status':
                    if msg['content']['execution_state'] == 'idle':
                        break
            except Empty:
                print("empty ?!")
                raise
            content = msg['content']
            msg_type = msg['msg_type']
            # map nbformat-v4 message names back to the v3 vocabulary
            notebook3_format_conversions = {
                'error': 'pyerr',
                'execute_result': 'pyout'
            }
            msg_type = notebook3_format_conversions.get(msg_type, msg_type)
            out = NotebookNode(output_type=msg_type)
            if msg_type == 'pyout':
                print(content['data']['text/plain'])
                continue
            if msg_type in ('status', 'pyin', 'execute_input'):
                continue
            elif msg_type in ('comm_open', 'comm_msg', 'comm_close'):
                # TODO handle this msg ?!?!?!
                continue
            elif msg_type == 'stream':
                out.stream = content['name']
                if 'text' in content:
                    out.text = content['text']
                else:
                    out.text = content['data']
            elif msg_type in ('display_data', 'pyout'):
                # NOTE(review): 'pyout' is unreachable here — it is printed
                # and skipped by the early `continue` above. MIME_MAP is not
                # defined in this class; presumably provided by a base or
                # mixin — confirm before relying on this branch.
                for mime, data in content['data'].items():
                    try:
                        attr = self.MIME_MAP[mime]
                    except KeyError:
                        print("unhandled mime")
                        raise NotImplementedError('unhandled mime type: %s' % mime)
                    setattr(out, attr, data)
            elif msg_type == 'pyerr':
                out.ename = content['ename']
                out.evalue = content['evalue']
                out.traceback = content['traceback']
            elif msg_type == 'clear_output':
                outs = list()
                continue
            else:
                print("unhandled " + msg_type)
                raise NotImplementedError('unhandled iopub message: %s' % msg_type)
            outs.append(out)
        # NOTE: Ver 4 format still have 'pyout', Why?
        # upgrade_outputs(outs)
        print(str(outs))
        print("status: {}".format(status))

    @cell_magic
    def groovy(self, line, cell):
        return self.run_cell(line, cell)