def get_connection_file(app=None):
    """Locate the JSON connection file for a kernel app.

    Parameters
    ----------
    app : IPKernelApp instance [optional]
        Defaults to the currently running kernel app when omitted.

    Returns
    -------
    str
        Absolute path of the connection file.
    """
    if app is None:
        from IPython.kernel.zmq.kernelapp import IPKernelApp
        if not IPKernelApp.initialized():
            raise RuntimeError("app not specified, and not in a running Kernel")
        app = IPKernelApp.instance()
    # Search the working directory first, then the profile's security dir.
    search_dirs = ['.', app.profile_dir.security_dir]
    return filefind(app.connection_file, search_dirs)
def start(self):
    """Start an in-process IPython kernel inside IDA and poll it with an IDA timer.

    Raises
    ------
    Exception
        If a kernel timer is already registered (kernel already running), or if
        not called from IDA's own Python context.
    """
    if self._timer is not None:
        raise Exception("IPython kernel is already running.")

    # The IPKernelApp initialization is based on the IPython source for
    # IPython.embed_kernel available here:
    # https://github.com/ipython/ipython/blob/rel-3.2.1/IPython/kernel/zmq/embed.py

    if IPKernelApp.initialized():
        app = IPKernelApp.instance()
    else:
        app = IPKernelApp.instance(
            outstream_class='ipyida.kernel.IDATeeOutStream'
        )
        app.initialize()

        # Undo the sys.modules mangling done during shell initialization so the
        # original __main__ module stays reachable.
        main = app.kernel.shell._orig_sys_modules_main_mod
        if main is not None:
            sys.modules[app.kernel.shell._orig_sys_modules_main_name] = main

        # IPython <= 3.2.x will send exception to sys.__stderr__ instead of
        # sys.stderr. IDA's console will not be able to display exceptions if we
        # don't send it to IDA's sys.stderr. To fix this, we call both the
        # ipython's and IDA's excepthook (IDA's excepthook is actually Python's
        # default).
        sys.excepthook = wrap_excepthook(sys.excepthook)

    # Load the calling scope: the kernel namespace becomes the caller's module.
    (ida_module, ida_locals) = IPython.utils.frame.extract_module_locals(1)

    if 'idaapi' not in ida_locals:
        raise Exception("{0:s} must be called from idapythonrc.py or "
                        "IDA's prompt.".format("IPythonKernel.start"))

    app.kernel.user_module = ida_module
    app.kernel.user_ns = ida_locals
    app.shell.set_completer_frame()

    app.kernel.start()
    app.kernel.do_one_iteration()

    self.connection_file = app.connection_file

    def ipython_kernel_iteration():
        # One kernel iteration per tick; the return value re-arms IDA's timer
        # with the kernel's poll interval (seconds -> milliseconds).
        app.kernel.do_one_iteration()
        return int(1000 * app.kernel._poll_interval)
    self._timer = idaapi.register_timer(
        int(1000 * app.kernel._poll_interval), ipython_kernel_iteration)
def bind_kernel(**kwargs):
    """Bind an Engine's Kernel to be used as a full IPython kernel.

    This allows a running Engine to be used simultaneously as a full IPython
    kernel with the QtConsole or other frontends.

    This function returns immediately.

    Parameters
    ----------
    **kwargs :
        Forwarded to ``IPEngineApp.bind_kernel``.

    Raises
    ------
    RuntimeError
        If not called from within an ``IPEngineApp`` instance.
    """
    from IPython.kernel.zmq.kernelapp import IPKernelApp
    from IPython.parallel.apps.ipengineapp import IPEngineApp

    # first check for IPKernelApp, in which case this should be a no-op
    # because there is already a bound kernel
    if IPKernelApp.initialized() and isinstance(IPKernelApp._instance, IPKernelApp):
        return

    if IPEngineApp.initialized():
        try:
            app = IPEngineApp.instance()
        except MultipleInstanceError:
            # Singleton conflict — fall through to the error below.
            pass
        else:
            return app.bind_kernel(**kwargs)

    # Fix: original message read "bind_kernel be called", missing "must".
    raise RuntimeError("bind_kernel must be called from an IPEngineApp instance")
def fork_kernel(self, config, pipe, resource_limits, logfile):
    """ A function to be set as the target for the new kernel processes
    forked in ForkingKernelManager.start_kernel. This method forks and
    initializes a new kernel, uses the update_function to update the kernel's
    namespace, sets resource limits for the kernel, and sends kernel
    connection information through the Pipe object.

    :arg IPython.config.loader config: kernel configuration
    :arg multiprocessing.Pipe pipe: a multiprocessing connection object
        which will send kernel ip, session, and port information to the
        other side
    :arg dict resource_limits: a dict with keys resource.RLIMIT_* (see config_default
        documentation for explanation of valid options) and values of the limit
        for the given resource to be set in the kernel process
    :arg logfile: unused here; kept for interface compatibility
    """
    # Put the kernel in its own process group so it can be signalled as a unit.
    os.setpgrp()
    logging.basicConfig(filename=self.filename,
                        format=str(uuid.uuid4()).split('-')[0] + ': %(asctime)s %(message)s',
                        level=logging.DEBUG)
    logging.debug("kernel forked; now starting and configuring")
    try:
        ka = IPKernelApp.instance(config=config, ip=config["ip"])
        from namespace import InstrumentedNamespace
        ka.user_ns = InstrumentedNamespace()
        ka.initialize([])
    except Exception:
        logging.exception("Error initializing IPython kernel")
        # Fix: the original swallowed the failure and fell through with `ka`
        # possibly unbound, crashing later with a confusing NameError.
        raise
    try:
        if self.update_function is not None:
            self.update_function(ka)
    except Exception:
        # Best-effort namespace customization; a failure here is non-fatal.
        logging.exception("Error configuring up kernel")
    logging.debug("finished updating")
    for r, limit in resource_limits.iteritems():
        resource.setrlimit(getattr(resource, r), (limit, limit))
    # Hand the connection details back to the parent process.
    pipe.send({"ip": ka.ip, "key": ka.session.key,
               "shell_port": ka.shell_port, "stdin_port": ka.stdin_port,
               "hb_port": ka.hb_port, "iopub_port": ka.iopub_port})
    pipe.close()
    ka.start()
def _BlockOnRawInputReplyZMQ():
    """Block until a message arrives on the stdin channel.

    Returns:
      The value of the message, which is assumed to be of type
      raw_input_reply.
    """
    # pylint: disable=g-import-not-at-top
    from IPython.kernel.zmq.kernelapp import IPKernelApp
    kernel = IPKernelApp.instance().kernel
    stdin = kernel.stdin_socket

    reply = None
    while True:
        try:
            _, reply = kernel.session.recv(stdin, 0)
        except KeyboardInterrupt:
            # re-raise KeyboardInterrupt, to truncate traceback
            raise KeyboardInterrupt
        except Exception:
            # Invalid message: keep waiting for a usable one.
            continue
        break

    # A malformed reply degrades to the empty string.
    try:
        return reply['content']['value']
    except KeyError:
        return ''
def start():
    """Run a SplashKernel inside a virtual X display with a Qt4 event loop."""
    with xvfb.autostart():
        # FIXME: logs go to nowhere
        init_qt_app(verbose=False)
        app = IPKernelApp.instance(kernel_class=SplashKernel)
        app.initialize()
        app.kernel.eventloop = loop_qt4
        app.start()
def pylab_kernel(gui):
    """Launch and return an IPython kernel with pylab support for the
    desired gui.
    """
    app = IPKernelApp.instance()
    argv = ['python', '--pylab=%s' % gui]
    app.initialize(argv)
    return app
def mpl_kernel(gui):
    """Launch and return an IPython kernel with pylab (matplotlib and numpy)
    support for the desired gui.
    """
    kernel = IPKernelApp.instance()
    kernel.initialize(['python', '--pylab=%s' % gui,#'--matplotlib=%s' % gui,
                       #'--log-level=10'
                       ])
    # NOTE(review): this sets `pylab` on the IPKernelApp *class*, not on this
    # instance — presumably `kernel.pylab = 'inline'` was intended; confirm.
    IPKernelApp.pylab = 'inline'
    return kernel
def embed_kernel(module=None, local_ns=None, **kwargs): """Embed and start an IPython kernel in a given scope. Parameters ---------- module : ModuleType, optional The module to load into IPython globals (default: caller) local_ns : dict, optional The namespace to load into IPython user namespace (default: caller) kwargs : various, optional Further keyword args are relayed to the IPKernelApp constructor, allowing configuration of the Kernel. Will only have an effect on the first embed_kernel call for a given process. """ # get the app if it exists, or set it up if it doesn't if IPKernelApp.initialized(): app = IPKernelApp.instance() else: app = IPKernelApp.instance(**kwargs) app.initialize([]) # Undo unnecessary sys module mangling from init_sys_modules. # This would not be necessary if we could prevent it # in the first place by using a different InteractiveShell # subclass, as in the regular embed case. main = app.kernel.shell._orig_sys_modules_main_mod if main is not None: sys.modules[app.kernel.shell._orig_sys_modules_main_name] = main # load the calling scope if not given (caller_module, caller_locals) = extract_module_locals(1) if module is None: module = caller_module if local_ns is None: local_ns = caller_locals app.kernel.user_module = module app.kernel.user_ns = local_ns # START custom if hasattr(app, 'shell') and app.shell: app.shell.set_completer_frame() # END custom app.start()
def mpl_kernel(gui):
    """Launch and return an IPython kernel with matplotlib support for the
    desired gui.
    """
    app = IPKernelApp.instance()
    argv = ["python", "--matplotlib=%s" % gui]
    app.initialize(argv)
    return app
def execute(self, arbiter, props):
    """Start an embedded IPython kernel exposing `arbiter` in its namespace.

    Returns a dict with the connection-file name under 'shell' (or False plus
    an error message under 'msg' on failure).
    """
    # Kernels write their connection file as kernel-<pid>.json.
    shell = 'kernel-%d.json' % os.getpid()
    msg = None
    try:
        from IPython.kernel.zmq.kernelapp import IPKernelApp
        if not IPKernelApp.initialized():
            app = IPKernelApp.instance()
            app.initialize([])
            # Undo sys.modules mangling done during shell initialization.
            main = app.kernel.shell._orig_sys_modules_main_mod
            if main is not None:
                sys.modules[app.kernel.shell._orig_sys_modules_main_name] = main
            # Expose only the arbiter to the interactive user.
            app.kernel.user_module = sys.modules[__name__]
            app.kernel.user_ns = {'arbiter': arbiter}
            app.shell.set_completer_frame()
            app.kernel.start()
    except Exception as e:
        # Report the failure to the caller instead of raising.
        shell = False
        msg = str(e)
    return {'shell': shell, 'msg': msg}
def fork_kernel(self, config, pipe, resource_limits, logfile):
    """ A function to be set as the target for the new kernel processes
    forked in ForkingKernelManager.start_kernel. This method forks and
    initializes a new kernel, uses the update_function to update the kernel's
    namespace, sets resource limits for the kernel, and sends kernel
    connection information through the Pipe object.

    :arg IPython.config.loader config: kernel configuration
    :arg multiprocessing.Pipe pipe: a multiprocessing connection object
        which will send kernel ip, session, and port information to the
        other side
    :arg dict resource_limits: a dict with keys resource.RLIMIT_* (see config_default
        documentation for explanation of valid options) and values of the limit
        for the given resource to be set in the kernel process
    :arg logfile: unused here; kept for interface compatibility
    """
    os.setpgrp()
    logging.basicConfig(filename=self.filename,
                        format=str(uuid.uuid4()).split('-')[0] + ': %(asctime)s %(message)s',
                        level=logging.DEBUG)
    logging.debug("kernel forked; now starting and configuring")
    try:
        ka = IPKernelApp.instance(config=config, ip=config["ip"])
        from namespace import InstrumentedNamespace
        ka.user_ns = InstrumentedNamespace()
        # The following line on UNIX systems (and we are unlikely to run on
        # Windows) will lead to creation of a 1-second poller that will kill
        # this process as soon as its parent dies. More importanly, it will
        # prevent from execution the following if block:
        # https://github.com/ipython/ipython/blob/rel-2.1.0/IPython/kernel/zmq/kernelapp.py#L348
        # which probably was filling some output buffer and used to severely
        # limit the number of computations possible without restarting the
        # server. TODO: figure out a better fix or confirm this is the one!
        ka.parent_handle = True
        ka.initialize([])
    except Exception:
        logging.exception("Error initializing IPython kernel")
        # Fix for the old FIXME ("What's the point in proceeding after?!"):
        # `ka` may be unbound or half-initialized, so propagate the failure
        # instead of crashing later with a misleading NameError.
        raise
    try:
        if self.update_function is not None:
            self.update_function(ka)
    except Exception:
        # Namespace customization is best-effort; log and continue.
        logging.exception("Error configuring up kernel")
    logging.debug("finished updating")
    for r, limit in resource_limits.iteritems():
        resource.setrlimit(getattr(resource, r), (limit, limit))
    # Hand connection details back to the parent process.
    pipe.send({"ip": ka.ip, "key": ka.session.key,
               "shell_port": ka.shell_port, "stdin_port": ka.stdin_port,
               "hb_port": ka.hb_port, "iopub_port": ka.iopub_port})
    pipe.close()
    # The following line will erase JSON connection file with ports and
    # other numbers. Since we do not reuse the kernels, we don't really need
    # these files. And new kernels set atexit hook to delete the file, but
    # it does not get called, perhaps because kernels are stopped by system
    # signals. The result is accumulation of files leading to disk quota
    # issues AND attempts to use stale files to connect to non-existing
    # kernels that eventually crash the server. TODO: figure out a better
    # fix, perhaps kernels have to be stopped in a more gentle fashion?
    ka.cleanup_connection_file()
    ka.start()
def _start_kernel():
    """Start the IPython kernel without blocking and return the IPKernelApp.

    Monkey-patches ``IPKernelApp.start`` so the embedded kernel is driven by a
    host-application timer instead of a blocking zmq ioloop.
    """
    import IPython
    from IPython.kernel.zmq.kernelapp import IPKernelApp
    from zmq.eventloop import ioloop

    # patch IPKernelApp.start so that it doesn't block
    def _IPKernelApp_start(self):
        if self.poller is not None:
            self.poller.start()
        self.kernel.start()

        # set up a timer to periodically poll the zmq ioloop
        loop = ioloop.IOLoop.instance()

        def poll_ioloop(timer_id, time):
            global _kernel_running

            # if the kernel has been closed then run the event loop until it gets to the
            # stop event added by IPKernelApp.shutdown_request
            if self.kernel.shell.exit_now:
                _log.debug("IPython kernel stopping (%s)" % self.connection_file)
                timer.kill_timer(timer_id)
                loop.start()
                _kernel_running = False
                return

            # otherwise call the event loop but stop immediately if there are no pending events
            loop.add_timeout(0, lambda: loop.add_callback(loop.stop))
            loop.start()

        global _kernel_running
        _kernel_running = True
        # Poll the ioloop every 100 ms from the host application's timer.
        timer.set_timer(100, poll_ioloop)
    IPKernelApp.start = _IPKernelApp_start

    # IPython expects sys.__stdout__ to be set
    sys.__stdout__ = sys.stdout
    sys.__stderr__ = sys.stderr

    # call the API embed function, which will use the monkey-patched method above
    IPython.embed_kernel()

    _ipython_app = IPKernelApp.instance()
    return _ipython_app
def main(unused_argv):
    """Run the notebook server, or (when IS_KERNEL) an inline-plotting kernel."""
    sys.argv = ORIG_ARGV

    if not IS_KERNEL:
        # Drop all flags.
        sys.argv = [sys.argv[0]]
        # NOTE(sadovsky): For some reason, putting this import at the top level
        # breaks inline plotting.  It's probably a bug in the stone-age version
        # of matplotlib.
        from IPython.html.notebookapp import NotebookApp  # pylint: disable=g-import-not-at-top
        notebookapp = NotebookApp.instance()
        notebookapp.open_browser = True

        # password functionality adopted from quality/ranklab/main/tools/notebook.py
        # add options to run with "password"
        if FLAGS.password:
            from IPython.lib import passwd  # pylint: disable=g-import-not-at-top
            # Listen on all interfaces only when password-protected.
            notebookapp.ip = "0.0.0.0"
            notebookapp.password = passwd(FLAGS.password)
        else:
            print("\nNo password specified; Notebook server will only be available"
                  " on the local machine.\n")
        notebookapp.initialize(argv=["--notebook-dir", FLAGS.notebook_dir])

        if notebookapp.ip == "0.0.0.0":
            proto = "https" if notebookapp.certfile else "http"
            url = "%s://%s:%d%s" % (proto, socket.gethostname(), notebookapp.port,
                                    notebookapp.base_project_url)
            print("\nNotebook server will be publicly available at: %s\n" % url)

        notebookapp.start()
        return

    # Drop the --flagfile flag so that notebook doesn't complain about an
    # "unrecognized alias" when parsing sys.argv.
    sys.argv = ([sys.argv[0]] +
                [z for z in sys.argv[1:] if not z.startswith("--flagfile")])

    from IPython.kernel.zmq.kernelapp import IPKernelApp  # pylint: disable=g-import-not-at-top
    kernelapp = IPKernelApp.instance()
    kernelapp.initialize()

    # Enable inline plotting. Equivalent to running "%matplotlib inline".
    ipshell = kernelapp.shell
    ipshell.enable_matplotlib("inline")

    kernelapp.start()
def mpl_kernel(gui_backend):
    """
    Launch and return an IPython kernel with matplotlib support.

    Parameters
    ----------
    gui_backend -- string or None
        The GUI mode used to initialize the matplotlib mode. For options, see
        the `ipython --matplotlib` help pages. If None, the kernel is
        initialized without GUI support.
    """
    app = IPKernelApp.instance()
    if gui_backend is None:
        args = ['python']
    else:
        args = ['python', '--matplotlib={}'.format(gui_backend)]
    app.initialize(args)
    return app
def __init__(self, gui):
    """Start an in-process IPython kernel with event-loop support for *gui*."""
    self.ipkernel = IPKernelApp.instance()
    self.ipkernel.initialize(['python', '--gui=%s' % gui,
                              #'--log-level=10'  # for debugging
                              ])

    # Active qt consoles are created on demand and tracked here.
    self.consoles = []

    # The GUI shares the shell's user namespace so both sides see the
    # same variables.
    self.namespace = self.ipkernel.shell.user_ns

    # Remember the startup keys so the 'namespace' button can show only
    # user-created names rather than the whole pylab/numpy namespace.
    self._init_keys = set(self.namespace.keys())

    # Example: a variable that will be seen by the user in the shell, and
    # that the GUI modifies (the 'Counter++' button increments it):
    self.namespace['app_counter'] = 0
def default_kernel_app():
    """ Return a configured IPKernelApp (idempotent: safe to call twice). """

    def event_loop(kernel):
        """ Non-blocking qt event loop."""
        # A QTimer drives the kernel instead of a blocking loop.
        kernel.timer = QtCore.QTimer()
        kernel.timer.timeout.connect(kernel.do_one_iteration)
        kernel.timer.start(1000 * kernel._poll_interval)

    app = IPKernelApp.instance()
    try:
        app.initialize(['python', '--pylab=qt'])
    except ZMQError:
        pass  # already set up
    app.kernel.eventloop = event_loop
    try:
        app.start()
    except RuntimeError:  # already started
        pass
    return app
def tearDown(self):
    # Drop the IPKernelApp singleton so each test starts from a clean slate.
    IPKernelApp.clear_instance()
def do_execute_direct(self, code):
    """Run *code* through the %shell line magic and return trimmed output."""
    # Empty input: nothing to do.
    if not code.strip():
        return
    self.log.debug('execute: %s' % code)
    shell_magic = self.line_magics['shell']
    resp = shell_magic.eval(code.strip())
    self.log.debug('execute done')
    return resp.strip()

def get_completions(self, info):
    # Delegate tab-completion to the %shell line magic.
    shell_magic = self.line_magics['shell']
    return shell_magic.get_completions(info)

def get_kernel_help_on(self, info, level=0, none_on_fail=False):
    code = info['code'].strip()
    # Help is available only for single-token requests.
    if not code or len(code.split()) > 1:
        if none_on_fail:
            return None
        else:
            return ""
    shell_magic = self.line_magics['shell']
    return shell_magic.get_help_on(info, level, none_on_fail)

def repr(self, data):
    # Shell output is already a display-ready string.
    return data

if __name__ == '__main__':
    from IPython.kernel.zmq.kernelapp import IPKernelApp
    IPKernelApp.launch_instance(kernel_class=MetaKernelBash)
# Entry point: launch a Calysto Scheme kernel when this module is executed.
from IPython.kernel.zmq.kernelapp import IPKernelApp
from .kernel import CalystoScheme
IPKernelApp.launch_instance(kernel_class=CalystoScheme)
# (fragment) tail of MochiKernel.__init__:
super().__init__(*args, **kwargs)

def do_execute(self, code, silent, store_history=True, user_expressions=None,
               allow_stdin=False):
    """Evaluate a Mochi code block and stream captured stdout/stderr."""
    mochi_builtins.eval_code_block(code)
    if not silent:
        stream_content = {'name': 'stdout', 'text': self.output.read()}
        self.send_response(self.iopub_socket, 'stream', stream_content)
    else:
        pass  # self.output.read()
    # Forward any captured error text regardless of `silent`.
    if self.error.peek():
        stream_content = {'name': 'stderr', 'text': self.error.read()}
        self.send_response(self.iopub_socket, 'stream', stream_content)
    else:
        pass  # self.error.read()
    return {'status': 'ok',
            # The base class increments the execution count
            'execution_count': self.execution_count,
            'payload': [],
            'user_expressions': {},
            }

if __name__ == '__main__':
    from IPython.kernel.zmq.kernelapp import IPKernelApp
    IPKernelApp.launch_instance(kernel_class=MochiKernel)
def default_kernel_app():
    """Create and initialize a Qt/pylab kernel app (uses the module-level
    ``event_loop``); the app is returned unstarted."""
    from IPython.kernel.zmq.kernelapp import IPKernelApp
    kernel_app = IPKernelApp.instance()
    kernel_app.initialize(['python', '--pylab=qt'])
    kernel_app.kernel.eventloop = event_loop
    return kernel_app
# (fragment) tail of SMLNJKernel.do_execute:
    output = self.smlnjwrapper.run_command(code, timeout=None)
except KeyboardInterrupt:
    # Forward Ctrl-C to the SML/NJ child and resynchronize on its prompt.
    self.smlnjwrapper.child.sendintr()
    interrupted = True
    self.smlnjwrapper._expect_prompt()
    output = self.smlnjwrapper.child.before
except EOF:
    # The child died: salvage its buffered output and restart the REPL.
    output = self.smlnjwrapper.child.before + 'Restarting SML/NJ'
    self._start_smlnjang()

if not silent:
    # Send standard output
    stream_content = {'name': 'stdout', 'text': output}
    self.send_response(self.iopub_socket, 'stream', stream_content)

if interrupted:
    return {'status': 'abort', 'execution_count': self.execution_count}

return {
    'status': 'ok',
    'execution_count': self.execution_count,
    'payload': [],
    'user_expressions': {}
}

# ===== MAIN =====
if __name__ == '__main__':
    from IPython.kernel.zmq.kernelapp import IPKernelApp
    IPKernelApp.launch_instance(kernel_class=SMLNJKernel)
from IPython.kernel.zmq.kernelbase import Kernel


class NodeKernel(Kernel):
    """Minimal demo kernel: answers every execute request with 'hi!'."""

    implementation = "node-kernel"
    implementation_version = "test"
    language = "javascript"
    language_version = "test"
    banner = "test"

    def do_execute(self, code, silent, store_history=True,
                   user_expressions=None, allow_stdin=False):
        """Ignore *code* and emit a fixed stdout stream message."""
        if not silent:
            self.send_response(self.iopub_socket, 'stream',
                               {'name': 'stdout', 'data': 'hi!'})
        return {'status': 'ok',
                'execution_count': self.execution_count,
                'payload': [],
                'user_expressions': {}}


if __name__ == '__main__':
    from IPython.kernel.zmq.kernelapp import IPKernelApp
    IPKernelApp.launch_instance(kernel_class=NodeKernel)
# (fragment) tail of StataKernel.do_execute signature and body:
allow_stdin=False):
    self.continuation = False
    self.ignore_output()
    code = self.remove_continuations(code.strip())
    # %%mata cell magic: wrap the cell body in `mata ... end`.
    mata_magic = re.match(r'\s*%%mata\s+', code)
    if mata_magic:
        code = 'mata\n' + code[mata_magic.end():] + '\nend\n'
    try:
        self.stata_do(' ' + code + '\n')
        self.respond()
    except KeyboardInterrupt:
        # Ask Stata to break, flush pending output, then abort the request.
        self.stata.UtilSetStataBreak()
        self.respond()
        return {'status': 'abort',
                'execution_count': self.execution_count}
    msg = {
        'status': 'ok',
        'execution_count': self.execution_count,
        'payload': [],
        'user_expressions': {}
    }
    return msg

def do_shutdown(self, restart):
    # Leave Stata cleanly on kernel shutdown.
    self.stata_do(' exit, clear\n')

if __name__ == '__main__':
    from IPython.kernel.zmq.kernelapp import IPKernelApp
    IPKernelApp.launch_instance(kernel_class=StataKernel)
def complete_registration(self, msg, connect, maybe_tunnel):
    """Finish engine registration with the hub: wire up heartbeat, shell,
    control and iopub channels, then start the kernel."""
    # print msg
    self.loop.remove_timeout(self._abort_timeout)
    ctx = self.context
    loop = self.loop
    identity = self.bident
    idents, msg = self.session.feed_identities(msg)
    msg = self.session.unserialize(msg)
    content = msg['content']
    info = self.connection_info

    def url(key):
        """get zmq url for given channel"""
        return str(info["interface"] + ":%i" % info[key])

    if content['status'] == 'ok':
        self.id = int(content['id'])

        # launch heartbeat
        # possibly forward hb ports with tunnels
        hb_ping = maybe_tunnel(url('hb_ping'))
        hb_pong = maybe_tunnel(url('hb_pong'))

        hb_monitor = None
        if self.max_heartbeat_misses > 0:
            # Add a monitor socket which will record the last time a ping was seen
            mon = self.context.socket(zmq.SUB)
            mport = mon.bind_to_random_port('tcp://%s' % localhost())
            mon.setsockopt(zmq.SUBSCRIBE, b"")
            self._hb_listener = zmqstream.ZMQStream(mon, self.loop)
            self._hb_listener.on_recv(self._report_ping)
            hb_monitor = "tcp://%s:%i" % (localhost(), mport)

        heart = Heart(hb_ping, hb_pong, hb_monitor, heart_id=identity)
        heart.start()

        # create Shell Connections (MUX, Task, etc.):
        shell_addrs = url('mux'), url('task')

        # Use only one shell stream for mux and tasks
        stream = zmqstream.ZMQStream(ctx.socket(zmq.ROUTER), loop)
        stream.setsockopt(zmq.IDENTITY, identity)
        shell_streams = [stream]
        for addr in shell_addrs:
            connect(stream, addr)

        # control stream:
        control_addr = url('control')
        control_stream = zmqstream.ZMQStream(ctx.socket(zmq.ROUTER), loop)
        control_stream.setsockopt(zmq.IDENTITY, identity)
        connect(control_stream, control_addr)

        # create iopub stream:
        iopub_addr = url('iopub')
        iopub_socket = ctx.socket(zmq.PUB)
        iopub_socket.setsockopt(zmq.IDENTITY, identity)
        connect(iopub_socket, iopub_addr)

        # disable history:
        self.config.HistoryManager.hist_file = ':memory:'

        # Redirect input streams and set a display hook.
        if self.out_stream_factory:
            sys.stdout = self.out_stream_factory(self.session, iopub_socket, u'stdout')
            sys.stdout.topic = cast_bytes('engine.%i.stdout' % self.id)
            sys.stderr = self.out_stream_factory(self.session, iopub_socket, u'stderr')
            sys.stderr.topic = cast_bytes('engine.%i.stderr' % self.id)
        if self.display_hook_factory:
            sys.displayhook = self.display_hook_factory(self.session, iopub_socket)
            sys.displayhook.topic = cast_bytes('engine.%i.execute_result' % self.id)

        self.kernel = Kernel(parent=self, int_id=self.id, ident=self.ident,
                             session=self.session,
                             control_stream=control_stream,
                             shell_streams=shell_streams,
                             iopub_socket=iopub_socket,
                             loop=loop, user_ns=self.user_ns, log=self.log)

        self.kernel.shell.display_pub.topic = cast_bytes('engine.%i.displaypub' % self.id)

        # periodically check the heartbeat pings of the controller
        # Should be started here and not in "start()" so that the right period can be taken
        # from the hubs HeartBeatMonitor.period
        if self.max_heartbeat_misses > 0:
            # Use a slightly bigger check period than the hub signal period to not warn unnecessary
            self.hb_check_period = int(content['hb_period']) + 10
            self.log.info("Starting to monitor the heartbeat signal from the hub every %i ms.",
                          self.hb_check_period)
            self._hb_reporter = ioloop.PeriodicCallback(
                self._hb_monitor, self.hb_check_period, self.loop)
            self._hb_reporter.start()
        else:
            self.log.info("Monitoring of the heartbeat signal from the hub is not enabled.")

        # FIXME: This is a hack until IPKernelApp and IPEngineApp can be fully merged
        app = IPKernelApp(parent=self, shell=self.kernel.shell,
                          kernel=self.kernel, log=self.log)
        app.init_profile_dir()
        app.init_code()

        self.kernel.start()
    else:
        self.log.fatal("Registration Failed: %s" % msg)
        raise Exception("Registration Failed: %s" % msg)

    self.log.info("Completed registration with id %i" % self.id)
# (fragment) Spyder kernel bootstrap executed at interpreter startup.
# Remove this helper's own directory from the import path.
try:
    sys.path.remove(osp.dirname(__file__))
except ValueError:
    pass

# Make the namespace look like a fresh interactive __main__.
locals().pop('__file__')
__doc__ = ''
__name__ = '__main__'

# Add current directory to sys.path (like for any standard Python interpreter
# executed in interactive mode):
sys.path.insert(0, '')

# Fire up the kernel instance.
from IPython.kernel.zmq.kernelapp import IPKernelApp

ipk_temp = IPKernelApp.instance()
ipk_temp.config = kernel_config()
ipk_temp.initialize()

# Grabbing the kernel's shell to share its namespace with our
# Variable Explorer
__ipythonshell__ = ipk_temp.shell

# Issue 977 : Since kernel.initialize() has completed execution,
# we can now allow the monitor to communicate the availability of
# the kernel to accept front end connections.
__ipythonkernel__ = ipk_temp
del ipk_temp

# Change %edit to open files inside Spyder
# NOTE: Leave this and other magic modifications *after* setting
def get_variable(self, name):
    """Look up *name* in the embedded Python magic's environment."""
    py_magic = self.line_magics['python']
    return py_magic.env.get(name, None)

def do_execute_direct(self, code):
    """Evaluate *code* via the %python line magic."""
    py_magic = self.line_magics['python']
    return py_magic.eval(code.strip())

def do_function_direct(self, function_name, arg):
    """
    Call a function in the kernel language with args (as a single item).
    """
    py_magic = self.line_magics['python']
    return py_magic.eval("%s(%s)" % (function_name, arg))

def get_completions(self, info):
    """Delegate tab-completion to the %python line magic."""
    py_magic = self.line_magics['python']
    return py_magic.get_completions(info)

def get_kernel_help_on(self, info, level=0, none_on_fail=False):
    """Delegate help lookup to the %python line magic."""
    py_magic = self.line_magics['python']
    return py_magic.get_help_on(info, level, none_on_fail)

if __name__ == '__main__':
    from IPython.kernel.zmq.kernelapp import IPKernelApp
    IPKernelApp.launch_instance(kernel_class=MetaKernelPython)
def banner(self):
    """Return bash's --version banner, computed once and cached."""
    if self._banner is None:
        self._banner = check_output(['bash', '--version']).decode('utf-8')
    return self._banner

def makeWrapper(self):
    """Start a bash shell and return a :class:`REPLWrapper` object.

    Note that this is equivalent :function:`metakernel.pyexpect.bash`,
    but is used here as an example of how to be cross-platform.
    """
    if os.name != 'nt':
        # POSIX: rewrite the prompt so it is unambiguous to match.
        prompt_change_cmd = u("PS1='{0}' PS2='{1}' PROMPT_COMMAND=''")
        prompt_emit_cmd = None
        prompt_regex = re.compile('[$#]')
    else:
        # Windows: emit a sentinel instead of changing the prompt.
        prompt_regex = u('__repl_ready__')
        prompt_emit_cmd = u('echo __repl_ready__')
        prompt_change_cmd = None

    extra_init_cmd = "export PAGER=cat"

    return REPLWrapper('bash', prompt_regex, prompt_change_cmd,
                       prompt_emit_cmd=prompt_emit_cmd,
                       extra_init_cmd=extra_init_cmd)

if __name__ == '__main__':
    from IPython.kernel.zmq.kernelapp import IPKernelApp
    IPKernelApp.launch_instance(kernel_class=BashKernel)
# (fragment) tail of IDL_kernel.do_history signature and body:
unique=False):
    # Lazily build the history cache from IDL's recall-buffer file.
    if (IDL_kernel.history == None):
        from os.path import expanduser
        # NOTE(review): `idldir` is computed but never used — confirm intent.
        idldir = getattr(IDL, '!DIR')
        idlHistory = expanduser("~") + '/.idl/idl/rbuf/history'
        IDL_kernel.history = self.processIDLHistory(idlHistory)

    if (hist_access_type == "tail"):
        # Return only the last n entries.
        history = self.history[max(0, len(self.history) - n):]
        return {'history': history}

# -------------------------------------------------------------------------
def do_interrupt(self, interrupt):
    self.log.debug("Interrupt IDL kernel...")
    return {'status': 'ok', 'interrupt': interrupt}

# -------------------------------------------------------------------------
def do_shutdown(self, restart):
    self.log.debug("Shutting down IDL kernel...")
    return {
        'status': 'ok',
        'execution_count': self.execution_count,
        'restart': restart
    }

# -----------------------------------------------------------------------------
if __name__ == '__main__':
    from IPython.kernel.zmq.kernelapp import IPKernelApp
    IPKernelApp.launch_instance(kernel_class=IDL_kernel)
# (fragment) tail of MatlabKernel plot-settings handling:
settings.setdefault('size', '560,420')
width, height = 560, 420
if isinstance(settings['size'], tuple):
    width, height = settings['size']
elif settings['size']:
    # Parse a "WIDTH,HEIGHT" string; report bad input, keep the defaults.
    try:
        width, height = settings['size'].split(',')
        width, height = int(width), int(height)
    except Exception as e:
        self.Error(e)

# Presumably pixels are converted to inches at 150 dpi — TODO confirm.
size = "set(0, 'defaultfigurepaperposition', [0 0 %s %s])\n;"
self.do_execute_direct(size % (width / 150., height / 150.))

def repr(self, obj):
    # Output is already display-ready.
    return obj

def restart_kernel(self):
    """Restart the kernel"""
    self._matlab.stop()

def do_shutdown(self, restart):
    # NOTE(review): writing 'test.txt' looks like leftover debug code — confirm.
    with open('test.txt', 'w') as fid:
        fid.write('hey hey\n')
    self._matlab.stop()

if __name__ == '__main__':
    from IPython.kernel.zmq.kernelapp import IPKernelApp
    IPKernelApp.launch_instance(kernel_class=MatlabKernel)
# (fragment) tail of BashKernel.do_complete:
# Nothing to complete at a blank point.
if not code or code[-1] == ' ':
    return default

tokens = code.replace(';', ' ').split()
if not tokens:
    return default

token = tokens[-1]
start = cursor_pos - len(token)

# Ask bash itself: compgen -cdfa lists commands, dirs, files and aliases.
cmd = 'compgen -cdfa %s' % token
output = self.bashwrapper.run_command(cmd).rstrip()
matches = output.split()
if not matches:
    return default
# Keep only true prefix matches of the token under the cursor.
matches = [m for m in matches if m.startswith(token)]

return {'matches': matches, 'cursor_start': start,
        'cursor_end': cursor_pos, 'metadata': dict(),
        'status': 'ok'}

if __name__ == '__main__':
    from IPython.kernel.zmq.kernelapp import IPKernelApp
    IPKernelApp.launch_instance(kernel_class=BashKernel)
# (fragment) tail of StataKernel.do_execute signature and body:
):
    self.continuation = False
    self.ignore_output()
    code = self.remove_continuations(code.strip())
    # %%mata cell magic: wrap the cell body in `mata ... end`.
    mata_magic = re.match(r'\s*%%mata\s+', code)
    if mata_magic:
        code = 'mata\n' + code[mata_magic.end():] + '\nend\n'
    try:
        self.stata_do(' ' + code + '\n')
        self.respond()
    except KeyboardInterrupt:
        # Ask Stata to break, flush pending output, then abort the request.
        self.stata.UtilSetStataBreak()
        self.respond()
        return {'status': 'abort',
                'execution_count': self.execution_count}
    msg = {
        'status': 'ok',
        'execution_count': self.execution_count,
        'payload': [],
        'user_expressions': {}
    }
    return msg

def do_shutdown(self, restart):
    # Leave Stata cleanly on kernel shutdown.
    self.stata_do(' exit, clear\n')

if __name__ == '__main__':
    from IPython.kernel.zmq.kernelapp import IPKernelApp
    IPKernelApp.launch_instance(kernel_class=StataKernel)
# Entry point: launch a PowerShell kernel when this module is executed.
from IPython.kernel.zmq.kernelapp import IPKernelApp
from .kernel import PowerShellKernel
IPKernelApp.launch_instance(kernel_class=PowerShellKernel)
# (fragment) tail of ForthKernel.do_execute: drain the gforth output queue.
# Non-empty seed so the loop body runs at least once; up to 3 s grace period.
line = '.'
timeout = 3.
while len(line) > 0 or timeout > 0.:
    try:
        line = self._gforth_que.get_nowait()  # or q.get(timeout=.1)
    except Empty:
        line = ''
        if timeout > 0.:
            time.sleep(0.01)
            timeout -= 0.01
    else:
        # got line
        output += line + '\n'
        timeout = 0.

# Return results.
if not silent:
    stream_content = {'name': 'stdout', 'data': output}
    self.send_response(self.iopub_socket, 'stream', stream_content)

# Barf or return ok.
# NOTE(review): the error branch is dead (`if False`) — errors are never
# reported; confirm whether this was intentional.
if False:
    return {'status': 'error', 'execution_count': self.execution_count,
            'ename': '', 'evalue': str(exitcode), 'traceback': []}
else:
    return {'status': 'ok', 'execution_count': self.execution_count,
            'payload': [], 'user_expressions': {}}

if __name__ == '__main__':
    from IPython.kernel.zmq.kernelapp import IPKernelApp
    IPKernelApp.launch_instance(kernel_class=ForthKernel)
# (fragment) tail of a MySQLKernel table-name lookup helper:
except KeyboardInterrupt:
    # Interrupt the child and resync on its prompt; return no names.
    self.ch.sendintr()
    self.ch.expect(self.prompt)
    return []
except pexpect.EOF:
    #XXX: error handling
    return []

# parse output
res = self.ch.before.decode(self.mysql_config["charset"])
l = res.split('\n')
i = 0
table_names = []
while i < len(l):
    if len(l[i]) > 0:
        if l[i][0] == '*':
            # Vertical-format row header; the value is on the next line.
            i += 1
            pos = l[i].find("table_name:")
            if pos != -1:
                #table_names.append(l[i][pos+len("table_name:"):].strip().upper())
                table_names.append(l[i][pos + len("table_name:"):].strip())
    i += 1
return sorted(table_names)

if __name__ == '__main__':
    from IPython.kernel.zmq.kernelapp import IPKernelApp
    IPKernelApp.launch_instance(kernel_class=MySQLKernel)
# (fragment) tail of a config-option reST documentation generator:
    # Escape newlines so the default renders on one line.
    dvr = dvr.replace('\\n', '\\\\n')
    lines.append(' Default: ' + dvr)
    lines.append('')

help = trait.get_metadata('help')
if help is not None:
    help = '\n\n'.join(wrap_paragraphs(help, 76))
    lines.append(indent(help, 4))
else:
    lines.append(' No description')
lines.append('')
return '\n'.join(lines)

# All configurable classes reachable from a kernel app.
kernel_classes = IPKernelApp().classes

def write_doc(filename, title, classes, preamble=None):
    # Render one options page as reST under source/config/options/.
    configdoc = document_config_options(classes)
    with open('source/config/options/%s.rst' % filename, 'w') as f:
        f.write(title + '\n')
        f.write(('=' * len(title)) + '\n')
        f.write('\n')
        if preamble is not None:
            f.write(preamble + '\n\n')
        f.write(configdoc)

if __name__ == '__main__':
    # (call continues beyond this excerpt)
    write_doc('terminal', 'Terminal IPython options',
# (fragment) tail of IDLKernel.do_history plus shutdown handling:
# Create an empty history file on first use.
if not os.path.exists(self.hist_file):
    with open(self.hist_file, 'wb') as f:
        # NOTE(review): writes a str to a binary-mode file — fine on
        # Python 2, TypeError on Python 3; confirm target version.
        f.write('')

with open(self.hist_file, 'rb') as f:
    history = f.readlines()
history = history[:self.max_hist_cache]
self.hist_cache = history
self.log.debug('**HISTORY:')
self.log.debug(history)
# Jupyter history protocol expects (session, line, input) tuples.
history = [(None, None, h) for h in history]
return {'history': history}

def do_shutdown(self, restart):
    self.log.debug("**Shutting down")

    # SIGKILL the child IDL process (no graceful exit path).
    self.idlwrapper.child.kill(signal.SIGKILL)

    # Persist the trailing portion of the history cache.
    if self.hist_file:
        with open(self.hist_file, 'wb') as f:
            data = '\n'.join(self.hist_cache[-self.max_hist_cache:])
            f.write(data.encode('utf-8'))

    return {'status': 'ok', 'restart': restart}

if __name__ == '__main__':
    from IPython.kernel.zmq.kernelapp import IPKernelApp
    IPKernelApp.launch_instance(kernel_class=IDLKernel)
# (fragment) tail of ErlangKernel.do_execute:
# Early success return (for the empty-input case handled just above).
    return {'status': 'ok', 'execution_count': self.execution_count,
            'payload': [], 'user_expressions': {}}

interrupted = False
try:
    output = self.erlangwrapper.run_command(code, timeout=None)
except KeyboardInterrupt:
    # Forward Ctrl-C to the Erlang shell and resync on its prompt.
    self.erlangwrapper.child.sendintr()
    interrupted = True
    self.erlangwrapper._expect_prompt()
    output = self.erlangwrapper.child.before
except EOF:
    # The child died: keep its buffered output and restart the shell.
    output = self.erlangwrapper.child.before + 'Restarting Erlang'
    self._start_erlang()

if not silent:
    # Send standard output
    stream_content = {'name': 'stdout', 'text': output}
    self.send_response(self.iopub_socket, 'stream', stream_content)

if interrupted:
    return {'status': 'abort', 'execution_count': self.execution_count}

return {'status': 'ok', 'execution_count': self.execution_count,
        'payload': [], 'user_expressions': {}}

# ===== MAIN =====
if __name__ == '__main__':
    from IPython.kernel.zmq.kernelapp import IPKernelApp
    IPKernelApp.launch_instance(kernel_class=ErlangKernel)
def complete_registration(self, msg, connect, maybe_tunnel):
    """Finish engine registration with the hub and start the kernel.

    Called with the hub's registration reply. On 'ok' status this wires up
    the heartbeat, shell/control/iopub streams, output redirection, and the
    Kernel instance; on any other status it logs fatally and raises.

    Parameters
    ----------
    msg : list
        Raw multipart registration reply; identities are stripped and the
        payload deserialized via self.session.
    connect : callable
        connect(stream_or_socket, url) — connects a zmq stream/socket to an
        address (tunneled or direct, chosen by the caller).
    maybe_tunnel : callable
        maybe_tunnel(url) -> url, optionally wrapping the address in an SSH
        tunnel.

    Raises
    ------
    Exception
        If the hub reply's status is not 'ok'.
    """
    # print msg
    # Registration succeeded (or failed) — either way, cancel the abort timer.
    self.loop.remove_timeout(self._abort_timeout)
    ctx = self.context
    loop = self.loop
    identity = self.bident
    # NOTE(review): `idents` is unpacked but never used below.
    idents,msg = self.session.feed_identities(msg)
    msg = self.session.unserialize(msg)
    content = msg['content']
    info = self.connection_info

    def url(key):
        """get zmq url for given channel"""
        return str(info["interface"] + ":%i" % info[key])

    if content['status'] == 'ok':
        self.id = int(content['id'])

        # launch heartbeat
        # possibly forward hb ports with tunnels
        hb_ping = maybe_tunnel(url('hb_ping'))
        hb_pong = maybe_tunnel(url('hb_pong'))

        hb_monitor = None
        if self.max_heartbeat_misses > 0:
            # Add a monitor socket which will record the last time a ping was seen
            mon = self.context.socket(zmq.SUB)
            mport = mon.bind_to_random_port('tcp://%s' % localhost())
            mon.setsockopt(zmq.SUBSCRIBE, b"")
            self._hb_listener = zmqstream.ZMQStream(mon, self.loop)
            self._hb_listener.on_recv(self._report_ping)
            hb_monitor = "tcp://%s:%i" % (localhost(), mport)

        heart = Heart(hb_ping, hb_pong, hb_monitor , heart_id=identity)
        heart.start()

        # create Shell Connections (MUX, Task, etc.):
        shell_addrs = url('mux'), url('task')

        # Use only one shell stream for mux and tasks
        stream = zmqstream.ZMQStream(ctx.socket(zmq.ROUTER), loop)
        stream.setsockopt(zmq.IDENTITY, identity)
        shell_streams = [stream]
        for addr in shell_addrs:
            connect(stream, addr)

        # control stream:
        control_addr = url('control')
        control_stream = zmqstream.ZMQStream(ctx.socket(zmq.ROUTER), loop)
        control_stream.setsockopt(zmq.IDENTITY, identity)
        connect(control_stream, control_addr)

        # create iopub stream:
        iopub_addr = url('iopub')
        iopub_socket = ctx.socket(zmq.PUB)
        iopub_socket.setsockopt(zmq.IDENTITY, identity)
        connect(iopub_socket, iopub_addr)

        # disable history:
        self.config.HistoryManager.hist_file = ':memory:'

        # Redirect input streams and set a display hook.
        if self.out_stream_factory:
            sys.stdout = self.out_stream_factory(self.session, iopub_socket, u'stdout')
            sys.stdout.topic = cast_bytes('engine.%i.stdout' % self.id)
            sys.stderr = self.out_stream_factory(self.session, iopub_socket, u'stderr')
            sys.stderr.topic = cast_bytes('engine.%i.stderr' % self.id)
        if self.display_hook_factory:
            sys.displayhook = self.display_hook_factory(self.session, iopub_socket)
            sys.displayhook.topic = cast_bytes('engine.%i.execute_result' % self.id)

        self.kernel = Kernel(parent=self, int_id=self.id, ident=self.ident,
                             session=self.session,
                             control_stream=control_stream,
                             shell_streams=shell_streams,
                             iopub_socket=iopub_socket,
                             loop=loop, user_ns=self.user_ns, log=self.log)

        self.kernel.shell.display_pub.topic = cast_bytes('engine.%i.displaypub' % self.id)

        # periodically check the heartbeat pings of the controller
        # Should be started here and not in "start()" so that the right period can be taken
        # from the hubs HeartBeatMonitor.period
        if self.max_heartbeat_misses > 0:
            # Use a slightly bigger check period than the hub signal period to not warn unnecessary
            self.hb_check_period = int(content['hb_period'])+10
            self.log.info("Starting to monitor the heartbeat signal from the hub every %i ms." , self.hb_check_period)
            self._hb_reporter = ioloop.PeriodicCallback(self._hb_monitor, self.hb_check_period, self.loop)
            self._hb_reporter.start()
        else:
            self.log.info("Monitoring of the heartbeat signal from the hub is not enabled.")

        # FIXME: This is a hack until IPKernelApp and IPEngineApp can be fully merged
        app = IPKernelApp(parent=self, shell=self.kernel.shell, kernel=self.kernel, log=self.log)
        app.init_profile_dir()
        app.init_code()

        self.kernel.start()
    else:
        self.log.fatal("Registration Failed: %s"%msg)
        raise Exception("Registration Failed: %s"%msg)

    self.log.info("Completed registration with id %i"%self.id)
# NOTE(review): fragment — tail of a Hy-to-Python translation method (its `def`
# line is outside this chunk). Compiles parsed Hy tokens to a Python AST and
# pretty-prints it back to Python source via astor.
_ast = hy_compile(tokens, '__console__', root=ast.Interactive)
# to_source expects a Module node, so rewrap the Interactive body.
_ast_for_print = ast.Module()
_ast_for_print.body = _ast.body
return astor.codegen.to_source(_ast_for_print)

def do_complete(self, code, cursor_pos):
    """Tab completion for Hy: IPython's matches plus Hy macros/special forms.

    Parameters
    ----------
    code : str
        The cell text being completed.
    cursor_pos : int
        Cursor offset into *code*.

    Returns
    -------
    dict
        Standard complete_reply payload. Underscores in matches are mangled
        to dashes to match Hy's naming convention.
    """
    # let IPython do the heavy lifting for variables, etc.
    txt, matches = self.shell.complete('', code, cursor_pos)
    # mangle underscores into dashes
    matches = [match.replace('_', '-') for match in matches]
    # Add names from the Hy macro registries and the compiler special forms.
    for p in list(_hy_macros.values()) + [_compile_table]:
        # Only string keys are usable as completions.
        p = filter(lambda x: isinstance(x, str), p.keys())
        p = [x.replace('_', '-') for x in p]
        matches.extend(
            [x for x in p
             if x.startswith(txt) and x not in matches])
    return {
        'matches': matches,
        'cursor_end': cursor_pos,
        'cursor_start': cursor_pos - len(txt),
        'metadata': {},
        'status': 'ok'
    }

if __name__ == '__main__':
    from IPython.kernel.zmq.kernelapp import IPKernelApp
    IPKernelApp.launch_instance(kernel_class=HyKernel)