def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    self.code_obj = {}
    self.num_array = []
    self.id_array = []
    self.stack = []
    self.environmrnt = {}
    self.instruction_set = {
        0: self.LOAD_NUM,
        1: self.DECL_FAST,
        2: self.STORE_FAST,
        3: self.LOAD_FAST,
        4: self.ADD_TWO_NUMBERS,
        5: self.SUBSTRACT_TWO_NUMBERS,
        6: self.MULTIPLE_TWO_NUMBERS,
        # 7 was initially reserved for division; now it is used for print
        7: self.PRINT,
        8: self.COMPARE_TWO_NUMBERS,
        9: self.JUMP,
        10: self.JUMP_IF_FALSE,
        11: self.WHILE_LOOP,
        13: self.QUIT,
        # 14 - 16 are used
        17: self.LOAD_STRING
    }
    self.false_flag = 0
    self.pc = 0
    self.quit_flag = 0
    self.silent = None
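# The dispatch table above maps numeric opcodes to handler methods and keeps a
# program counter in self.pc, but the execution loop itself is not part of this
# snippet. The sketch below is a hypothetical illustration only: it assumes the
# program arrives as a list of (opcode, argument) pairs and that jump handlers
# rewrite self.pc themselves; neither detail is confirmed by the original code.
def run_program(self, program):
    self.pc = 0
    self.quit_flag = 0
    while self.pc < len(program) and not self.quit_flag:
        opcode, arg = program[self.pc]
        handler = self.instruction_set.get(opcode)
        if handler is None:
            raise ValueError("unknown opcode: %r" % opcode)
        self.pc += 1  # advance first so JUMP/JUMP_IF_FALSE handlers may overwrite self.pc
        handler(arg)  # calling convention (one argument per instruction) is assumed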
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    self.tts = speechd.Speaker("jupyter_itts_kernel")
    self.possibleCommands = defaultdict(CommandDescriptor)
    self.initializeMagics()
    self.possibleCommands = ChosenMagicsCompleter(self.possibleCommands)
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    repl = subprocess_repl.SubprocessRepl([
        'C:\\Program Files\\SimVascular\\SimVascular\\REPLACE_SV_TIMESTAMP\\sv.bat',
        '-tk', '--'
    ])
    self.proxy = simvascular_tk_proxy.ReplProxy(repl)
def __init__(self, **kwargs):
    start_time = time.time()
    Kernel.__init__(self, **kwargs)
    self.ansible_cfg = None
    self.ansible_process = None
    self.current_play = None
    self.next_task_file = None
    self.task_files = []
    self.playbook_file = None
    self.silent = False
    self.runner = None
    self.runner_thread = None
    self.shutdown_requested = False
    self.shutdown = False
    self.default_inventory = "[all]\nlocalhost ansible_connection=local\n"
    self.default_play = yaml.dump(
        dict(hosts='localhost', name='default', gather_facts=False))
    self.temp_dir = tempfile.mkdtemp(prefix="ansible_kernel_playbook")
    self.queue = queue.Queue()
    self.tasks_counter = 0
    self.current_task = None
    logger.debug(self.temp_dir)
    os.mkdir(os.path.join(self.temp_dir, 'env'))
    os.mkdir(os.path.join(self.temp_dir, 'project'))
    os.mkdir(os.path.join(self.temp_dir, 'project', 'roles'))
    with open(os.path.join(self.temp_dir, 'env', 'settings'), 'w') as f:
        f.write(json.dumps(dict(idle_timeout=0, job_timeout=0)))
    self.do_inventory(self.default_inventory)
    self.do_execute_play(self.default_play)
    logger.info("Kernel init finished took %s", time.time() - start_time)
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    repl = subprocess_repl.SubprocessRepl([
        '/usr/local/package/simvascular/REPLACE_SV_TIMESTAMP/simvascular',
        '-python', '--', '-i'
    ])
    self.proxy = simvascular_python_proxy.ReplProxy(repl)
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    self._coqtop = CoqtopWrapper(self)
    self._journal = CellJournal(self)
    self._comms = {}
    for msg_type in ['comm_open', 'comm_msg', 'comm_close']:
        self.shell_handlers[msg_type] = getattr(self, msg_type)
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    self.definitions = Definitions(add_builtin=True)    # TODO Cache
    self.definitions.set_ownvalue('$Line', Integer(0))  # Reset the line number
    self.establish_comm_manager()  # needed for ipywidgets and Manipulate[]
    self.web_engine = None
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    self.log.setLevel(logging.INFO)
    self.delimiter = ";"
    self.client_config = ClientConfig(self.log)
    self.mariadb_client = MariaDBClient(self.log, self.client_config)
    self.mariadb_server = None
    self.data = {"last_select": pandas.DataFrame([])}
    try:
        self.mariadb_client.start()
    except ServerIsDownError:
        if not self.client_config.start_server():
            self.log.error(
                "The options passed through mariadb_kernel.json "
                "prevent the kernel from starting a testing "
                "MariaDB server instance")
            raise

        # Start a single MariaDB server for a better experience
        # if the user wants to quickly test the kernel
        self.mariadb_server = MariaDBServer(self.log, self.client_config)
        self.mariadb_server.start()

        # Reconnect the client now that the server is up
        if self.mariadb_server.is_up():
            self.mariadb_client.start()
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    # Get context
    r = requests.post('{}/ctx/new'.format(self.address))
    self.context = r.text
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    if os.path.exists(self.mysql_setting_file):
        with open(self.mysql_setting_file, "r") as f:
            self.mysql_config.update(json.load(f))
    self.parser = MysqlParser()
    self.connect()
def __init__(self, target_name='', data=None, metadata=None, buffers=None, **kwargs):
    self.primary = True  # Am I the primary or secondary Comm?
    self.target_name = target_name
    # requirejs module from which to load comm target
    self.target_module = kwargs.get('target_module', None)
    self.open_hook = None
    self._closed = True
    self._close_callback = None
    self._msg_callback = None
    try:
        self.kernel = kwargs['kernel']
    except KeyError:
        if Kernel.initialized():
            self.kernel = Kernel.instance()
        else:
            self.kernel = None
    try:
        self.comm_id = kwargs['comm_id']
    except KeyError:
        self.comm_id = uuid.uuid4().hex
    self.topic = kwargs.get('topic', ('comm-%s' % self.comm_id).encode('ascii'))
    self.log = log.get_logger()
    if self.kernel:
        if self.primary:
            # I am primary, open my peer.
            self.open(data=data, metadata=metadata, buffers=buffers)
        else:
            self._closed = False
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    self._coqtop = Coqtop(self, self.coqtop_args)
    self._journal = CellJournal(self)
    self._renderer = Renderer()
    self._kernel_comms = []
    for msg_type in ['comm_open', 'comm_msg', 'comm_close']:
        self.shell_handlers[msg_type] = getattr(self, msg_type)
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    log('inside init')
    # Catch KeyboardInterrupt, cancel query, raise QueryCancelledError
    psycopg2.extensions.set_wait_callback(wait_select_inter)
    self._conn_string = os.getenv('DATABASE_URL', '')
    self._conn = None
    self._start_connection()
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    env_interpreter = os.environ.get(ENV_WASM_INTERPRETER, "wasm")
    self._interpreter_path = shutil.which(env_interpreter)
    if self._interpreter_path is None:
        raise Exception("Unable to find a `%s` executable in $PATH: %s"
                        % (env_interpreter, os.environ.get("PATH")))
    self._start_wasm()
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    log('inside init')
    # Catch KeyboardInterrupt, cancel query, raise QueryCancelledError
    psycopg2.extensions.set_wait_callback(wait_select_inter)
    self._conn_string = os.getenv('DATABASE_URL', '')
    self._autocommit = True
    self._conn = None
    self._start_connection()
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    self.silent = False
    self.dc = deviceconnector.DeviceConnector(self.sres, self.sresSYS)
    self.mpycrossexe = None

    # 0 none, 1 print lines, 2 print on-going line count (--quiet),
    # 3 print only final line count (--QUIET)
    self.srescapturemode = 0
    self.srescapturedoutputfile = None  # used by %capture command
    self.srescapturedlinecount = 0
    self.srescapturedlasttime = 0  # to control the frequency of capturing reported
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    logger = logging.getLogger('rax_kernel')
    fh = logging.FileHandler('rax_kernel.log')
    fmt = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    fh.setFormatter(fmt)
    logger.addHandler(fh)
    logger.setLevel(logging.DEBUG)
    self.logger = logger
    self.rax_running = False
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    # The path to the unix micropython binary should be in PATH; if not,
    # check the env for MPUNIX.
    # TODO: Use shutil.which() in python 3.3+
    self.micropython_exe = 'micropython'
    if os.environ.get('MPUNIX') is not None:
        self.micropython_exe = os.environ.get('MPUNIX')
    self._start_interpreter()
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    self._replace_get_ipython()
    self.comm_manager = CommManager(shell=None, parent=self, kernel=self)
    self.shell_handlers['comm_open'] = self.comm_manager.comm_open
    self.shell_handlers['comm_msg'] = self.comm_manager.comm_msg
    self.shell_handlers['comm_close'] = self.comm_manager.comm_close
    if ipywidgets_extension_loaded:
        self.comm_manager.register_target('ipython.widget', Widget.handle_comm_opened)
    self._start_polymake()
def __init__(self, **kwargs): Kernel.__init__(self, **kwargs) self._replace_get_ipython() self.comm_manager = CommManager(shell=None, parent=self, kernel=self) self.shell_handlers["comm_open"] = self.comm_manager.comm_open self.shell_handlers["comm_msg"] = self.comm_manager.comm_msg self.shell_handlers["comm_close"] = self.comm_manager.comm_close self.comm_manager.register_target("ipython.widget", Widget.handle_comm_opened) self._start_singular()
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    self._start_jython()
    try:
        self.hist_file = os.path.join(locate_profile(), 'jython_kernel.hist')
    except Exception:
        self.hist_file = None
        self.log.warning('No default profile found, history unavailable')
    self.max_hist_cache = 1000
    self.hist_cache = []
def __init__(self, **kwargs):
    self.mathjax_initialized = False
    Kernel.__init__(self, **kwargs)
    if self.log is None:
        # This occurs if we call as a stand-alone kernel
        # (eg, not as a process)
        # FIXME: take care of input/output, eg StringIO
        #        make work without a session
        self.log = logging.Logger("NotebookApp")

    self.definitions = Definitions(add_builtin=True)    # TODO Cache
    self.definitions.set_ownvalue('$Line', Integer(0))  # Reset the line number
def __init__(self, **kwargs): """ Constructor """ Kernel.__init__(self, **kwargs) opt = "" if "LOG4J_CONF_FILE" in os.environ: opt += " -4 " + os.environ["LOG4J_CONF_FILE"] # Start grunt self.pig = pexpect.spawn("/opt/pig-0.15.0/bin/pig -x local " + opt) # Wait until grunt start self.pig.expect(GRUNT_NEW_LINE_MODEL, timeout=GRUNT_START_TIMEOUT)
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    # The powershell_command env variable is set by the kernel to allow both powershell and pwsh,
    # but on python2 we cannot pass it thru an env variable, see
    # https://github.com/vors/jupyter-powershell/issues/7
    # TODO(python2): can we pass it somehow differently and still provide a user-picked value on python2?
    try:
        powershell_command = environ['powershell_command']
    except KeyError:
        powershell_command = get_powershell()
    repl = subprocess_repl.SubprocessRepl([powershell_command, '-noprofile', '-File', '-'])
    self.proxy = powershell_proxy.ReplProxy(repl)
def __init__(self, **kwargs): """ Constructor """ Kernel.__init__(self, **kwargs); opt = ""; if "LOG4J_CONF_FILE" in os.environ: opt += " -4 " + os.environ["LOG4J_CONF_FILE"]; # Start grunt self.pig = pexpect.spawn("/opt/pig-0.15.0/bin/pig -x local " + opt); # Wait until grunt start self.pig.expect(GRUNT_NEW_LINE_MODEL, timeout=GRUNT_START_TIMEOUT);
def __init__(self, **kwargs):
    conf_file = os.path.expanduser(CONFIG_FILE)
    if os.path.isfile(conf_file):
        with open(conf_file, mode='r') as file_handle:
            self.conf = json.load(file_handle)
    pyhiveconf, sql_req = self.parse_code(code="")
    self.create_conn(**pyhiveconf)
    # if self.last_conn is None:
    #     raise ConnectionNotCreated()
    Kernel.__init__(self, **kwargs)
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    logger = logging.getLogger('ansible_kernel.kernel.__init__')
    self.ansible_cfg = None
    self.ansible_process = None
    self.current_play = None
    self.default_play = yaml.dump(
        dict(hosts='localhost', name='default', gather_facts=False))
    self.temp_dir = tempfile.mkdtemp(prefix="ansible_kernel_playbook")
    self.queue = Queue()
    self.tasks_counter = 0
    self.current_task = None
    logger.debug(self.temp_dir)
    self.do_execute_play(self.default_play)
def __init__(self, **kwargs): """ Constructor """ Kernel.__init__(self, **kwargs); opt = ""; if "LOG4J_CONF_FILE" in os.environ: opt += " -4 " + os.environ["LOG4J_CONF_FILE"]; # Start grunt pig_home = "/usr/pig" if os.environ.get("PIG_HOME") == None else os.environ["PIG_HOME"] self.pig = pexpect.spawn(pig_home + "/bin/pig -x local " + opt); # Wait until grunt start self.pig.expect(GRUNT_NEW_LINE_MODEL, timeout=GRUNT_START_TIMEOUT);
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    self._start_ncl()
    try:
        self.hist_file = os.path.join(locate_profile(), 'ncl_kernel.hist')
    except Exception:
        self.hist_file = None
        self.log.warning('No default profile found, history unavailable')
    self.max_hist_cache = 1000
    self.hist_cache = []
    # self._default_matches = cPickle.load(open('data/inbuilt_list', 'rb'))
    self._default_matches = self.inbuiltlist()
def hold_comm_open(kernel=None):
    if kernel is None:
        if not Kernel.initialized():
            raise ValueError(
                'No kernel passed, and current kernel not initialized')
        kernel = Kernel.instance()
    comm_manager = getattr(kernel, 'comm_manager', None)
    if comm_manager is None:
        raise RuntimeError("Comms cannot be opened without a kernel "
                           "and a comm_manager attached to that kernel.")

    patched_comms = []
    messages = []
    buffers = []

    def patched_register_comm(comm):
        original_publish_msg = comm._publish_msg

        def patched_publish_msg(msg_type, **kwargs):
            if msg_type != 'comm_open':
                original_publish_msg(msg_type, **kwargs)
            msg, msg_bufs = _convert_message(comm, **kwargs)
            messages.append(msg)
            buffers.extend(msg_bufs)

        patched_comms.append((comm, original_publish_msg))
        comm._publish_msg = patched_publish_msg

    original_register_comm = comm_manager.register_comm
    comm_manager.register_comm = patched_register_comm
    try:
        yield
    finally:
        comm_manager.register_comm = original_register_comm
        for comm, original_publish_msg in patched_comms:
            comm._publish_msg = original_publish_msg
        data = dict(messages=messages)
        args = dict(target_name='jupyter.widget-tunnel',
                    data=data,
                    buffers=buffers,
                    metadata={'version': __protocol_version__})
        comm = Comm(**args)
        comm.close()
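# hold_comm_open() above is written as a generator (it yields inside
# try/finally), so it is presumably wrapped with contextlib.contextmanager in
# the original module. Under that assumption, a usage sketch might look like
# this; the ipywidgets calls below are illustrative, not part of the original code.
import ipywidgets as widgets
from IPython.display import display

with hold_comm_open():
    # comm_open messages produced here (e.g. by creating a widget) are captured
    # instead of being published immediately.
    slider = widgets.IntSlider(value=3)
    display(slider)
# On exit the collected messages are forwarded in one batch over a
# 'jupyter.widget-tunnel' comm and the patched methods are restored.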
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    self.delimiter = ";"
    self.client_config = ClientConfig(self.log)
    self.mariadb_client = MariaDBClient(self.log, self.client_config)
    self.mariadb_server = None
    self.data = {"last_select": pandas.DataFrame([])}

    if self.client_config.debug_logging():
        self.log.setLevel(logging.DEBUG)
    else:
        self.log.setLevel(logging.INFO)

    try:
        self.mariadb_client.start()
    except ServerIsDownError:
        if not self.client_config.start_server():
            self.log.error(
                "The options passed through mariadb_kernel.json "
                "prevent the kernel from starting a testing "
                "MariaDB server instance")
            raise

        # Start a single MariaDB server for a better experience
        # if the user wants to quickly test the kernel
        self.mariadb_server = MariaDBServer(self.log, self.client_config)
        self.mariadb_server.start()

        # Reconnect the client now that the server is up
        if self.mariadb_server.is_up():
            self.mariadb_client.start()

    # Create autocompletion/introspection objects based on whether
    # the user enabled this feature or not
    self.autocompleter = None
    self.introspector = None
    if self.client_config.autocompletion_enabled():
        try:
            self.autocompleter = Autocompleter(self.mariadb_client,
                                               self.client_config, self.log)
            self.introspector = Introspector()
        except Exception:
            # Something went terribly wrong, disabling the feature
            self.log.error(
                "Code completion functionalities were disabled due to an unexpected error")
            self.autocompleter = None
            self.introspector = None
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    self._start_ncl()
    self.pexpect_version = pexpect.__version__
    try:
        self.hist_file = os.path.join(locate_profile(), 'ncl_kernel.hist')
    except Exception:
        self.hist_file = None
        self.log.warning('No default profile found, history unavailable')
    self.max_hist_cache = 1000
    self.hist_cache = []
    # self._default_matches = cPickle.load(open('data/inbuilt_list', 'rb'))
    self._default_matches = self.inbuiltlist()
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    # Instantiate IPython.core.debugger.Pdb here, pass it a phony
    # stdout that provides a dummy flush() method and a write() method
    # that internally sends data using a function so that it can
    # be initialized to use self.send_response()
    write_func = lambda s: self.send_response(
        self.iopub_socket, 'stream', {'name': 'stdout', 'text': s})
    sys.excepthook = functools.partial(BdbQuit_excepthook,
                                       excepthook=sys.excepthook)
    self.debugger = Pdb(stdout=PhonyStdout(write_func))
    self.debugger.set_trace(sys._getframe().f_back)
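# PhonyStdout itself is not shown in this snippet. A minimal sketch consistent
# with the comment above (a dummy flush() and a write() that forwards output
# through the supplied function) could look like this; the class body is an
# assumption, not the original implementation.
class PhonyStdout(object):
    def __init__(self, write_func):
        self._write_func = write_func  # e.g. the lambda that wraps send_response()

    def write(self, s):
        # Forward everything Pdb prints to the kernel's stdout stream.
        self._write_func(s)

    def flush(self):
        # Pdb flushes its stdout; write() already sends immediately, so no-op.
        pass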
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)

    ON_POSIX = 'posix' in sys.builtin_module_names

    def enqueue_output(out, queue):
        # Forward each line of gforth's stdout to the queue, then close the pipe.
        for line in iter(out.readline, b''):
            queue.put(line)
        out.close()

    self._gforth = Popen('gforth', stdin=PIPE, stdout=PIPE, bufsize=2,
                         close_fds=ON_POSIX)
    self._gforth_queue = Queue()
    t = Thread(target=enqueue_output,
               args=(self._gforth.stdout, self._gforth_queue))
    t.daemon = True
    t.start()
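# The daemon thread above pushes each line of gforth's stdout onto
# self._gforth_queue so the kernel never blocks on readline(). A typical way to
# drain such a queue without blocking is sketched below; the _read_output name,
# the timeout value, and the Python 3 `queue` import are assumptions, not part
# of the original kernel.
from queue import Empty

def _read_output(self, timeout=0.1):
    lines = []
    while True:
        try:
            # A short timeout gives the interpreter a moment to produce output;
            # Empty means nothing more is currently available.
            lines.append(self._gforth_queue.get(timeout=timeout))
        except Empty:
            break
    return b''.join(lines).decode('utf-8', errors='replace')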
def test_rank_symbol_theme_layer(self, mock_send):
    """
    :type mock_send: Mock
    :type layer: RankSymbolThemeLayer
    :param mock_send:
    :return:
    """
    layer = RankSymbolThemeLayer(name='test1',
                                 data=[
                                     {'name': '北京市', 'value': 23014.59},
                                     {'name': '天津市', 'value': 16538.189999999999},
                                     {'name': '河北省', 'value': 29806.110000000001}
                                 ],
                                 address_key='name',
                                 value_key='value')
    layer._map = MapView()
    comm = Comm()
    comm.kernel = Kernel()
    layer.comm = comm

    layer.name = "test"
    expected = {
        'method': 'update',
        'state': {
            'name': 'test'
        },
        'buffer_paths': []
    }
    mock_send.assert_called_with(data=expected, buffers=[])
def test_cloud_tile_layer(self, mock_send):
    """
    :type mock_send: Mock
    :param mock_send:
    :return:
    """
    layer = CloudTileLayer()
    layer._map = MapView()
    comm = Comm()
    comm.kernel = Kernel()
    layer.comm = comm

    layer.map_name = 'quanguo'
    expected = {
        'method': 'update',
        'state': {
            'map_name': 'quanguo'
        },
        'buffer_paths': []
    }
    mock_send.assert_called_with(data=expected, buffers=[])

    layer.type = 'web'
    expectedVisibility = {
        'method': 'update',
        'state': {
            'type': 'web'
        },
        'buffer_paths': []
    }
    mock_send.assert_called_with(data=expectedVisibility, buffers=[])
    self.assertEqual(mock_send.call_count, 2)
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    self.dir = get_kernel_spec('vim_kernel').resource_dir
    self.vim = Popen([
        'vim', '-X', '-N',
        '-u', 'NONE',
        '-i', 'NONE',
        '-e', '-s',
        '-S', path.join(self.dir, 'kernel.vim')
    ], stdout=PIPE, stderr=STDOUT, shell=False, env=environ.copy())
def apply(self, expr, args, evaluation):
    'Manipulate[expr_, args__]'

    if (not _jupyter) or (not Kernel.initialized()) or (Kernel.instance() is None):
        return evaluation.message('Manipulate', 'jupyter')

    instantiator = _WidgetInstantiator()  # knows about the arguments and their widgets

    for arg in args.get_sequence():
        try:
            if not instantiator.add(arg, evaluation):  # not a valid argument pattern?
                return
        except IllegalWidgetArguments as e:
            return evaluation.message('Manipulate', 'widgetargs', strip_context(str(e.var)))
        except JupyterWidgetError as e:
            return evaluation.message('Manipulate', 'widgetmake', e.err)

    clear_output_callback = evaluation.output.clear
    display_data_callback = evaluation.output.display  # for pushing updates

    try:
        clear_output_callback(wait=True)
    except NotImplementedError:
        return evaluation.message('Manipulate', 'imathics')

    def callback(**kwargs):
        clear_output_callback(wait=True)

        line_no = evaluation.definitions.get_line_no()

        vars = [Expression('Set', Symbol(name), value) for name, value in kwargs.items()]
        evaluatable = Expression('ReleaseHold',
                                 Expression('Module', Expression('List', *vars), expr))

        result = evaluation.evaluate(evaluatable, timeout=settings.TIMEOUT)
        if result:
            display_data_callback(data=result.result, metadata={})

        # do not increment line_no for manipulate computations
        evaluation.definitions.set_line_no(line_no)

    widgets = instantiator.get_widgets()
    if len(widgets) > 0:
        box = _interactive(instantiator.build_callback(callback), widgets)  # create the widget
        formatter = IPythonDisplayFormatter()
        if not formatter(box):  # make the widget appear on the Jupyter notebook
            return evaluation.message('Manipulate', 'widgetdisp')

    # the interactive output is pushed via kernel.display_data_callback (see above)
    return Symbol('Null')
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    repl = subprocess_repl.SubprocessRepl(['C:\\cygwin64\\bin\\bash.exe', '-i'])
    self.proxy = cygwin_bash_proxy.ReplProxy(repl)
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    repl = subprocess_repl.SubprocessRepl([
        'C:\\Program Files\\SimVascular\\SimVascular\\2019-02-05\\sv.bat',
        '-tcl', '--'
    ])
    self.proxy = simvascular_tcl_proxy.ReplProxy(repl)
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    self.cookery = Cookery(jupyter=True)
    self.cookery.log = self.log.getChild(self.implementation)
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    self._start_magma()
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    self.tcl = Tkinter.Tcl()
    self.execution_count = 0
    # Redefine Tcl's puts: a single-argument call returns "=> value" so the
    # kernel can capture it; multi-argument calls fall back to the original puts.
    putsredef = ('rename puts original_puts \n'
                 'proc puts {args} {\n'
                 ' if {[llength $args] == 1} {\n'
                 ' return "=> [lindex $args 0]"\n'
                 ' } else {\n'
                 ' eval original_puts $args\n'
                 ' }\n'
                 '}\n')
    self.tcl.eval(putsredef)
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    self.tcl = Tkinter.Tcl()
    self.execution_count = 0
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    repl = subprocess_repl.SubprocessRepl([
        'C:\\Program Files\\SimVascular\\SimVascular\\REPLACE_SV_TIMESTAMP\\sv.bat',
        '-python', '--', '-i'
    ])
    self.proxy = simvascular_python_proxy.ReplProxy(repl)
def _default_kernel(self):
    if Kernel.initialized():
        return Kernel.instance()
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    self.repl = ReplWrapper()
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    self._start_spark()
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    self.hostname = os.environ.get('CASSANDRA_HOSTNAME', 'localhost')
    self._start_cql()
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    repl = subprocess_repl.SubprocessRepl([
        '/usr/local/package/simvascular/REPLACE_SV_TIMESTAMP/simvascular',
        '-python', '--', '-i'
    ])
    self.proxy = simvascular_python_proxy.ReplProxy(repl)
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    self._start_gap()
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    self.client = command.Client()
    self.qsh = sh.QSh(self.client)
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    self._start_interpreter()
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    self.start_redis(**kwargs)
    self.get_commands()
    self.start_history()
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    self.definitions = Definitions(add_builtin=True)    # TODO Cache
    self.definitions.set_ownvalue('$Line', Integer(0))  # Reset the line number
    self.establish_comm_manager()  # needed for ipywidgets and Manipulate[]
def __init__(self, **kwargs):
    Kernel.__init__(self, **kwargs)
    self.omc = OMCSessionZMQ()
    self.matfile = None