def _get_kernel():
    # Get Jupyter console kernel info. If there is no console,
    # we'll fail here
    cf = jupyter_client.find_connection_file()
    km = jupyter_client.BlockingKernelClient(connection_file=cf)
    km.load_connection_file()
    return km
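# Usage sketch (assumed, not from the original source): the helper above only
# builds the client, so the channels still have to be started before sending
# code to the console kernel. The timeouts and the executed statement below are
# illustrative.
km = _get_kernel()
km.start_channels()
try:
    # wait_for_ready() raises RuntimeError if the kernel never answers
    km.wait_for_ready(timeout=10)
    # Run a statement on the console kernel and relay its output locally
    km.execute_interactive("print('hello from the console kernel')", timeout=10)
finally:
    km.stop_channels()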
def start(self, metadata: Dict[str, str], database: int = 1) -> None:
    """Method called in a periodically scheduled async worker that should check
    the dev env and manage Activity Monitor Instances as needed

    Args:
        metadata(dict): A dictionary of data to start the activity monitor
        database(int): The database ID to use

    Returns:
        None
    """
    # Connect to the kernel
    cf = jupyter_client.find_connection_file(metadata["kernel_id"],
                                             path=os.environ['JUPYTER_RUNTIME_DIR'])
    km = jupyter_client.BlockingKernelClient()

    with open(cf, 'rt') as cf_file:
        cf_data = json.load(cf_file)

    # Get IP address of lab book container on the bridge network
    container_ip = self.get_container_ip()
    if not container_ip:
        raise ValueError("Failed to find LabBook container IP address.")
    cf_data['ip'] = container_ip

    km.load_connection_info(cf_data)

    # Get connection to the DB
    redis_conn = redis.Redis(db=database)

    try:
        while True:
            try:
                # Check for messages, waiting up to 1 second. This is the rate
                # at which records will be merged
                msg = km.get_iopub_msg(timeout=1)
                self.handle_message(msg)
            except queue.Empty:
                # If the queue is empty and the record is ready to store, save it!
                if self.can_store_activity_record is True:
                    self.store_record(metadata)

            # Check if you should exit
            if redis_conn.hget(self.monitor_key, "run").decode() == "False":
                logger.info("Received Activity Monitor Shutdown Message for {}".format(
                    metadata["kernel_id"]))
                break
    except Exception as err:
        logger.error("Error in JupyterLab Activity Monitor: {}".format(err))
    finally:
        # Delete the kernel monitor key so the dev env monitor will spin up a new process.
        # You may lose some activity if this happens, but the next action will sweep up changes.
        redis_conn.delete(self.monitor_key)
def __init__(self, info=None):
    """Create a new spy.

    Either supply connection information here, or use the `connect` method.
    """
    self.client = jupyter_client.BlockingKernelClient()
    if info is not None:
        self.connect(info)
def create_client(name):
    cf = find_connection_file('emacs-' + name)
    c = client.BlockingKernelClient(connection_file=cf)
    c.load_connection_file()
    c.start_channels()

    chans = [('io', c.get_iopub_msg),
             ('shell', c.get_shell_msg),
             ('stdin', c.get_stdin_msg)]
    for name, ch in chans:
        t = threading.Thread(target=msg_router, args=(name, ch))
        t.start()
    return c
def create_client(name):
    if name.endswith('.json'):
        cf = find_connection_file(name)
    else:
        cf = find_connection_file('emacs-' + name)
    c = client.BlockingKernelClient(connection_file=cf)
    c.load_connection_file()
    c.start_channels()

    io, shell = c.get_iopub_msg, c.get_shell_msg
    t = threading.Thread(target=msg_router, args=(io, shell))
    t.setDaemon(True)
    t.start()
    return c
def create_client(name):
    if name.endswith('.json'):
        # Received an existing kernel we should connect to.
        cf = find_connection_file(name)
    else:
        cf = find_connection_file('emacs-' + name)
    c = client.BlockingKernelClient(connection_file=cf)
    c.load_connection_file()
    c.start_channels()

    chans = [('io', c.get_iopub_msg),
             ('shell', c.get_shell_msg),
             ('stdin', c.get_stdin_msg)]
    for name, ch in chans:
        t = threading.Thread(target=msg_router, args=(name, ch))
        t.start()
    return c
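# The create_client() variants above hand each channel's getter to a
# msg_router() function that is not shown in these snippets. The router below
# is a hypothetical, minimal sketch (its name matches the snippets, but its
# behaviour is assumed): it takes the (name, channel getter) pair used in the
# first and third variants and simply drains that channel, printing whatever
# arrives.
import queue

def msg_router(name, get_msg):
    """Hypothetical sketch: poll one channel and print its messages."""
    while True:
        try:
            # get_msg is one of get_iopub_msg / get_shell_msg / get_stdin_msg
            msg = get_msg(timeout=1)
        except queue.Empty:
            continue
        print(name, msg.get('msg_type'), msg.get('content'))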
def kernel(f):
    if verbose:
        logging.info("kernel connecting " + f)
    cf = jupyter_client.find_connection_file(f)
    km = jupyter_client.BlockingKernelClient(connection_file=cf)

    # load connection info and init communication
    km.load_connection_file()
    km.start_channels()

    try:
        km.wait_for_ready(timeout=network_timeout)
    except RuntimeError:
        # wait_for_ready() raises RuntimeError when the kernel does not respond in time
        logging.error("he's dead, jim")
        sys.exit(1)

    return km
def initialize_remote(self, connection_file, ssh=None):
    """
    Initialize the client and connect to a kernel (possibly remote),
    if it isn't yet initialized.
    """
    import jupyter_client

    if not self.initialized.get():
        self.client = jupyter_client.BlockingKernelClient()
        if ssh is None:
            self.client.load_connection_file(connection_file)
        else:
            with open(connection_file) as f:
                parsed = json.load(f)
            newports = jupyter_client.tunnel_to_kernel(connection_file, ssh)
            (
                parsed["shell_port"],
                parsed["iopub_port"],
                parsed["stdin_port"],
                parsed["hb_port"],
                parsed["control_port"],
            ) = newports
            with open(connection_file, "w") as f:
                json.dump(parsed, f)
            self.client.load_connection_file(connection_file)
        self.client.start_channels()
        try:
            print("Connecting to the kernel...")
            self.client.wait_for_ready(timeout=60)
        except RuntimeError as err:
            self.client.stop_channels()
            print("Could not connect to existing kernel: %s" % err,
                  file=sys.stderr)
            return
        self.initialize_common()
def start_server(self):
    from glob import glob
    if self.jupyter_process is not None:
        self.stop_server()

    json_dir = jupyter_core.paths.jupyter_runtime_dir()
    jsons_before = glob(os.path.join(json_dir, "*.json"))

    args = shlex.split("jupyter notebook --no-browser")
    self.jupyter_process = p = subprocess.Popen(args=args, stderr=subprocess.PIPE)

    # figure out the url from the initial console messages
    from re import search
    line = p.stderr.readline().decode(encoding).strip()
    matches = search("http://.*:(.*?)/", line)
    # print(line)
    while matches is None:
        line = p.stderr.readline().decode(encoding).strip()
        # print(line)
        matches = search("http://.*:(.*?)/", line)
    url = "http://{}:{}/".format("localhost", matches.groups(1)[0])
    # print("URL: " + url)
    self.url = QtCore.QUrl(url)
    print("Notebook at: {}".format(self.url))

    from time import sleep
    sleep(.1)

    jsons_after = glob(os.path.join(json_dir, "*.json"))
    for json in jsons_after:
        if json not in jsons_before:
            self.connection_json = json
            break
    else:
        raise FileNotFoundError(
            "Could not find the connection JSON in {}.".format(json_dir))

    self.notebook_client = jupyter_client.BlockingKernelClient(
        connection_file=self.connection_json)
    self.notebook_client.load_connection_file()
    self.notebook_client.start_channels()
    self.notebook_client.execute("test=123")
import os
import jupyter_client

# Path of the kernel connection file
cf = 'kernel-23932.json'

# Set up a blocking kernel client using the kernel connection file
km = jupyter_client.BlockingKernelClient(
    connection_file='/run/user/1000/jupyter/' + cf)

# load the connection settings
km.load_connection_file()

direct = '/home/soumitra/Desktop/Projects/GSOC/Scilab-gsoc/Experimentation/Testing/Tester.py'
km.execute_interactive('exec(open(\'' + direct + '\').read())')
print("Training done!")
import sys
import jupyter_client

cf = sys.argv[1]
modelName = sys.argv[2]

# Set up a blocking kernel client using the kernel connection file
km = jupyter_client.BlockingKernelClient(
    connection_file='/home/soumitra/.local/share/jupyter/runtime/' + cf)

# load the connection settings
km.load_connection_file()

print("Training started!")

# execute any Python commands on the remote IPython kernel
km.execute('exec(open(\'/home/soumitra/Scripts/' + modelName + '.py\').read())')
print("Training done!")
import jupyter_client
from shutil import copyfile

cf = "kernel-4552.json"
km = jupyter_client.BlockingKernelClient(connection_file=cf)
km.load_connection_file()
# Note: execfile() only exists on Python 2, so the target kernel must be a Python 2 kernel
km.execute('execfile("cnn_test.py")')
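# km.execute() in the scripts above is fire-and-forget: it queues the code on
# the kernel and returns a message id without waiting for output. The sketch
# below shows one assumed way to collect the output of such a call from the
# IOPub channel (the kernel-4552.json connection file and the printed result
# are illustrative, not from the original sources).
import queue
import jupyter_client

cf = jupyter_client.find_connection_file("kernel-4552.json")
km = jupyter_client.BlockingKernelClient(connection_file=cf)
km.load_connection_file()
km.start_channels()
km.wait_for_ready(timeout=30)

msg_id = km.execute("print('done')")   # returns immediately with a message id

# Drain IOPub until the kernel reports it is idle again for this request
while True:
    try:
        msg = km.get_iopub_msg(timeout=5)
    except queue.Empty:
        break
    if msg["parent_header"].get("msg_id") != msg_id:
        continue
    if msg["msg_type"] == "stream":
        print(msg["content"]["text"], end="")
    elif (msg["msg_type"] == "status"
          and msg["content"]["execution_state"] == "idle"):
        break

km.stop_channels()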