def make_comm() -> Comm:
    global _JupyterComm

    J_LOGGER.info("IPYTHON: Registering Comms")

    comm_target_name = COMM_NAME
    jupyter_comm = Comm(target_name=comm_target_name)

    def _get_command(msg) -> Optional[str]:
        return msg["content"]["data"].get("command", None)

    @jupyter_comm.on_msg
    def _recv(msg):
        if _get_command(msg) == "merge_notebooks":
            J_LOGGER.info("GOT UPDATE STATUS")
            merge_notebooks(jupyter_comm, msg["content"]["data"])
            return

        J_LOGGER.info("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
        J_LOGGER.info(msg)
        J_LOGGER.info("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")

    # Store the comm so it can be accessed from this thread later.
    _JupyterComm = jupyter_comm

    J_LOGGER.info("==> Success")
    return _JupyterComm
def register_server(notebook_path: str, port_number: int) -> None:
    J_LOGGER.info("Registering notebook {notebook} on port {port}", notebook=notebook_path, port=port_number)

    _REGISTERED_SERVERS[notebook_path] = port_number

    J_LOGGER.debug("Updated notebook mappings: {}", _REGISTERED_SERVERS)
def wrapper(*args, **kwargs):
    # `f`, `start_msg`, and `close_msg` are closed over from the enclosing
    # decorator's scope.
    J_LOGGER.debug("{}: {}", start_msg, f.__name__)
    result = f(*args, **kwargs)
    J_LOGGER.debug("{}: {}", close_msg, result)
    return result
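# A minimal sketch of the enclosing decorator this `wrapper` presumably belongs
# to. The decorator name `log_execution` is an assumption for illustration, not
# confirmed by the source:
import functools

def log_execution(start_msg: str, close_msg: str):
    def decorator(f):
        @functools.wraps(f)  # preserve the wrapped function's name for the log line
        def wrapper(*args, **kwargs):
            J_LOGGER.debug("{}: {}", start_msg, f.__name__)
            result = f(*args, **kwargs)
            J_LOGGER.debug("{}: {}", close_msg, result)
            return result
        return wrapper
    return decorator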
def send(file_name: str): J_LOGGER.info(f"Executing all cells in file: {file_name}...") file_name = str(Path(file_name).absolute()) request_obj = ExecuteAllRequest(file_name=file_name) jupyter_server.request_notebook_command(request_obj) J_LOGGER.info("... Complete")
def start_notebook_server_in_thread(notebook_name: str, server, file_watcher_enabled: bool = False, status_widget=None):
    """
    Args:
        notebook_name: The name of the notebook you want to be syncing in this process.
        file_watcher_enabled: Whether to watch the synced file from this process.
            If your editor already fires off events from a file watcher (as PyCharm does),
            you don't need to enable this; it will just use the same HTTP requests as normal.
    """
    notebook_path = Path(notebook_name).absolute()

    if not status_widget:
        status_widget = widgets.Text()
        status_widget.style.description_width = "300px"
        display(status_widget)

    if file_watcher_enabled:
        assert False, "Currently unsupported."

        from watchdog.observers import Observer

        from jupyter_ascending.watchers.file_watcher import NotebookEventHandler

        event_handler = NotebookEventHandler(str(notebook_path.absolute()), file_watcher_enabled)
        file_observer = Observer()

        abs_path = str(notebook_path.parent.absolute())
        file_observer.schedule(event_handler, abs_path, recursive=False)

        file_watcher_thread = threading.Thread(target=file_observer.start, args=tuple())
        file_watcher_thread.start()

    # TODO: This might be a race condition if a bunch of these are started at once...
    notebook_server_port = find_free_port()

    notebook_executor = HTTPServer(("localhost", notebook_server_port), NotebookKernelRequestHandler)
    notebook_executor_thread = threading.Thread(target=notebook_executor.serve_forever, args=tuple())
    notebook_executor_thread.start()

    J_LOGGER.info("IPYTHON: Registering notebook {}", notebook_path)
    request(
        EXECUTE_HOST_URL,
        server.register_notebook_server.__name__,
        # Params
        notebook_path=str(notebook_path),
        port_number=notebook_server_port,
    )
    J_LOGGER.info("==> Success")

    make_comm()

    return status_widget
def handle_get_status_request(data: dict) -> str:
    J_LOGGER.info("Attempting get_status")

    comm = get_comm()
    comm.send({"command": "get_status"})

    J_LOGGER.info("Sent get_status")

    return "Updating status"
def request_notebook_command(json_request: GenericJsonRequest):
    try:
        request(
            EXECUTE_HOST_URL,
            perform_notebook_request.__name__,
            command_name=_map_json_request_to_function_name(json_request),
            notebook_path=json_request.file_name,
            data=attr.asdict(json_request),
        )
    except ConnectionError as e:
        J_LOGGER.warning(f"Unable to connect to server. Perhaps the notebook is not running? {e}")
    except ReceivedNon2xxResponseError as e:
        J_LOGGER.warning(f"Unable to process request. Perhaps something else is running on this port? {e}")
def perform_notebook_request(notebook_path: str, command_name: str, data: Dict[str, Any]) -> Optional[Dict]:
    J_LOGGER.debug("Performing notebook request... ")

    try:
        notebook_server = get_server_for_notebook(notebook_path)
    except UnableToFindNotebookException:
        J_LOGGER.warning(f"Unable to find {notebook_path} in {_REGISTERED_SERVERS}")
        return {"success": False, "notebook_path": notebook_path}

    request(notebook_server, command_name, data=data)
    return None
def _find_cell_number(lines: List[str], line_number: int) -> int:
    cell_index = -1

    for index, line in enumerate(lines):
        if any(pat.match(line) for pat in CELL_SEPARATOR_PATTERNS):
            J_LOGGER.debug(f"Found another new cell on line number: {index}")
            cell_index += 1
            J_LOGGER.debug(f" New cell index {cell_index}")

        # Found the line number, quit
        if index == int(line_number):
            break

    return cell_index
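# A minimal usage sketch, assuming CELL_SEPARATOR_PATTERNS matches "# %%"-style
# cell markers (percent format). The sample lines below are hypothetical:
#
#     lines = ["# %%", "a = 1", "# %%", "b = 2"]
#     _find_cell_number(lines, 1)  # -> 0: line 1 falls inside the first cell
#     _find_cell_number(lines, 3)  # -> 1: line 3 falls inside the second cell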
def send(file_name: str): if f".{SYNC_EXTENSION}.py" not in file_name: return J_LOGGER.info(f"Syncing File: {file_name}...") file_name = str(Path(file_name).absolute()) with open(file_name, "r") as reader: raw_result = reader.read() request_obj = SyncRequest(file_name=file_name, contents=raw_result) jupyter_server.request_notebook_command(request_obj) J_LOGGER.info("... Complete")
def do_POST(self):
    # Process the request. Note that `name` and `methods` are closed over from
    # the enclosing scope that builds this handler class.
    request = self.rfile.read(int(self.headers["Content-Length"])).decode()
    J_LOGGER.info("{} processing request:\n\t\t{}", name, request)

    response = dispatch(request, methods=methods)
    J_LOGGER.info("Got Response:\n\t\t{}", response)

    # Return the response
    self.send_response(response.http_status)
    self.send_header("Content-Type", "application/json")
    self.end_headers()
    self.wfile.write(str(response).encode())
def get_server_for_notebook(notebook_str: str) -> Optional[str]:
    # Normalize to the notebook path
    notebook_str = notebook_str.replace(f".{SYNC_EXTENSION}.py", f".{SYNC_EXTENSION}.ipynb")

    J_LOGGER.debug("Finding server for notebook_str, script_path: {}", notebook_str)

    notebook_path = Path(notebook_str)

    def get_score_for_name(registered_name: str) -> int:
        """
        Returns the consecutive count of matching parts of a path, from the end toward the start.

        Note that it matches on the parts of a path:

            registered ['tmp', 'notebooks', 'myfile.py']
            notebook   ['opt', 'notebooks', 'myfile.py'] -> 2

            registered ['a', 'b', 'c']
            notebook   ['a', 'b', 'd'] -> 0
        """
        return len(get_matching_tail_tokens(notebook_path.parts, Path(registered_name).parts))

    score_by_name = {x: get_score_for_name(x) for x in _REGISTERED_SERVERS.keys()}
    # default=0 so an empty registry raises the not-found error below instead of a ValueError.
    max_score = max(score_by_name.values(), default=0)

    if max_score <= 0:
        raise UnableToFindNotebookException(f"Could not find server for notebook_str: {notebook_str}")

    # Only proceed if exactly one notebook has the best score.
    best_scores = [k for k, v in score_by_name.items() if v == max_score]
    if len(best_scores) == 1:
        notebook_port = _REGISTERED_SERVERS[best_scores[0]]
        J_LOGGER.debug("Found server at port {}", notebook_port)
        return _make_url(notebook_port)
    else:
        raise UnableToFindNotebookException(f"Could not find server for notebook_str: {notebook_str}")
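# A minimal sketch of what `get_matching_tail_tokens` is expected to do here.
# The real helper lives elsewhere in the package; this version is an assumption
# for illustration only:
from typing import List, Sequence

def get_matching_tail_tokens(a: Sequence, b: Sequence) -> List:
    """Return the longest common suffix of two sequences."""
    matched: List = []
    for x, y in zip(reversed(a), reversed(b)):
        if x != y:
            break
        matched.append(x)
    return list(reversed(matched))

# e.g. get_matching_tail_tokens(("tmp", "notebooks", "a.py"), ("opt", "notebooks", "a.py"))
# -> ["notebooks", "a.py"], so get_score_for_name above would return 2.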
def start_server_in_thread():
    try:
        server_executor = HTTPServer(EXECUTE_HOST_LOCATION, JupyterServerRequestHandler)
    except OSError:
        print(f"It appears you are already using {EXECUTE_HOST_LOCATION}")
        print("Use the environment variable 'JUPYTER_ASCENDING_EXECUTE_PORT' to set a new port")
        return

    server_executor_thread = threading.Thread(target=server_executor.serve_forever, args=tuple())
    server_executor_thread.start()

    J_LOGGER.info("Successfully started multiplexer server")

    return server_executor
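# Usage sketch (illustrative): if the default address is taken, pick another
# multiplexer port before launching Jupyter so EXECUTE_HOST_LOCATION binds elsewhere:
#
#     JUPYTER_ASCENDING_EXECUTE_PORT=<port> jupyter notebook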
def start_notebook_server_in_thread(notebook_name: str, server, status_widget=None):
    """
    Args:
        notebook_name: The name of the notebook you want to be syncing in this process.
    """
    notebook_path = Path(notebook_name).absolute()

    if not status_widget:
        status_widget = widgets.Text()
        status_widget.style.description_width = "300px"
        display(status_widget)

    # TODO: This might be a race condition if a bunch of these are started at once...
    notebook_server_port = find_free_port()

    notebook_executor = HTTPServer(("localhost", notebook_server_port), NotebookKernelRequestHandler)
    notebook_executor_thread = threading.Thread(target=notebook_executor.serve_forever, args=tuple())
    notebook_executor_thread.start()

    J_LOGGER.info("IPYTHON: Registering notebook {}", notebook_path)
    request(
        EXECUTE_HOST_URL,
        server.register_notebook_server.__name__,
        # Params
        notebook_path=str(notebook_path),
        port_number=notebook_server_port,
    )
    J_LOGGER.info("==> Success")

    make_comm()

    return status_widget
def set_everything_up():
    # Note that this is also called from javascript after a kernel restart
    J_LOGGER.info("Loading Ipython...")

    # Start the server if the notebook name matches.
    notebook_name = get_name_from_python()
    J_LOGGER.info("IPYTHON: Loading {notebook}", notebook=notebook_name)

    if f".{SYNC_EXTENSION}.ipynb" not in notebook_name:
        J_LOGGER.info("IPYTHON: Not loading {notebook} because its name does not match", notebook=notebook_name)
        return

    J_LOGGER.info("IPYTHON LOAD: " + time.ctime() + ": " + notebook_name)
    jupyter_notebook.start_notebook_server_in_thread(notebook_name, jupyter_server)
def merge_notebooks(comm: Comm, result: Dict[str, Any]) -> None:
    javascript_cells = result["javascript_cells"]

    current_notebook = NotebookContents(
        cells=[
            JupyterCell(
                index=i,
                cell_type=x["cell_type"],
                source=x["source"],
                output=get_output_text(x),
                # metadata=x["metadata"],
            )
            for i, x in enumerate(javascript_cells)
        ]
    )

    new_notebook = NotebookContents(cells=[JupyterCell(**x) for x in result["new_notebook"]])

    opcodes = opcode_merge_cell_contents(current_notebook, new_notebook)

    J_LOGGER.info("Performing Opcodes...")
    J_LOGGER.info(opcodes)

    net_shift = 0
    for op_action in opcodes:
        net_shift = perform_op_code(comm, op_action, current_notebook, new_notebook, net_shift)
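# Illustrative flow, assuming opcode_merge_cell_contents produces difflib-style
# opcodes over the two cell lists: comparing current cells [c0, c1] against new
# cells [c0, c1', c2] would yield roughly EQUAL (0,1)->(0,1), REPLACE (1,2)->(1,2),
# and INSERT (2,2)->(2,3); perform_op_code then applies each action over the comm,
# threading net_shift through so later indices account for earlier edits.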
def request_notebook_command(json_request: GenericJsonRequest):
    try:
        result = request(
            EXECUTE_HOST_URL,
            perform_notebook_request.__name__,
            command_name=type(json_request).__name__,
            notebook_path=json_request.file_name,
            data=attr.asdict(json_request),
        )

        if not result.data.result:
            return

        if not result.data.result.get("success", True):
            raise Exception(f"Failed to complete request. {result.data}")
    except ConnectionError as e:
        J_LOGGER.error(f"Unable to connect to server. Perhaps the notebook is not running? {e}")
    except ReceivedNon2xxResponseError as e:
        J_LOGGER.error(f"Unable to process request. Perhaps something else is running on this port? {e}")
def load_jupyter_server_extension(ipython):
    ipython.log.info("LOADING SERVER")
    J_LOGGER.info("SERVER LOAD: " + time.ctime())

    server = jupyter_server.start_server_in_thread()
    if not server:
        return

    # HACK:
    # A bit of a hack to make sure the server gets shut down when we're done here.
    # Had some problems with hanging servers.
    #
    # I think this doesn't quite work if we don't confirm that we want the server shutdown.
    # Oh well for now...
    ORIGINAL = None

    def shutdown_from_signal(*args, **kwargs):
        if ORIGINAL:
            ORIGINAL(*args, **kwargs)

        J_LOGGER.info("SERVER: Shutting down server")
        server.shutdown()

    ORIGINAL = signal.signal(signal.SIGINT, shutdown_from_signal)
def perform_notebook_request(notebook_path: str, command_name: str, data: Dict[str, Any]) -> None:
    J_LOGGER.debug("Performing notebook request... ")

    notebook_server = get_server_for_notebook(notebook_path)

    if notebook_server is None:
        J_LOGGER.warning("==> Unable to process request")
        J_LOGGER.warning("==> {}", _REGISTERED_SERVERS)
        return

    request(notebook_server, command_name, data=data)
def load_ipython_extension(ipython):
    J_LOGGER.info("Loading Ipython...")

    # Add %start_notebook_syncing
    ipython.register_magics(SyncMagic)

    # Start the server if the notebook name matches.
    notebook_name = get_name_from_python()
    J_LOGGER.info("IPYTHON: Loading {notebook}", notebook=notebook_name)

    if ".synced.ipynb" not in notebook_name:
        J_LOGGER.info("IPYTHON: Not loading {notebook} because its name does not match", notebook=notebook_name)
        return

    J_LOGGER.info("IPYTHON LOAD: " + time.ctime() + ": " + notebook_name)
    jupyter_notebook.start_notebook_server_in_thread(notebook_name, jupyter_server)
def get_server_for_notebook(notebook_path: str) -> Optional[str]:
    # Normalize to the notebook path
    notebook_path = notebook_path.replace(".synced.py", ".synced.ipynb")

    J_LOGGER.debug("Finding server for notebook_path, script_path: {}", notebook_path)

    potential_notebooks: List[str] = []
    for registered_name in _REGISTERED_SERVERS:
        if registered_name in notebook_path:
            potential_notebooks.append(registered_name)

    if len(potential_notebooks) > 1:
        J_LOGGER.warning("Found more than one notebook {}, {}", notebook_path, potential_notebooks)
        return None
    elif len(potential_notebooks) == 1:
        notebook_port = _REGISTERED_SERVERS[potential_notebooks[0]]
        J_LOGGER.debug("Found server at port {}", notebook_port)
        return f"http://localhost:{notebook_port}"
    else:
        J_LOGGER.warning("Could not find server for notebook_path: {}", notebook_path)
        return None
def send(file_name: str, line_number: int, *args, **kwargs):
    J_LOGGER.debug("Starting execute request")

    # Always pass the absolute path
    file_name = str(Path(file_name).absolute())

    request_obj = partial(ExecuteRequest, file_name=file_name, contents="")

    with open(file_name, "r") as reader:
        lines = reader.readlines()

    cell_index = _find_cell_number(lines, line_number)

    final_request = request_obj(cell_index=cell_index)
    J_LOGGER.info(f"Sending request with {final_request}")
    jupyter_server.request_notebook_command(final_request)
    J_LOGGER.info("... Complete")
def log_message(self, format, *args):
    # Override BaseHTTPRequestHandler.log_message so per-request logging goes
    # through J_LOGGER instead of stderr.
    J_LOGGER.debug(args)
def _recv(msg):
    print(msg)
    J_LOGGER.warning(msg)
if __name__ == "__main__":
    J_LOGGER.disable("__main__")

    parser = argparse.ArgumentParser()
    parser.add_argument("--filename", help="Filename to send")
    # type=int so send() receives the line number as the int its signature expects
    parser.add_argument("--linenumber", type=int, help="Line number that the cursor is currently on")
    arguments = parser.parse_args()

    send(arguments.filename, arguments.linenumber)
def perform_op_code(
    comm: Comm,
    op_action: OpCodeAction,
    current_notebook: NotebookContents,
    updated_notebook: NotebookContents,
    net_shift: int,
) -> int:
    """
    Args:
        net_shift: Tracks the net shift caused by previous op codes. We can't apply
            all the operations to Jupyter at once, because it does not have that kind
            of editing model. So as we delete and insert, we keep track of the shifts
            that have happened so far, and offset the actions we tell the Jupyter
            notebook to perform accordingly.
    """
    if op_action.op_code == OpCodes.EQUAL:
        pass
    elif op_action.op_code == OpCodes.DELETE:
        J_LOGGER.info(f"Performing Delete: {op_action}")

        # Deletion is a bit goofy for Jupyter, so it has to be adjusted by the net shift so far.
        cells_to_delete = [x + net_shift for x in range(*op_action.current)]
        comm.send({"command": "op_code__delete_cells", "cell_indices": cells_to_delete})

        net_shift = net_shift - len(cells_to_delete)
    elif op_action.op_code == OpCodes.INSERT:
        J_LOGGER.info(f"Performing Insert: {op_action}")

        cells_to_insert = list(range(*op_action.updated))
        for cell_number in cells_to_insert:
            comm.send(
                {
                    "command": "op_code__insert_cell",
                    "cell_number": cell_number,
                    "cell_type": updated_notebook.cells[cell_number].cell_type,
                    "cell_contents": updated_notebook.cells[cell_number].joined_source,
                }
            )

        net_shift = net_shift + len(cells_to_insert)
    elif op_action.op_code == OpCodes.REPLACE:
        # Keep track of what the current cells looked like before.
        current_cells = list(range(*op_action.current))
        updated_cells = list(range(*op_action.updated))

        for cell_number in updated_cells:
            # If we still have current cells to replace, replace in place.
            if current_cells:
                current_cells.pop(0)

                comm.send(
                    {
                        "command": "op_code__replace_cell",
                        "cell_number": cell_number,
                        "cell_type": updated_notebook.cells[cell_number].cell_type,
                        "cell_contents": updated_notebook.cells[cell_number].joined_source,
                    }
                )
            # Otherwise, insert new cells so we don't overwrite existing ones.
            else:
                net_shift = perform_op_code(
                    comm,
                    OpCodeAction(
                        op_code=OpCodes.INSERT,
                        # NOTE: This is intentionally the last index for both of these
                        current_start_idx=op_action.current_final_idx,
                        current_final_idx=op_action.current_final_idx,
                        updated_start_idx=cell_number,
                        updated_final_idx=cell_number + 1,
                    ),
                    current_notebook,
                    updated_notebook,
                    net_shift,
                )

        # If cells are left over from the replace (e.g. 1-4 replaced with 1-2),
        # we need to delete the rest of them.
        if current_cells:
            net_shift = perform_op_code(
                comm,
                OpCodeAction(
                    op_code=OpCodes.DELETE,
                    current_start_idx=current_cells[0],
                    current_final_idx=current_cells[-1] + 1,
                    # NOTE: This is intentionally the last index for both of these
                    updated_start_idx=op_action.updated_final_idx,
                    updated_final_idx=op_action.updated_final_idx,
                ),
                current_notebook,
                updated_notebook,
                net_shift,
            )
    else:
        raise NotImplementedError

    return net_shift
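# A worked example of the net_shift bookkeeping (illustrative, assuming
# OpCodeAction ranges are half-open like difflib's):
#
#   Start with net_shift = 0 and a DELETE of current cells (1, 3):
#     cells_to_delete = [1, 2], and net_shift becomes -2.
#   A later INSERT of updated cells (4, 5) sends cell_number 4 as-is and bumps
#     net_shift back up by 1.
#   Any later DELETE offsets its indices by the running net_shift, so with
#     net_shift = -2, current cell 4 is deleted as Jupyter cell 4 + (-2) = 2.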