class JupyterComm(Comm):
    """
    JupyterComm provides a Comm for the notebook which is initialized
    the first time data is pushed to the frontend.
    """

    js_template = """
    function msg_handler(msg) {{
      var metadata = msg.metadata;
      var buffers = msg.buffers;
      var msg = msg.content.data;
      if ((metadata.msg_type == "Ready")) {{
        if (metadata.content) {{
          console.log("Python callback returned following output:", metadata.content);
        }}
      }} else if (metadata.msg_type == "Error") {{
        console.log("Python failed with the following traceback:", metadata.traceback)
      }} else {{
        {msg_handler}
      }}
    }}
    if ((window.PyViz == undefined) || (!window.PyViz.comm_manager)) {{
      console.log("Could not find comm manager")
    }} else {{
      window.PyViz.comm_manager.register_target('{plot_id}', '{comm_id}', msg_handler);
    }}
    """

    def init(self):
        """
        Lazily open the underlying ipykernel comm. Idempotent: returns
        immediately if a comm has already been opened.
        """
        from ipykernel.comm import Comm as IPyComm
        if self._comm:
            return
        self._comm = IPyComm(target_name=self.id, data={})
        self._comm.on_msg(self._handle_msg)

    @classmethod
    def decode(cls, msg):
        """
        Decodes messages following Jupyter messaging protocol.
        If JSON decoding fails data is assumed to be a regular string.
        """
        return msg['content']['data']

    def close(self):
        """
        Closes the comm connection
        """
        if self._comm:
            self._comm.close()

    def send(self, data=None, metadata=None, buffers=[]):
        """
        Pushes data across comm socket.
        """
        if not self._comm:
            self.init()
        # Fix: use the ``_comm`` attribute this class actually maintains.
        # The original called ``self.comm.send``, but no ``comm`` attribute
        # is defined anywhere in this class (the base may or may not supply
        # one); after init() above, ``self._comm`` is guaranteed to exist.
        self._comm.send(data, metadata=metadata, buffers=buffers)
class WidgetCommSocket(CommSocket):
    """
    CustomCommSocket provides communication between the IPython
    kernel and a matplotlib canvas element in the notebook.
    A CustomCommSocket is required to delay communication
    between the kernel and the canvas element until the widget
    has been rendered in the notebook.
    """

    def __init__(self, manager):
        self.supports_binary = None
        self.manager = manager
        # Random id used both as the comm payload and the placeholder div id.
        self.uuid = str(uuid.uuid4())
        self.html = "<div id=%r></div>" % self.uuid

    def start(self):
        """Open the 'matplotlib' comm once the placeholder div is rendered."""
        try:
            # Jupyter/IPython 4.0
            from ipykernel.comm import Comm
        # Fix: only catch the failed import. The previous bare ``except:``
        # also swallowed SystemExit/KeyboardInterrupt.
        except ImportError:
            # IPython <=3.0
            from IPython.kernel.comm import Comm

        try:
            self.comm = Comm('matplotlib', data={'id': self.uuid})
        except AttributeError:
            raise RuntimeError('Unable to create an IPython notebook Comm '
                               'instance. Are you in the IPython notebook?')
        self.comm.on_msg(self.on_message)
        self.comm.on_close(lambda close_message: self.manager.clearup_closed())
class NbAggCommSocket(CommSocket):
    """
    NbAggCommSocket subclasses the matplotlib CommSocket allowing
    the opening of a comms channel to be delayed until the plot
    is displayed.
    """

    def __init__(self, manager, target=None):
        self.supports_binary = None
        self.manager = manager
        # Caller may supply a target id; otherwise generate one.
        self.target = uuid.uuid4().hex if target is None else target
        self.html = "<div id=%r></div>" % self.target

    def start(self):
        """Open the 'matplotlib' comm against the (possibly supplied) target."""
        try:
            # Jupyter/IPython 4.0
            from ipykernel.comm import Comm
        # Fix: narrow the bare ``except:`` to the import failure it is
        # actually guarding against.
        except ImportError:
            # IPython <=3.0
            from IPython.kernel.comm import Comm

        try:
            self.comm = Comm('matplotlib', data={'id': self.target})
        except AttributeError:
            raise RuntimeError('Unable to create an IPython notebook Comm '
                               'instance. Are you in the IPython notebook?')
        self.comm.on_msg(self.on_message)
        self.comm.on_close(lambda close_message: self.manager.clearup_closed())
def _open_comm(comm: Comm, msg):
    """Wire up the hooks and handlers for a newly opened comm channel."""

    def forward_display_handle(display_name, display_id):
        # Relay new display handles over this comm.
        return _send_new_display_handle(comm, display_name, display_id)

    def forward_new_part(part_name, part_cls):
        # Relay newly registered parts (with their metadata) over this comm.
        return _send_part(comm, part_name, part_cls.get_metadata())

    _set_name_hook(forward_display_handle)
    _set_new_part_hook(forward_new_part)
    comm.on_msg(lambda incoming: _on_msg(comm, incoming))
    comm.on_close(_close_comm)
def create_comm(target: str, data: dict = None, callback: callable = None, **kwargs):
    """Create an ipykernel message comm for *target*.

    *data* is passed as the comm-open payload and *callback* is
    registered as the message handler. Extra keyword arguments are
    forwarded to the ``Comm`` constructor.
    """
    # Open the comm on the Python (kernel) side.
    channel = Comm(target_name=target, data=data, **kwargs)
    channel.on_msg(callback)
    return channel
class Component(LoggingConfigurable):
    """Python-side handle for a React component, synced over an ipykernel comm."""

    comm = Instance('ipykernel.comm.Comm', allow_none=True)
    _module = None
    _msg_callbacks = Instance(CallbackDispatcher, ())

    @property
    def module(self):
        # Subclasses may pin _module explicitly; default to the class name.
        if self._module is not None:
            return self._module
        else:
            return self.__class__.__name__

    def __init__(self, target_name='jupyter.react', props=None, comm=None):
        """Adopt *comm* if given, otherwise open a new one carrying *props*.

        Fix: ``props`` now defaults to None instead of a shared mutable
        ``{}`` — open() writes a 'module' key into it, which previously
        mutated the class-level default dict.
        """
        self.target_name = target_name
        props = {} if props is None else props
        self.props = props
        if comm is None:
            self.open(props)
        else:
            self.comm = comm
        # Make sure we clean up when the frontend closes the comm.
        self.comm.on_close(self.close)

    def open(self, props):
        """Create the backing comm with *props* as the open payload."""
        props['module'] = self.module
        args = dict(target_name=self.target_name, data=props)
        args['comm_id'] = 'jupyter_react.{}.{}'.format(
            uuid.uuid4(), props['module']
        )
        self.comm = Comm(**args)

    @observe('comm')
    def _comm_changed(self, change):
        # (Re)attach the message handler whenever the comm trait changes.
        if change['new'] is None:
            return
        self.comm.on_msg(self._handle_msg)

    def __del__(self):
        # Fix: the original unconditionally called ``self.comm.close()``
        # before close(), raising AttributeError when the comm was already
        # gone. close() below is guarded and idempotent.
        self.close(None)

    def close(self, msg):
        """Close and drop the comm (also used as the on_close callback).

        Fix: actually close the comm instead of only dropping the
        reference, which leaked the open channel.
        """
        if self.comm is not None:
            self.comm.close()
            self.comm = None
            self._ipython_display_ = None

    def send(self, data):
        self.comm.send( data )

    def _ipython_display_(self, **kwargs):
        self.send({"method": "display"})

    def _handle_msg(self, msg):
        # Only dispatch fully-formed messages.
        if 'content' in msg:
            self._msg_callbacks(self, msg['content'], msg['buffers'])

    def on_msg(self, callback, remove=False):
        """(Un)register callback(widget, content, buffers) for custom msgs."""
        self._msg_callbacks.register_callback(callback, remove=remove)
class Component(LoggingConfigurable):
    """Python-side handle for a React component, synced over an ipykernel comm."""

    comm = Instance('ipykernel.comm.Comm', allow_none=True)
    _module = None
    _msg_callbacks = Instance(CallbackDispatcher, ())

    @property
    def module(self):
        # Explicit override wins; otherwise use the subclass name.
        if self._module is not None:
            return self._module
        else:
            return self.__class__.__name__

    def __init__(self, target_name='jupyter.react', props=None, comm=None):
        """Adopt *comm* if given, otherwise open a new one carrying *props*.

        Fix: ``props`` now defaults to None rather than a shared mutable
        ``{}`` — open() adds a 'module' key, which previously mutated the
        class-level default dict across instances.
        """
        self.target_name = target_name
        props = {} if props is None else props
        self.props = props
        if comm is None:
            self.open(props)
        else:
            self.comm = comm

    def open(self, props):
        """Create the backing comm with *props* as the open payload."""
        props['module'] = self.module
        args = dict(target_name=self.target_name, data=props)
        args['comm_id'] = 'jupyter_react.{}.{}'.format(uuid.uuid4(), props['module'])
        self.comm = Comm(**args)

    @observe('comm')
    def _comm_changed(self, change):
        # (Re)attach the message handler whenever the comm trait changes.
        if change['new'] is None:
            return
        self.comm.on_msg(self._handle_msg)

    def __del__(self):
        self.close()

    def close(self):
        """Close and drop the comm; disables further display."""
        if self.comm is not None:
            self.comm.close()
            self.comm = None
            self._ipython_display_ = None

    def send(self, data):
        self.comm.send(data)

    def _ipython_display_(self, **kwargs):
        self.send({"method": "display"})

    def _handle_msg(self, msg):
        # Only dispatch fully-formed messages.
        if 'content' in msg:
            self._msg_callbacks(self, msg['content'], msg['buffers'])

    def on_msg(self, callback, remove=False):
        """(Un)register callback(widget, content, buffers) for custom msgs."""
        self._msg_callbacks.register_callback(callback, remove=remove)
class JupyterComm(Comm):
    """
    JupyterComm provides a Comm for the notebook which is initialized
    the first time data is pushed to the frontend.
    """

    template = """
    <script>
      function msg_handler(msg) {{
        var msg = msg.content.data;
        {msg_handler}
      }}

      if ((window.Jupyter !== undefined) && (Jupyter.notebook.kernel != null)) {{
        comm_manager = Jupyter.notebook.kernel.comm_manager;
        comm_manager.register_target("{comm_id}", function(comm) {{ comm.on_msg(msg_handler);}});
      }}
    </script>

    <div id="fig_{comm_id}">
      {init_frame}
    </div>
    """

    def init(self):
        """
        Lazily open the underlying ipykernel comm. Idempotent: returns
        immediately if a comm has already been opened.
        """
        from ipykernel.comm import Comm as IPyComm
        if self._comm:
            return
        self._comm = IPyComm(target_name=self.id, data={})
        self._comm.on_msg(self._handle_msg)

    @classmethod
    def decode(cls, msg):
        """
        Decodes messages following Jupyter messaging protocol.
        If JSON decoding fails data is assumed to be a regular string.
        """
        return msg['content']['data']

    def send(self, data=None, buffers=[]):
        """
        Pushes data across comm socket.
        """
        if not self._comm:
            self.init()
        # Fix: send on ``self._comm`` — the attribute this class maintains.
        # The original referenced ``self.comm``, which is never assigned here.
        self._comm.send(data, buffers=buffers)
def connect():
    """Establish a comm connection to the frontend notebook."""
    if not is_notebook():
        print('Python session is not running in a Notebook Kernel')
        return

    global _comm

    # Allow the frontend to open comms back to us on the 'tdb' target.
    ip_kernel = get_ipython().kernel
    ip_kernel.comm_manager.register_target('tdb', handle_comm_opened)

    # Initiate the kernel-side connection; no receive handler is bound.
    _comm = Comm(target_name='tdb', data={})
    _comm.on_msg(None)
def connect():
    """Open the 'tdb' comm channel to the frontend notebook, if any."""
    if not is_notebook():
        print('Python session is not running in a Notebook Kernel')
        return

    global _comm

    current_kernel = get_ipython().kernel
    # Register the target so the frontend can also initiate comms.
    current_kernel.comm_manager.register_target('tdb', handle_comm_opened)

    # Open the comm from the kernel side; message handler intentionally unset.
    _comm = Comm(target_name='tdb', data={})
    _comm.on_msg(None)
class JupyterComm(Comm):
    """
    JupyterComm provides a Comm for the notebook which is initialized
    the first time data is pushed to the frontend.
    """

    js_template = """
    function msg_handler(msg) {{
      var buffers = msg.buffers;
      var msg = msg.content.data;
      {msg_handler}
    }}
    window.PyViz.comm_manager.register_target('{plot_id}', '{comm_id}', msg_handler);
    """

    def init(self):
        """
        Lazily open the underlying ipykernel comm. Idempotent: returns
        immediately if a comm has already been opened.
        """
        from ipykernel.comm import Comm as IPyComm
        if self._comm:
            return
        self._comm = IPyComm(target_name=self.id, data={})
        self._comm.on_msg(self._handle_msg)

    @classmethod
    def decode(cls, msg):
        """
        Decodes messages following Jupyter messaging protocol.
        If JSON decoding fails data is assumed to be a regular string.
        """
        return msg['content']['data']

    def close(self):
        """
        Closes the comm connection
        """
        if self._comm:
            self._comm.close()

    def send(self, data=None, buffers=[]):
        """
        Pushes data across comm socket.
        """
        if not self._comm:
            self.init()
        # Fix: use ``self._comm`` (guaranteed by init() above) rather than
        # ``self.comm``, an attribute this class never assigns.
        self._comm.send(data, buffers=buffers)
class JulynterComm(object):  # pylint: disable=useless-object-inheritance
    """Kernel-side handler for the 'julynter.comm' channel."""

    def __init__(self, shell=None):
        # The interactive shell is optional and stored for later use.
        self.shell = shell
        self.name = 'julynter.comm'
        self.comm = None

    def register(self):
        """Open the comm, attach the receive handler, and announce init."""
        self.comm = Comm(self.name)
        self.comm.on_msg(self.receive)
        self.send({'operation': 'init'})

    def receive(self, msg):
        """Receive a lint request from the frontend (no-op placeholder)."""

    def send(self, data):
        """Push *data* (e.g. lint results) to the frontend."""
        self.comm.send(data)
class BrowserContext(object):
    """Represents an in-browser context.

    Bridges Python and frontend Javascript over a comm: Python requests
    are sent with an incrementing call index and resolved via promises;
    frontend-initiated calls are dispatched through a callback registry.
    NOTE(review): relies on module-level ``callback_registry``,
    ``object_registry``, ``JSObject`` and ``SimplePromise`` defined
    elsewhere in this file — confirm before reuse.
    """

    def __init__(self):
        """Constructor"""
        self._calls = 0          # monotonically increasing request index
        self._callbacks = {}     # index -> pending SimplePromise

        # Push the Javascript to the front-end.
        with open(os.path.join(os.path.split(__file__)[0], 'backend_context.js'), 'r') as f:
            display(Javascript(data=f.read()))

        # Open communication with the front-end.
        self._comm = Comm(target_name='BrowserContext')
        self._comm.on_msg(self._on_msg)

    def _on_msg(self, msg):
        """Handle messages from the front-end"""
        data = msg['content']['data']

        # If the message is a call invoke, run the function and send
        # the results.
        if 'callback' in data:
            guid = data['callback']
            callback = callback_registry[guid]
            args = data['arguments']
            args = [self.deserialize(a) for a in args]
            index = data['index']
            results = callback(*args)
            # Echo the callback's results back under the caller's index.
            return self.serialize(self._send('return', index=index, results=results))
        # The message is not a call invoke, it must be an object
        # that is a response to a Python request.
        else:
            index = data['index']
            immutable = data['immutable']
            value = data['value']
            # Resolve (and discard) the promise registered for this index.
            if index in self._callbacks:
                self._callbacks[index].resolve({
                    'immutable': immutable,
                    'value': value
                })
                del self._callbacks[index]

    def serialize(self, obj):
        """Serialize an object for sending to the front-end."""
        # Objects that proxy frontend state are passed by reference (_jsid).
        if hasattr(obj, '_jsid'):
            return {'immutable': False, 'value': obj._jsid}
        else:
            obj_json = {'immutable': True}
            # Best-effort: only attach 'value' when obj is JSON-serializable.
            # NOTE(review): bare except deliberately swallows the failure so
            # non-serializable objects are sent without a value.
            try:
                json.dumps(obj)
                obj_json['value'] = obj
            except:
                pass
            # Callables are registered so the frontend can invoke them later.
            if callable(obj):
                guid = str(uuid.uuid4())
                callback_registry[guid] = obj
                obj_json['callback'] = guid
            return obj_json

    def deserialize(self, obj):
        """Deserialize an object from the front-end."""
        if obj['immutable']:
            return obj['value']
        else:
            # Mutable values arrive as frontend object ids; wrap each id in
            # a JSObject proxy exactly once.
            guid = obj['value']
            if not guid in object_registry:
                instance = JSObject(self, guid)
                object_registry[guid] = instance
            return object_registry[guid]

    # Message types
    def getattr(self, parent, child):
        # Request an attribute lookup on a frontend object; returns a promise.
        return self._send('getattr', parent=parent, child=child)

    def setattr(self, parent, child, value):
        # Request an attribute assignment on a frontend object.
        return self._send('setattr', parent=parent, child=child, value=value)

    def apply(self, parent, function, *pargs):
        # Request invocation of a frontend function with positional args.
        return self._send('apply', parent=parent, function=function, args=pargs)

    def _send(self, method, **parameters):
        """Sends a message to the front-end and returns a promise."""
        msg = {
            'index': self._calls,
            'method': method,
        }
        msg.update(parameters)
        # Register the promise before sending so a fast reply can resolve it.
        promise = SimplePromise()
        self._callbacks[self._calls] = promise
        self._calls += 1
        self._comm.send(msg)
        return promise
class Nep:
    """Bridge between a Jupyter kernel and Neos over an ipykernel comm.

    Variables can be bound bidirectionally: Python-side updates are pushed
    with ``updateVar/name/value`` messages; Neos-side updates arrive as
    ``name/value`` messages parsed by :meth:`_on_msg`.
    """

    def __init__(self, comm=None, kernel_id=None):
        """Open (or adopt) the 'neos_comm' comm and initialize state.

        Fix: removed a leftover debug ``print(kernel_id)``.
        """
        self.kernel_id = kernel_id
        if comm is None:
            self.comm = Comm(target_name="neos_comm")
        else:
            self.comm = comm
        self.comm.open()
        self.vars = Variables(self.comm, self)
        self.comm.on_msg(self._on_msg)
        self.vars_to_update = []       # names Neos is allowed to push back
        self.var_types = {}            # varname -> declared type string
        self.neos_updates_locked = False
        self.var_temp_vals = {}        # staging area while updates are locked

    def start(self, base='http://localhost:8888', notebook_path='/Untitled.ipynb',
              auth_token='', ws_port=8766):
        """Launch the websocket bridge server plus a static file server."""
        if self.kernel_id is None:
            server_process = Process(
                target=run_server,
                args=(self.comm.comm_id, base, notebook_path, auth_token, ws_port),
                daemon=True)
        else:
            server_process = Process(
                target=run_server_from_id,
                args=(self.comm.comm_id, self.kernel_id, auth_token, ws_port),
                daemon=True)
        server_process.start()
        # Serve the working directory (e.g. plot images) over plain HTTP.
        # guido forgive me coz i know this is ugly
        static_server_process = Process(target=subprocess.call,
                                        args=('python -m http.server 8000', ),
                                        kwargs={"shell": True})
        static_server_process.start()
        #TODO: nep.stop() !!

    def _on_msg(self, msg):
        """Handle a 'varname/value' message from Neos and update Variables."""
        msg = msg["content"]["data"]
        # Fix: use find() which returns -1 when '/' is absent. The original
        # used index(), which raises ValueError and made the malformed-message
        # warning branch unreachable.
        i = msg.find("/")
        msg_format_correct = (i != -1)
        if msg_format_correct:
            varname = msg[:i]
            if varname in self.vars_to_update:
                val_str = msg[i + 1:]
                vtype = self.var_types[varname]
                if vtype == "float":
                    varvalue = float(val_str)
                elif vtype == "int":
                    varvalue = int(val_str)
                elif vtype == "float_vec":
                    val_str = val_str[1:-1]  # strip surrounding [ ]
                    varvalue = tuple([float(x) for x in val_str.split(";")])
                elif vtype == "int_vec":
                    val_str = val_str[1:-1]
                    varvalue = tuple([int(x) for x in val_str.split(";")])
                elif vtype == "list":
                    # Trailing '|' in the wire format yields an empty tail.
                    varvalue = val_str.split("|")[:-1]
                else:
                    varvalue = val_str
                if not self.neos_updates_locked:
                    setattr(Variables, "_" + varname, varvalue)
                else:
                    self.var_temp_vals[varname] = varvalue
            else:
                print("Warning: Neos is trying to update variable " + varname +
                      " that is not Nep's vars_to_update")
        else:
            print(
                "Warning: Neos message type not supported (it doesn't have the format varname/varvalue)"
            )

    def _send_var(self, var_name, var_value):
        """Serialize *var_value* and push it as 'updateVar/name/value'."""
        var_type = type(var_value)
        value_str = ""
        if var_type is str:
            value_str = var_value
        elif var_type is tuple:
            # Fix: the original called a bare, undefined ``join(...)``
            # (NameError). Format to match the float_vec/int_vec parser
            # in _on_msg: "[a;b;c]".
            value_str = "[" + ";".join([str(x) for x in var_value]) + "]"
        elif var_type is list:
            # Fix: same undefined ``join``; the trailing '|' lets the
            # parser's split("|")[:-1] drop the empty tail.
            value_str = "|".join([str(x) for x in var_value]) + "|"
        else:
            value_str = str(var_value)
        self.comm.send("updateVar/" + var_name + "/" + value_str)

    def send(self, var_name, custom_name=None, value=None):
        """Send a variable to Neos.

        When *value* is None the variable is looked up by name in the
        caller's local scope; *custom_name* renames it on the wire.
        """
        var_value = value
        #IDEA: Maybe put this functionality in another method. send_custom or something!
        if value is None:
            frame = inspect.currentframe()
            # Renamed from ``locals`` to avoid shadowing the builtin.
            caller_locals = frame.f_back.f_locals  # local variables from calling scope
            var_value = caller_locals[var_name]
        if custom_name is not None:
            var_name = custom_name
        self._send_var(var_name, var_value)

    def bind(self, varname, callback=None, type="float", update_neos=True,
             update_python=True):
        """Expose *varname* as a synced property on Variables.

        ``type`` (kept for API compatibility despite shadowing the builtin)
        selects the parser used for incoming Neos updates.
        """
        prop = property(fset=Variables._generate_set(varname, update_neos, callback),
                        fget=lambda self: Variables.__dict__["_" + varname],
                        fdel=Variables._generate_del(varname, update_neos))
        setattr(Variables, "_" + varname, None)
        setattr(Variables, varname, prop)
        self.comm.send("addVar/" + varname)
        if update_python:
            if varname not in self.vars_to_update:
                self.vars_to_update.append(varname)
            self.var_types[varname] = type

    def plot(self, plt):
        """Render the current matplotlib figure and send its URL to Neos."""
        plt.plot()
        figname = "img/" + uuid.uuid1().hex + ".png"
        plt.savefig(figname)
        self.comm.send("media/" + "http://localhost:8000/" + figname)

    def listen(self, varname):
        frame = inspect.currentframe()
        caller_locals = frame.f_back.f_locals  # local variables from calling scope
        #TODO: this one only upates the local variable when neos changes the variable

    def lock(self):
        """Freeze Neos-driven updates; stash them in temp storage instead."""
        self.neos_updates_locked = True

    def unlock(self):
        """Unfreeze and apply any updates that arrived while locked."""
        self.neos_updates_locked = False
        for varname in self.var_temp_vals:
            setattr(Variables, "_" + varname, self.var_temp_vals[varname])
        self.var_temp_vals = {}

    def reactive_loop(self, function, iterable, *args, **kwargs):
        """Iterate *function* over *iterable* in a thread, unlocking between
        iterations so Neos can update variables in the gaps."""
        def loop():
            for it in iterable:
                self.lock()
                function(it, *args, **kwargs)
                self.unlock()
        t = threading.Thread(target=loop)
        t.start()
class Widget(LoggingConfigurable):
    """Base widget synchronizing trait state with a frontend model over a comm."""
    #-------------------------------------------------------------------------
    # Class attributes
    #-------------------------------------------------------------------------
    # Single global hook invoked after every widget construction.
    _widget_construction_callback = None
    # All live widgets, keyed by comm/model id.
    widgets = {}
    # Registry mapping class names to widget classes for comm-opened creation.
    widget_types = {}

    @staticmethod
    def on_widget_constructed(callback):
        """Registers a callback to be called when a widget is constructed.

        The callback must have the following signature:
        callback(widget)"""
        Widget._widget_construction_callback = callback

    @staticmethod
    def _call_widget_constructed(widget):
        """Static method, called when a widget is constructed."""
        if Widget._widget_construction_callback is not None and callable(Widget._widget_construction_callback):
            Widget._widget_construction_callback(widget)

    @staticmethod
    def handle_comm_opened(comm, msg):
        """Static method, called when a widget's comm is opened by the frontend.

        Instantiates the class named in the message; the instance registers
        itself in Widget.widgets via the comm trait handler, so the local
        variable is deliberately unused."""
        class_name = str(msg['content']['data']['widget_class'])
        # Prefer the explicit registry, fall back to a dotted import.
        if class_name in Widget.widget_types:
            widget_class = Widget.widget_types[class_name]
        else:
            widget_class = import_item(class_name)
        widget = widget_class(comm=comm)

    #-------------------------------------------------------------------------
    # Traits
    #-------------------------------------------------------------------------
    _model_module = Unicode('jupyter-js-widgets', help="""A requirejs module name
        in which to find _model_name. If empty, look in the global registry.""").tag(sync=True)
    _model_name = Unicode('WidgetModel', help="""Name of the backbone model
        registered in the front-end to create and sync this widget with.""").tag(sync=True)
    _view_module = Unicode(None, allow_none=True, help="""A requirejs module in which to find _view_name.
        If empty, look in the global registry.""").tag(sync=True)
    _view_name = Unicode(None, allow_none=True, help="""Default view registered in the front-end
        to use to represent the widget.""").tag(sync=True)
    comm = Instance('ipykernel.comm.Comm', allow_none=True)

    msg_throttle = Int(3, help="""Maximum number of msgs the
        front-end can send before receiving an idle msg from the back-end.""").tag(sync=True)

    keys = List()
    def _keys_default(self):
        # Names of all traits tagged sync=True; these are mirrored frontend-side.
        return [name for name in self.traits(sync=True)]

    _property_lock = Dict()
    _holding_sync = False
    _states_to_send = Set()
    _display_callbacks = Instance(CallbackDispatcher, ())
    _msg_callbacks = Instance(CallbackDispatcher, ())

    #-------------------------------------------------------------------------
    # (Con/de)structor
    #-------------------------------------------------------------------------
    def __init__(self, **kwargs):
        """Public constructor"""
        # 'model_id' is consumed here so the traitlets machinery never sees it.
        self._model_id = kwargs.pop('model_id', None)
        super(Widget, self).__init__(**kwargs)

        Widget._call_widget_constructed(self)
        self.open()

    def __del__(self):
        """Object disposal"""
        self.close()

    #-------------------------------------------------------------------------
    # Properties
    #-------------------------------------------------------------------------

    def open(self):
        """Open a comm to the frontend if one isn't already open."""
        if self.comm is None:
            state, buffer_keys, buffers = self._split_state_buffers(self.get_state())

            args = dict(target_name='jupyter.widget', data=state)
            if self._model_id is not None:
                args['comm_id'] = self._model_id

            self.comm = Comm(**args)
            if buffers:
                # FIXME: workaround ipykernel missing binary message support in open-on-init
                # send state with binary elements as second message
                self.send_state()

    def _comm_changed(self, name, new):
        """Called when the comm is changed."""
        if new is None:
            return
        self._model_id = self.model_id

        self.comm.on_msg(self._handle_msg)
        Widget.widgets[self.model_id] = self

    @property
    def model_id(self):
        """Gets the model id of this widget.

        If a Comm doesn't exist yet, a Comm will be created automagically."""
        return self.comm.comm_id

    #-------------------------------------------------------------------------
    # Methods
    #-------------------------------------------------------------------------

    def close(self):
        """Close method.

        Closes the underlying comm.
        When the comm is closed, all of the widget views are automatically
        removed from the front-end."""
        if self.comm is not None:
            Widget.widgets.pop(self.model_id, None)
            self.comm.close()
            self.comm = None
            # Shadow the method so a closed widget no longer displays.
            self._ipython_display_ = None

    def _split_state_buffers(self, state):
        """Return (state_without_buffers, buffer_keys, buffers) for binary message parts"""
        buffer_keys, buffers = [], []
        # Iterate over a copy since binary entries are popped from state.
        for k, v in list(state.items()):
            if isinstance(v, _binary_types):
                state.pop(k)
                buffers.append(v)
                buffer_keys.append(k)
        return state, buffer_keys, buffers

    def send_state(self, key=None):
        """Sends the widget state, or a piece of it, to the front-end.

        Parameters
        ----------
        key : unicode, or iterable (optional)
            A single property's name or iterable of property names to sync with the front-end.
        """
        state = self.get_state(key=key)
        state, buffer_keys, buffers = self._split_state_buffers(state)
        msg = {'method': 'update', 'state': state, 'buffers': buffer_keys}
        self._send(msg, buffers=buffers)

    def get_state(self, key=None):
        """Gets the widget state, or a piece of it.

        Parameters
        ----------
        key : unicode or iterable (optional)
            A single property's name or iterable of property names to get.

        Returns
        -------
        state : dict of states
        metadata : dict
            metadata for each field: {key: metadata}
        """
        if key is None:
            keys = self.keys
        elif isinstance(key, string_types):
            keys = [key]
        # NOTE(review): collections.Iterable is removed in Python 3.10+
        # (moved to collections.abc) — this code targets the py2/py3 era.
        elif isinstance(key, collections.Iterable):
            keys = key
        else:
            raise ValueError("key must be a string, an iterable of keys, or None")
        state = {}
        traits = self.traits() if not PY3 else {} # no need to construct traits on PY3
        for k in keys:
            to_json = self.trait_metadata(k, 'to_json', self._trait_to_json)
            value = to_json(getattr(self, k), self)
            # py2 only: wrap raw bytes so they travel as a buffer.
            if not PY3 and isinstance(traits[k], Bytes) and isinstance(value, bytes):
                value = memoryview(value)
            state[k] = value
        return state

    def set_state(self, sync_data):
        """Called when a state is received from the front-end."""
        # The order of these context managers is important. Properties must
        # be locked when the hold_trait_notification context manager is
        # released and notifications are fired.
        with self._lock_property(**sync_data), self.hold_trait_notifications():
            for name in sync_data:
                if name in self.keys:
                    from_json = self.trait_metadata(name, 'from_json', self._trait_from_json)
                    self.set_trait(name, from_json(sync_data[name], self))

    def send(self, content, buffers=None):
        """Sends a custom msg to the widget model in the front-end.

        Parameters
        ----------
        content : dict
            Content of the message to send.
        buffers : list of binary buffers
            Binary buffers to send with message
        """
        self._send({"method": "custom", "content": content}, buffers=buffers)

    def on_msg(self, callback, remove=False):
        """(Un)Register a custom msg receive callback.

        Parameters
        ----------
        callback: callable
            callback will be passed three arguments when a message arrives::

                callback(widget, content, buffers)

        remove: bool
            True if the callback should be unregistered."""
        self._msg_callbacks.register_callback(callback, remove=remove)

    def on_displayed(self, callback, remove=False):
        """(Un)Register a widget displayed callback.

        Parameters
        ----------
        callback: method handler
            Must have a signature of::

                callback(widget, **kwargs)

            kwargs from display are passed through without modification.
        remove: bool
            True if the callback should be unregistered."""
        self._display_callbacks.register_callback(callback, remove=remove)

    def add_traits(self, **traits):
        """Dynamically add trait attributes to the Widget."""
        super(Widget, self).add_traits(**traits)
        for name, trait in traits.items():
            if trait.get_metadata('sync'):
                self.keys.append(name)
                self.send_state(name)

    def notify_change(self, change):
        """Called when a property has changed."""
        # Send the state before the user registered callbacks for trait changes
        # have all fired.
        name = change['name']
        if self.comm is not None and name in self.keys:
            # Make sure this isn't information that the front-end just sent us.
            if self._should_send_property(name, change['new']):
                # Send new state to front-end
                self.send_state(key=name)
        LoggingConfigurable.notify_change(self, change)

    #-------------------------------------------------------------------------
    # Support methods
    #-------------------------------------------------------------------------
    @contextmanager
    def _lock_property(self, **properties):
        """Lock a property-value pair.

        The value should be the JSON state of the property.

        NOTE: This, in addition to the single lock for all state changes, is
        flawed. In the future we may want to look into buffering state changes
        back to the front-end."""
        self._property_lock = properties
        try:
            yield
        finally:
            self._property_lock = {}

    @contextmanager
    def hold_sync(self):
        """Hold syncing any state until the outermost context manager exits"""
        if self._holding_sync is True:
            # Already inside an outer hold_sync; it will flush for us.
            yield
        else:
            try:
                self._holding_sync = True
                yield
            finally:
                self._holding_sync = False
                self.send_state(self._states_to_send)
                self._states_to_send.clear()

    def _should_send_property(self, key, value):
        """Check the property lock (property_lock)"""
        to_json = self.trait_metadata(key, 'to_json', self._trait_to_json)
        if (key in self._property_lock
            and to_json(value, self) == self._property_lock[key]):
            # The frontend just sent us this exact value; don't echo it back.
            return False
        elif self._holding_sync:
            # Defer; hold_sync will flush the accumulated keys on exit.
            self._states_to_send.add(key)
            return False
        else:
            return True

    # Event handlers
    @_show_traceback
    def _handle_msg(self, msg):
        """Called when a msg is received from the front-end"""
        data = msg['content']['data']
        method = data['method']

        # Handle backbone sync methods CREATE, PATCH, and UPDATE all in one.
        if method == 'backbone':
            if 'sync_data' in data:
                # get binary buffers too
                sync_data = data['sync_data']
                for i,k in enumerate(data.get('buffer_keys', [])):
                    sync_data[k] = msg['buffers'][i]
                self.set_state(sync_data) # handles all methods

        # Handle a state request.
        elif method == 'request_state':
            self.send_state()

        # Handle a custom msg from the front-end.
        elif method == 'custom':
            if 'content' in data:
                self._handle_custom_msg(data['content'], msg['buffers'])

        # Catch remainder.
        else:
            self.log.error('Unknown front-end to back-end widget msg with method "%s"' % method)

    def _handle_custom_msg(self, content, buffers):
        """Called when a custom msg is received."""
        self._msg_callbacks(self, content, buffers)

    def _handle_displayed(self, **kwargs):
        """Called when a view has been displayed for this widget instance"""
        self._display_callbacks(self, **kwargs)

    @staticmethod
    def _trait_to_json(x, self):
        """Convert a trait value to json."""
        return x

    @staticmethod
    def _trait_from_json(x, self):
        """Convert json values to objects."""
        return x

    def _ipython_display_(self, **kwargs):
        """Called when `IPython.display.display` is called on the widget."""
        def loud_error(message):
            # Surface the problem in both the log and stderr.
            self.log.warn(message)
            sys.stderr.write('%s\n' % message)

        # Show view.
        if self._view_name is not None:
            validated = Widget._version_validated

            # Before the user tries to display a widget.  Validate that the
            # widget front-end is what is expected.
            if validated is None:
                loud_error('Widget Javascript not detected. It may not be installed properly.')
            elif not validated:
                loud_error('The installed widget Javascript is the wrong version.')

            # TODO: delete this sending of a comm message when the display statement
            # below works. Then add a 'text/plain' mimetype to the dictionary below.
            self._send({"method": "display"})

            # The 'application/vnd.jupyter.widget' mimetype has not been registered yet.
            # See the registration process and naming convention at
            # http://tools.ietf.org/html/rfc6838
            # and the currently registered mimetypes at
            # http://www.iana.org/assignments/media-types/media-types.xhtml.
            # We don't have a 'text/plain' entry so that the display message will be
            # will be invisible in the current notebook.
            data = {
                'application/vnd.jupyter.widget': self._model_id
            }
            display(data, raw=True)

            self._handle_displayed(**kwargs)

    def _send(self, msg, buffers=None):
        """Sends a message to the model in the front-end."""
        self.comm.send(data=msg, buffers=buffers)
class WWTLabApplication(BaseWWTWidget):
    """
    A handle to the WWT JupyterLab application.

    While other parts of pywwt create "widgets", bound to variables running
    inside Python notebooks, this class represents a connection to the
    standalone "application", which exists in JupyterLab independently of
    any one specific notebook. The Python API is the same, it's just that
    the JSON messages we send are routed to the separate application rather
    than our own iframe.
    """
    _comm = None        # ipykernel comm to the JupyterLab app
    _controls = None    # lazily-built layer-control widget row

    # View state that gets synchronized back to us. This is the same scheme as
    # the widget, just with manual synchronization over our comm to the viewer
    # app.
    _raRad = 0.0
    _decRad = 0.0
    _fovDeg = 60.0
    _engineTime = Time('2017-03-09T12:30:00', format='isot')
    _systemTime = Time('2017-03-09T12:30:00', format='isot')
    _timeRate = 1.0

    def __init__(self):
        # Open the comm to the research-app frontend before base init so
        # that messages emitted during setup have somewhere to go.
        self._comm = Comm(target_name='@wwtelescope/jupyterlab:research', data={})
        self._comm.on_msg(self._on_message_received)
        self._comm.open()
        self._send_msg(event='trigger')  # get bidirectional updates flowing
        BaseWWTWidget.__init__(self)

    def _send_msg(self, **kwargs):
        # Keyword arguments form the JSON message body.
        self._comm.send(kwargs)

    def _on_message_received(self, msg):
        """Absorb a view-state update pushed by the frontend app."""
        payload = msg['content']['data']
        if payload['type'] != 'wwt_view_state':
            return

        try:
            self._raRad = float(payload['raRad'])
            self._decRad = float(payload['decRad'])
            self._fovDeg = float(payload['fovDeg'])
            self._engineTime = Time(payload['engineClockISOT'], format='isot')
            self._systemTime = Time(payload['systemClockISOT'], format='isot')
            self._timeRate = float(payload['engineClockRateFactor'])
        except ValueError:
            pass  # report a warning somehow?

    def _serve_file(self, filename, extension=''):
        return serve_file(filename, extension=extension)

    def _get_view_data(self, field):
        # NOTE(review): R2H/R2D appear to be radians-to-hours/degrees
        # conversion factors defined elsewhere in the module — confirm.
        if field == 'ra':
            return self._raRad * R2H
        elif field == 'dec':
            return self._decRad * R2D
        elif field == 'fov':
            return self._fovDeg
        elif field == 'datetime':
            # Extrapolate the engine clock from the last sync point using
            # the reported clock-rate factor.
            engine_delta = self._timeRate * (Time.now() - self._systemTime)
            return self._engineTime + engine_delta
        else:
            raise ValueError('internal problem: unexpected "field" value')

    def _create_image_layer(self, **kwargs):
        """Returns a specialized subclass of ImageLayer that has some extra hooks for
        creating UI control points.
        """
        return JupyterImageLayer(parent=self, **kwargs)

    @property
    def layer_controls(self):
        """An ipywidgets row controlling background, opacity, and foreground.

        Built once on first access; the widgets stay linked to this object's
        traits afterwards."""
        if self._controls is None:
            opacity_slider = widgets.FloatSlider(value=self.foreground_opacity, min=0, max=1, readout=False)
            foreground_menu = widgets.Dropdown(options=self.available_layers, value=self.foreground)
            background_menu = widgets.Dropdown(options=self.available_layers, value=self.background)
            link((opacity_slider, 'value'), (self, 'foreground_opacity'))
            link((foreground_menu, 'value'), (self, 'foreground'))
            link((background_menu, 'value'), (self, 'background'))
            self._controls = widgets.HBox([background_menu, opacity_slider, foreground_menu])
        return self._controls
class Widget(LoggingHasTraits):
    #-------------------------------------------------------------------------
    # Class attributes
    #-------------------------------------------------------------------------
    _widget_construction_callback = None

    # widgets is a dictionary of all active widget objects
    widgets = {}

    # widget_types is a registry of widgets by module, version, and name:
    widget_types = WidgetRegistry()

    @staticmethod
    def on_widget_constructed(callback):
        """Registers a callback to be called when a widget is constructed.

        The callback must have the following signature:
        callback(widget)"""
        Widget._widget_construction_callback = callback

    @staticmethod
    def _call_widget_constructed(widget):
        """Static method, called when a widget is constructed."""
        if Widget._widget_construction_callback is not None and callable(
                Widget._widget_construction_callback):
            Widget._widget_construction_callback(widget)

    @staticmethod
    def handle_comm_opened(comm, msg):
        """Static method, called when a widget is constructed."""
        version = msg.get('metadata', {}).get('version', '')
        if version.split('.')[0] != PROTOCOL_VERSION_MAJOR:
            raise ValueError(
                "Incompatible widget protocol versions: received version %r, expected version %r"
                % (version, __protocol_version__))
        data = msg['content']['data']
        state = data['state']

        # Find the widget class to instantiate in the registered widgets
        widget_class = Widget.widget_types.get(state['_model_module'],
                                               state['_model_module_version'],
                                               state['_model_name'],
                                               state['_view_module'],
                                               state['_view_module_version'],
                                               state['_view_name'])
        widget = widget_class(comm=comm)
        if 'buffer_paths' in data:
            _put_buffers(state, data['buffer_paths'], msg['buffers'])
        widget.set_state(state)

    @staticmethod
    def get_manager_state(drop_defaults=False, widgets=None):
        """Returns the full state for a widget manager for embedding

        :param drop_defaults: when True, it will not include default value
        :param widgets: list with widgets to include in the state (or all widgets when None)
        :return:
        """
        state = {}
        if widgets is None:
            widgets = Widget.widgets.values()
        for widget in widgets:
            state[widget.model_id] = widget._get_embed_state(
                drop_defaults=drop_defaults)
        return {'version_major': 2, 'version_minor': 0, 'state': state}

    def _get_embed_state(self, drop_defaults=False):
        state = {
            'model_name': self._model_name,
            'model_module': self._model_module,
            'model_module_version': self._model_module_version
        }
        model_state, buffer_paths, buffers = _remove_buffers(
            self.get_state(drop_defaults=drop_defaults))
        state['state'] = model_state
        if len(buffers) > 0:
            state['buffers'] = [{
                'encoding': 'base64',
                'path': p,
                'data': standard_b64encode(d).decode('ascii')
            } for p, d in zip(buffer_paths, buffers)]
        return state

    def get_view_spec(self):
        return dict(version_major=2, version_minor=0, model_id=self._model_id)

    #-------------------------------------------------------------------------
    # Traits
    #-------------------------------------------------------------------------
    _model_name = Unicode('WidgetModel',
                          help="Name of the model.",
                          read_only=True).tag(sync=True)
    _model_module = Unicode('@jupyter-widgets/base',
                            help="The namespace for the model.",
                            read_only=True).tag(sync=True)
    _model_module_version = Unicode(
        __jupyter_widgets_base_version__,
        help="A semver requirement for namespace version containing the model.",
        read_only=True).tag(sync=True)
    _view_name = Unicode(None, allow_none=True,
                         help="Name of the view.").tag(sync=True)
    _view_module = Unicode(None, allow_none=True,
                           help="The namespace for the view.").tag(sync=True)
    _view_module_version = Unicode(
        '',
        help=
        "A semver requirement for the namespace version containing the view."
    ).tag(sync=True)
    _view_count = Int(
        None,
        allow_none=True,
        help=
        "EXPERIMENTAL: The number of views of the model displayed in the frontend. This attribute is experimental and may change or be removed in the future. None signifies that views will not be tracked. Set this to 0 to start tracking view creation/deletion."
    ).tag(sync=True)
    comm = Instance('ipykernel.comm.Comm', allow_none=True)

    keys = List(help="The traits which are synced.")

    @default('keys')
    def _default_keys(self):
        return [name for name in self.traits(sync=True)]

    _property_lock = Dict()
    _holding_sync = False
    _states_to_send = Set()
    _display_callbacks = Instance(CallbackDispatcher, ())
    _msg_callbacks = Instance(CallbackDispatcher, ())

    #-------------------------------------------------------------------------
    # (Con/de)structor
    #-------------------------------------------------------------------------
    def __init__(self, **kwargs):
        """Public constructor"""
        self._model_id = kwargs.pop('model_id', None)
        super(Widget, self).__init__(**kwargs)

        Widget._call_widget_constructed(self)
        self.open()

    def __del__(self):
        """Object disposal"""
        self.close()

    #-------------------------------------------------------------------------
    # Properties
    #-------------------------------------------------------------------------

    def open(self):
        """Open a comm to the frontend if one isn't already open."""
        if self.comm is None:
            state, buffer_paths, buffers = _remove_buffers(self.get_state())

            args = dict(target_name='jupyter.widget',
                        data={
                            'state': state,
                            'buffer_paths': buffer_paths
                        },
                        buffers=buffers,
                        metadata={'version': __protocol_version__})
            if self._model_id is not None:
                args['comm_id'] = self._model_id

            self.comm = Comm(**args)

    @observe('comm')
    def _comm_changed(self, change):
        """Called when the comm is changed."""
        if change['new'] is None:
            return
        self._model_id = self.model_id

        self.comm.on_msg(self._handle_msg)
        Widget.widgets[self.model_id] = self

    @property
    def model_id(self):
        """Gets the model id of this widget.

        If a Comm doesn't exist yet, a Comm will be created automagically."""
        return self.comm.comm_id

    #-------------------------------------------------------------------------
    # Methods
    #-------------------------------------------------------------------------

    def close(self):
        """Close method.

        Closes the underlying comm.
        When the comm is closed, all of the widget views are automatically
        removed from the front-end."""
        if self.comm is not None:
            Widget.widgets.pop(self.model_id, None)
            self.comm.close()
            self.comm = None
            self._ipython_display_ = None

    def _debug_log(self, text):
        """Best-effort debug tracing to the comm's log, if one is attached.

        The stock ipykernel ``Comm`` has no ``comm_log`` attribute, and
        ``self.comm`` may be ``None`` after ``close()``; writing to it
        unconditionally used to raise AttributeError in both cases, so guard
        with getattr and skip silently when no log is available.
        """
        log = getattr(self.comm, 'comm_log', None)
        if log is not None:
            log.write(text)
            log.flush()

    def send_state(self, key=None):
        """Sends the widget state, or a piece of it, to the front-end.

        Parameters
        ----------
        key : unicode, or iterable (optional)
            A single property's name or iterable of property names to sync
            with the front-end.
        """
        state = self.get_state(key=key)
        state, buffer_paths, buffers = _remove_buffers(state)
        msg = {
            'method': 'update',
            'state': state,
            'buffer_paths': buffer_paths
        }
        self._send(msg, buffers=buffers)

    def get_state(self, key=None, drop_defaults=False):
        """Gets the widget state, or a piece of it.

        Parameters
        ----------
        key : unicode or iterable (optional)
            A single property's name or iterable of property names to get.

        Returns
        -------
        state : dict of states
        metadata : dict
            metadata for each field: {key: metadata}
        """
        if key is None:
            keys = self.keys
        elif isinstance(key, string_types):
            keys = [key]
        else:
            # `collections.Iterable` was removed in Python 3.10; use the
            # `collections.abc` home with a fallback for Python 2.
            try:
                from collections.abc import Iterable
            except ImportError:  # Python 2
                from collections import Iterable
            if isinstance(key, Iterable):
                keys = key
            else:
                raise ValueError(
                    "key must be a string, an iterable of keys, or None")
        state = {}
        traits = self.traits()
        for k in keys:
            to_json = self.trait_metadata(k, 'to_json', self._trait_to_json)
            value = to_json(getattr(self, k), self)
            # On Python 2, wrap raw bytes in a memoryview so they are sent as
            # a binary buffer rather than serialized as text.
            if not PY3 and isinstance(traits[k], Bytes) and isinstance(
                    value, bytes):
                value = memoryview(value)
            if not drop_defaults or not self._compare(value,
                                                      traits[k].default_value):
                state[k] = value
        return state

    def _is_numpy(self, x):
        # Duck-typed check so numpy need not be imported unless present.
        return x.__class__.__name__ == 'ndarray' and x.__class__.__module__ == 'numpy'

    def _compare(self, a, b):
        # numpy arrays raise on truth-testing `a == b`; use array_equal.
        if self._is_numpy(a) or self._is_numpy(b):
            import numpy as np
            return np.array_equal(a, b)
        else:
            return a == b

    def set_state(self, sync_data):
        """Called when a state is received from the front-end."""
        # The order of these context managers is important. Properties must
        # be locked when the hold_trait_notification context manager is
        # released and notifications are fired.
        with self._lock_property(**sync_data), self.hold_trait_notifications():
            for name in sync_data:
                if name in self.keys:
                    from_json = self.trait_metadata(name, 'from_json',
                                                    self._trait_from_json)
                    self.set_trait(name, from_json(sync_data[name], self))

    def send(self, content, buffers=None):
        """Sends a custom msg to the widget model in the front-end.

        Parameters
        ----------
        content : dict
            Content of the message to send.
        buffers : list of binary buffers
            Binary buffers to send with message
        """
        self._send({"method": "custom", "content": content}, buffers=buffers)

    def on_msg(self, callback, remove=False):
        """(Un)Register a custom msg receive callback.

        Parameters
        ----------
        callback: callable
            callback will be passed three arguments when a message arrives::

                callback(widget, content, buffers)

        remove: bool
            True if the callback should be unregistered."""
        self._msg_callbacks.register_callback(callback, remove=remove)

    def on_displayed(self, callback, remove=False):
        """(Un)Register a widget displayed callback.

        Parameters
        ----------
        callback: method handler
            Must have a signature of::

                callback(widget, **kwargs)

            kwargs from display are passed through without modification.
        remove: bool
            True if the callback should be unregistered."""
        self._debug_log("widget::on_displayed !!!!!!!!!!\n")
        self._display_callbacks.register_callback(callback, remove=remove)

    def add_traits(self, **traits):
        """Dynamically add trait attributes to the Widget."""
        super(Widget, self).add_traits(**traits)
        for name, trait in traits.items():
            if trait.get_metadata('sync'):
                self.keys.append(name)
                self.send_state(name)

    def notify_change(self, change):
        """Called when a property has changed."""
        # Send the state to the frontend before the user-registered callbacks
        # are called.
        name = change['name']
        if self.comm is not None and self.comm.kernel is not None:
            # Make sure this isn't information that the front-end just sent us.
            if name in self.keys and self._should_send_property(
                    name, change['new']):
                # Send new state to front-end
                self.send_state(key=name)
        super(Widget, self).notify_change(change)

    def __repr__(self):
        return self._gen_repr_from_keys(self._repr_keys())

    #-------------------------------------------------------------------------
    # Support methods
    #-------------------------------------------------------------------------

    @contextmanager
    def _lock_property(self, **properties):
        """Lock a property-value pair.

        The value should be the JSON state of the property.

        NOTE: This, in addition to the single lock for all state changes, is
        flawed. In the future we may want to look into buffering state changes
        back to the front-end."""
        self._property_lock = properties
        try:
            yield
        finally:
            self._property_lock = {}

    @contextmanager
    def hold_sync(self):
        """Hold syncing any state until the outermost context manager exits"""
        if self._holding_sync is True:
            yield
        else:
            try:
                self._holding_sync = True
                yield
            finally:
                self._holding_sync = False
                self.send_state(self._states_to_send)
                self._states_to_send.clear()

    def _should_send_property(self, key, value):
        """Check the property lock (property_lock)"""
        to_json = self.trait_metadata(key, 'to_json', self._trait_to_json)
        # A roundtrip conversion through json in the comparison takes care of
        # idiosyncracies of how python data structures map to json, for example
        # tuples get converted to lists.
        if (key in self._property_lock and jsonloads(
                jsondumps(to_json(value, self))) == self._property_lock[key]):
            return False
        elif self._holding_sync:
            self._states_to_send.add(key)
            return False
        else:
            return True

    # Event handlers
    @_show_traceback
    def _handle_msg(self, msg):
        """Called when a msg is received from the front-end"""
        data = msg['content']['data']
        method = data['method']

        if method == 'update':
            if 'state' in data:
                state = data['state']
                if 'buffer_paths' in data:
                    _put_buffers(state, data['buffer_paths'], msg['buffers'])
                self.set_state(state)

        # Handle a state request.
        elif method == 'request_state':
            self.send_state()

        # Handle a custom msg from the front-end.
        elif method == 'custom':
            if 'content' in data:
                self._handle_custom_msg(data['content'], msg['buffers'])

        # Catch remainder.
        else:
            self.log.error(
                'Unknown front-end to back-end widget msg with method "%s"' %
                method)

    def _handle_custom_msg(self, content, buffers):
        """Called when a custom msg is received."""
        self._msg_callbacks(self, content, buffers)

    def _handle_displayed(self, **kwargs):
        """Called when a view has been displayed for this widget instance"""
        self._debug_log("widget::_handle_displayed !!!!!!!!!!\n")
        self._display_callbacks(self, **kwargs)

    @staticmethod
    def _trait_to_json(x, self):
        """Convert a trait value to json."""
        return x

    @staticmethod
    def _trait_from_json(x, self):
        """Convert json values to objects."""
        return x

    def _ipython_display_(self, **kwargs):
        """Called when `IPython.display.display` is called on the widget."""
        self._debug_log("widget::_ipython_display !!!!!!!!!!\n")
        if self._view_name is not None:
            # The 'application/vnd.jupyter.widget-view+json' mimetype has not been registered yet.
            # See the registration process and naming convention at
            # http://tools.ietf.org/html/rfc6838
            # and the currently registered mimetypes at
            # http://www.iana.org/assignments/media-types/media-types.xhtml.
            data = {
                'text/plain': "A Jupyter Widget",
                'application/vnd.jupyter.widget-view+json': {
                    'version_major': 2,
                    'version_minor': 0,
                    'model_id': self._model_id
                }
            }
            self._debug_log(
                "Calling display(then _handle_displayed) with data -> %s\n" %
                data)
            display(data, raw=True)
            self._handle_displayed(**kwargs)

    def _send(self, msg, buffers=None):
        """Sends a message to the model in the front-end."""
        if self.comm is not None and self.comm.kernel is not None:
            self.comm.send(data=msg, buffers=buffers)

    def _repr_keys(self):
        traits = self.traits()
        for key in sorted(self.keys):
            # Exclude traits that start with an underscore
            if key[0] == '_':
                continue
            # Exclude traits who are equal to their default value
            value = getattr(self, key)
            trait = traits[key]
            if self._compare(value, trait.default_value):
                continue
            # Guard against value being None: len(None) raised TypeError.
            elif (isinstance(trait, (Container, Dict))
                  and trait.default_value == Undefined
                  and (value is None or len(value) == 0)):
                # Empty container, and dynamic default will be empty
                continue
            yield key

    def _gen_repr_from_keys(self, keys):
        class_name = self.__class__.__name__
        signature = ', '.join('%s=%r' % (key, getattr(self, key))
                              for key in keys)
        return '%s(%s)' % (class_name, signature)
class WWTLabApplication(BaseWWTWidget):
    """
    A handle to the WWT JupyterLab application.

    Unlike the pywwt "widget" classes, which are bound to variables inside a
    particular Python notebook, an instance of this class talks to the
    standalone research application that lives in JupyterLab independently of
    any one notebook. The Python API is identical; the JSON messages are
    simply routed to that separate application instead of to an iframe of our
    own.
    """

    _comm = None
    _controls = None

    def __init__(self):
        _maybe_perpetrate_mega_kernel_hack()

        comm = Comm(target_name='@wwtelescope/jupyterlab:research', data={})
        comm.on_msg(self._on_comm_message_received)
        comm.open()
        self._comm = comm

        super(WWTLabApplication, self).__init__()

    def _on_comm_message_received(self, msg):
        """
        Handle an incoming comms message.

        NOTE: Jupyter's comms architecture runs this code asynchronously, so
        exceptions and printouts are swallowed rather than surfaced to the
        user. No "right" way around that is known.
        """
        payload = msg['content']['data']

        # The hub sends this special message to report app liveness status.
        if payload.get('type') == 'wwt_jupyter_viewer_status':
            self._on_app_status_change(alive=payload['alive'])
            # Deliberately fall through: downstream consumers might still use
            # this message, and the handler has to tolerate all sorts of
            # unexpected messages anyway.

        self._on_app_message_received(payload)

    def _actually_send_msg(self, payload):
        """Push *payload* to the app over our comm."""
        self._comm.send(payload)

    def _serve_file(self, filename, extension=''):
        """Delegate to the module-level serve_file() helper."""
        return serve_file(filename, extension=extension)

    def _create_image_layer(self, **kwargs):
        """Return a JupyterImageLayer, an ImageLayer subclass with extra
        hooks for creating UI control points."""
        return JupyterImageLayer(parent=self, **kwargs)

    @property
    def layer_controls(self):
        """An ipywidgets HBox of imagery-layer controls, built on first use."""
        if self._controls is None:
            fg_opacity = widgets.FloatSlider(
                value=self.foreground_opacity,
                min=0,
                max=1,
                readout=False,
            )
            fg_menu = widgets.Dropdown(
                options=self.available_layers,
                value=self.foreground,
            )
            bg_menu = widgets.Dropdown(
                options=self.available_layers,
                value=self.background,
            )

            link((fg_opacity, 'value'), (self, 'foreground_opacity'))
            link((fg_menu, 'value'), (self, 'foreground'))
            link((bg_menu, 'value'), (self, 'background'))

            self._controls = widgets.HBox([bg_menu, fg_opacity, fg_menu])

        return self._controls
class Widget(LoggingHasTraits):
    #-------------------------------------------------------------------------
    # Class attributes
    #-------------------------------------------------------------------------
    _widget_construction_callback = None

    # widgets is a dictionary of all active widget objects
    widgets = {}

    # widget_types is a registry of widgets by module, version, and name:
    widget_types = WidgetRegistry()

    @classmethod
    def close_all(cls):
        """Close every active widget; each close() removes it from `widgets`,
        hence the list() copy before iterating."""
        for widget in list(cls.widgets.values()):
            widget.close()

    @staticmethod
    def on_widget_constructed(callback):
        """Registers a callback to be called when a widget is constructed.

        The callback must have the following signature:
        callback(widget)"""
        Widget._widget_construction_callback = callback

    @staticmethod
    def _call_widget_constructed(widget):
        """Static method, called when a widget is constructed."""
        if Widget._widget_construction_callback is not None and callable(Widget._widget_construction_callback):
            Widget._widget_construction_callback(widget)

    @staticmethod
    def handle_comm_opened(comm, msg):
        """Static method, called when a widget is constructed."""
        version = msg.get('metadata', {}).get('version', '')
        if version.split('.')[0] != PROTOCOL_VERSION_MAJOR:
            raise ValueError("Incompatible widget protocol versions: received version %r, expected version %r"%(version, __protocol_version__))
        data = msg['content']['data']
        state = data['state']

        # Find the widget class to instantiate in the registered widgets
        widget_class = Widget.widget_types.get(state['_model_module'],
                                               state['_model_module_version'],
                                               state['_model_name'],
                                               state['_view_module'],
                                               state['_view_module_version'],
                                               state['_view_name'])
        widget = widget_class(comm=comm)
        if 'buffer_paths' in data:
            _put_buffers(state, data['buffer_paths'], msg['buffers'])
        widget.set_state(state)

    @staticmethod
    def get_manager_state(drop_defaults=False, widgets=None):
        """Returns the full state for a widget manager for embedding

        :param drop_defaults: when True, it will not include default value
        :param widgets: list with widgets to include in the state (or all widgets when None)
        :return:
        """
        state = {}
        if widgets is None:
            widgets = Widget.widgets.values()
        for widget in widgets:
            state[widget.model_id] = widget._get_embed_state(drop_defaults=drop_defaults)
        return {'version_major': 2, 'version_minor': 0, 'state': state}

    def _get_embed_state(self, drop_defaults=False):
        state = {
            'model_name': self._model_name,
            'model_module': self._model_module,
            'model_module_version': self._model_module_version
        }
        model_state, buffer_paths, buffers = _remove_buffers(self.get_state(drop_defaults=drop_defaults))
        state['state'] = model_state
        if len(buffers) > 0:
            state['buffers'] = [{'encoding': 'base64',
                                 'path': p,
                                 'data': standard_b64encode(d).decode('ascii')}
                                for p, d in zip(buffer_paths, buffers)]
        return state

    def get_view_spec(self):
        return dict(version_major=2, version_minor=0, model_id=self._model_id)

    #-------------------------------------------------------------------------
    # Traits
    #-------------------------------------------------------------------------
    _model_name = Unicode('WidgetModel',
        help="Name of the model.", read_only=True).tag(sync=True)
    _model_module = Unicode('@jupyter-widgets/base',
        help="The namespace for the model.", read_only=True).tag(sync=True)
    _model_module_version = Unicode(__jupyter_widgets_base_version__,
        help="A semver requirement for namespace version containing the model.", read_only=True).tag(sync=True)
    _view_name = Unicode(None, allow_none=True,
        help="Name of the view.").tag(sync=True)
    _view_module = Unicode(None, allow_none=True,
        help="The namespace for the view.").tag(sync=True)
    _view_module_version = Unicode('',
        help="A semver requirement for the namespace version containing the view.").tag(sync=True)
    _view_count = Int(None, allow_none=True,
        help="EXPERIMENTAL: The number of views of the model displayed in the frontend. This attribute is experimental and may change or be removed in the future. None signifies that views will not be tracked. Set this to 0 to start tracking view creation/deletion.").tag(sync=True)
    comm = Instance('ipykernel.comm.Comm', allow_none=True)

    keys = List(help="The traits which are synced.")

    @default('keys')
    def _default_keys(self):
        return [name for name in self.traits(sync=True)]

    _property_lock = Dict()
    _holding_sync = False
    _states_to_send = Set()
    _display_callbacks = Instance(CallbackDispatcher, ())
    _msg_callbacks = Instance(CallbackDispatcher, ())

    #-------------------------------------------------------------------------
    # (Con/de)structor
    #-------------------------------------------------------------------------
    def __init__(self, **kwargs):
        """Public constructor"""
        self._model_id = kwargs.pop('model_id', None)
        super(Widget, self).__init__(**kwargs)

        Widget._call_widget_constructed(self)
        self.open()

    def __del__(self):
        """Object disposal"""
        self.close()

    #-------------------------------------------------------------------------
    # Properties
    #-------------------------------------------------------------------------

    def open(self):
        """Open a comm to the frontend if one isn't already open."""
        if self.comm is None:
            state, buffer_paths, buffers = _remove_buffers(self.get_state())

            args = dict(target_name='jupyter.widget',
                        data={'state': state, 'buffer_paths': buffer_paths},
                        buffers=buffers,
                        metadata={'version': __protocol_version__}
                        )
            if self._model_id is not None:
                args['comm_id'] = self._model_id

            self.comm = Comm(**args)

    @observe('comm')
    def _comm_changed(self, change):
        """Called when the comm is changed."""
        if change['new'] is None:
            return
        self._model_id = self.model_id

        self.comm.on_msg(self._handle_msg)
        Widget.widgets[self.model_id] = self

    @property
    def model_id(self):
        """Gets the model id of this widget.

        If a Comm doesn't exist yet, a Comm will be created automagically."""
        return self.comm.comm_id

    #-------------------------------------------------------------------------
    # Methods
    #-------------------------------------------------------------------------

    def close(self):
        """Close method.

        Closes the underlying comm.
        When the comm is closed, all of the widget views are automatically
        removed from the front-end."""
        if self.comm is not None:
            Widget.widgets.pop(self.model_id, None)
            self.comm.close()
            self.comm = None
            self._ipython_display_ = None

    def send_state(self, key=None):
        """Sends the widget state, or a piece of it, to the front-end, if it exists.

        Parameters
        ----------
        key : unicode, or iterable (optional)
            A single property's name or iterable of property names to sync with the front-end.
        """
        state = self.get_state(key=key)
        if len(state) > 0:
            if self._property_lock:  # we need to keep this dict up to date with the front-end values
                for name, value in state.items():
                    if name in self._property_lock:
                        self._property_lock[name] = value
            state, buffer_paths, buffers = _remove_buffers(state)
            msg = {'method': 'update', 'state': state, 'buffer_paths': buffer_paths}
            self._send(msg, buffers=buffers)

    def get_state(self, key=None, drop_defaults=False):
        """Gets the widget state, or a piece of it.

        Parameters
        ----------
        key : unicode or iterable (optional)
            A single property's name or iterable of property names to get.

        Returns
        -------
        state : dict of states
        metadata : dict
            metadata for each field: {key: metadata}
        """
        if key is None:
            keys = self.keys
        elif isinstance(key, string_types):
            keys = [key]
        else:
            # `collections.Iterable` was removed in Python 3.10; use the
            # `collections.abc` home with a fallback for Python 2.
            try:
                from collections.abc import Iterable
            except ImportError:  # Python 2
                from collections import Iterable
            if isinstance(key, Iterable):
                keys = key
            else:
                raise ValueError("key must be a string, an iterable of keys, or None")
        state = {}
        traits = self.traits()
        for k in keys:
            to_json = self.trait_metadata(k, 'to_json', self._trait_to_json)
            value = to_json(getattr(self, k), self)
            # On Python 2, wrap raw bytes in a memoryview so they travel as a
            # binary buffer rather than being serialized as text.
            if not PY3 and isinstance(traits[k], Bytes) and isinstance(value, bytes):
                value = memoryview(value)
            if not drop_defaults or not self._compare(value, traits[k].default_value):
                state[k] = value
        return state

    def _is_numpy(self, x):
        # Duck-typed check so numpy need not be imported unless present.
        return x.__class__.__name__ == 'ndarray' and x.__class__.__module__ == 'numpy'

    def _compare(self, a, b):
        # numpy arrays raise on truth-testing `a == b`; use array_equal.
        if self._is_numpy(a) or self._is_numpy(b):
            import numpy as np
            return np.array_equal(a, b)
        else:
            return a == b

    def set_state(self, sync_data):
        """Called when a state is received from the front-end."""
        # The order of these context managers is important. Properties must
        # be locked when the hold_trait_notification context manager is
        # released and notifications are fired.
        with self._lock_property(**sync_data), self.hold_trait_notifications():
            for name in sync_data:
                if name in self.keys:
                    from_json = self.trait_metadata(name, 'from_json',
                                                    self._trait_from_json)
                    self.set_trait(name, from_json(sync_data[name], self))

    def send(self, content, buffers=None):
        """Sends a custom msg to the widget model in the front-end.

        Parameters
        ----------
        content : dict
            Content of the message to send.
        buffers : list of binary buffers
            Binary buffers to send with message
        """
        self._send({"method": "custom", "content": content}, buffers=buffers)

    def on_msg(self, callback, remove=False):
        """(Un)Register a custom msg receive callback.

        Parameters
        ----------
        callback: callable
            callback will be passed three arguments when a message arrives::

                callback(widget, content, buffers)

        remove: bool
            True if the callback should be unregistered."""
        self._msg_callbacks.register_callback(callback, remove=remove)

    def on_displayed(self, callback, remove=False):
        """(Un)Register a widget displayed callback.

        Parameters
        ----------
        callback: method handler
            Must have a signature of::

                callback(widget, **kwargs)

            kwargs from display are passed through without modification.
        remove: bool
            True if the callback should be unregistered."""
        self._display_callbacks.register_callback(callback, remove=remove)

    def add_traits(self, **traits):
        """Dynamically add trait attributes to the Widget."""
        super(Widget, self).add_traits(**traits)
        for name, trait in traits.items():
            if trait.get_metadata('sync'):
                self.keys.append(name)
                self.send_state(name)

    def notify_change(self, change):
        """Called when a property has changed."""
        # Send the state to the frontend before the user-registered callbacks
        # are called.
        name = change['name']
        if self.comm is not None and self.comm.kernel is not None:
            # Make sure this isn't information that the front-end just sent us.
            if name in self.keys and self._should_send_property(name, getattr(self, name)):
                # Send new state to front-end
                self.send_state(key=name)
        super(Widget, self).notify_change(change)

    def __repr__(self):
        return self._gen_repr_from_keys(self._repr_keys())

    #-------------------------------------------------------------------------
    # Support methods
    #-------------------------------------------------------------------------

    @contextmanager
    def _lock_property(self, **properties):
        """Lock a property-value pair.

        The value should be the JSON state of the property.

        NOTE: This, in addition to the single lock for all state changes, is
        flawed. In the future we may want to look into buffering state changes
        back to the front-end."""
        self._property_lock = properties
        try:
            yield
        finally:
            self._property_lock = {}

    @contextmanager
    def hold_sync(self):
        """Hold syncing any state until the outermost context manager exits"""
        if self._holding_sync is True:
            yield
        else:
            try:
                self._holding_sync = True
                yield
            finally:
                self._holding_sync = False
                self.send_state(self._states_to_send)
                self._states_to_send.clear()

    def _should_send_property(self, key, value):
        """Check the property lock (property_lock)"""
        to_json = self.trait_metadata(key, 'to_json', self._trait_to_json)
        if key in self._property_lock:
            # model_state, buffer_paths, buffers
            split_value = _remove_buffers({ key: to_json(value, self)})
            split_lock = _remove_buffers({ key: self._property_lock[key]})
            # A roundtrip conversion through json in the comparison takes care of
            # idiosyncracies of how python data structures map to json, for example
            # tuples get converted to lists.
            if (jsonloads(jsondumps(split_value[0])) == split_lock[0]
                and split_value[1] == split_lock[1]
                and _buffer_list_equal(split_value[2], split_lock[2])):
                return False
        if self._holding_sync:
            self._states_to_send.add(key)
            return False
        else:
            return True

    # Event handlers
    @_show_traceback
    def _handle_msg(self, msg):
        """Called when a msg is received from the front-end"""
        data = msg['content']['data']
        method = data['method']

        if method == 'update':
            if 'state' in data:
                state = data['state']
                if 'buffer_paths' in data:
                    _put_buffers(state, data['buffer_paths'], msg['buffers'])
                self.set_state(state)

        # Handle a state request.
        elif method == 'request_state':
            self.send_state()

        # Handle a custom msg from the front-end.
        elif method == 'custom':
            if 'content' in data:
                self._handle_custom_msg(data['content'], msg['buffers'])

        # Catch remainder.
        else:
            self.log.error('Unknown front-end to back-end widget msg with method "%s"' % method)

    def _handle_custom_msg(self, content, buffers):
        """Called when a custom msg is received."""
        self._msg_callbacks(self, content, buffers)

    def _handle_displayed(self, **kwargs):
        """Called when a view has been displayed for this widget instance"""
        self._display_callbacks(self, **kwargs)

    @staticmethod
    def _trait_to_json(x, self):
        """Convert a trait value to json."""
        return x

    @staticmethod
    def _trait_from_json(x, self):
        """Convert json values to objects."""
        return x

    def _ipython_display_(self, **kwargs):
        """Called when `IPython.display.display` is called on the widget."""
        plaintext = repr(self)
        if len(plaintext) > 110:
            plaintext = plaintext[:110] + '…'
        data = {
            'text/plain': plaintext,
        }
        if self._view_name is not None:
            # The 'application/vnd.jupyter.widget-view+json' mimetype has not been registered yet.
            # See the registration process and naming convention at
            # http://tools.ietf.org/html/rfc6838
            # and the currently registered mimetypes at
            # http://www.iana.org/assignments/media-types/media-types.xhtml.
            data['application/vnd.jupyter.widget-view+json'] = {
                'version_major': 2,
                'version_minor': 0,
                'model_id': self._model_id
            }
        display(data, raw=True)

        if self._view_name is not None:
            self._handle_displayed(**kwargs)

    def _send(self, msg, buffers=None):
        """Sends a message to the model in the front-end."""
        if self.comm is not None and self.comm.kernel is not None:
            self.comm.send(data=msg, buffers=buffers)

    def _repr_keys(self):
        traits = self.traits()
        for key in sorted(self.keys):
            # Exclude traits that start with an underscore
            if key[0] == '_':
                continue
            # Exclude traits who are equal to their default value
            value = getattr(self, key)
            trait = traits[key]
            if self._compare(value, trait.default_value):
                continue
            elif (isinstance(trait, (Container, Dict)) and
                  trait.default_value == Undefined and
                  (value is None or len(value) == 0)):
                # Empty container, and dynamic default will be empty
                continue
            yield key

    def _gen_repr_from_keys(self, keys):
        class_name = self.__class__.__name__
        signature = ', '.join(
            '%s=%r' % (key, getattr(self, key))
            for key in keys
        )
        return '%s(%s)' % (class_name, signature)
class BrowserContext(object):
    """Represents an in-browser context.

    Opens a comm channel to the front-end and exchanges JSON messages
    over it: Python-initiated requests (getattr/setattr/apply) return a
    promise that is resolved when the front-end replies, and front-end
    initiated callback invocations are dispatched to registered Python
    callables.
    """

    def __init__(self):
        """Constructor.

        Pushes the supporting Javascript to the front-end and opens the
        'BrowserContext' comm channel.
        """
        self._calls = 0        # monotonically increasing request index
        self._callbacks = {}   # request index -> pending SimplePromise

        # Push the Javascript to the front-end.
        js_path = os.path.join(os.path.split(__file__)[0], 'backend_context.js')
        # Explicit encoding so the JS payload decodes the same way
        # regardless of the platform's locale default.
        with open(js_path, 'r', encoding='utf-8') as f:
            display(Javascript(data=f.read()))

        # Open communication with the front-end.
        self._comm = Comm(target_name='BrowserContext')
        self._comm.on_msg(self._on_msg)

    def _on_msg(self, msg):
        """Handle messages from the front-end."""
        data = msg['content']['data']

        # If the message is a call invoke, run the function and send
        # the results.
        if 'callback' in data:
            guid = data['callback']
            callback = callback_registry[guid]
            args = [self.deserialize(a) for a in data['arguments']]
            index = data['index']
            results = callback(*args)
            return self.serialize(
                self._send('return', index=index, results=results))
        # The message is not a call invoke, it must be an object
        # that is a response to a Python request.
        else:
            index = data['index']
            immutable = data['immutable']
            value = data['value']
            if index in self._callbacks:
                self._callbacks[index].resolve({
                    'immutable': immutable,
                    'value': value
                })
                del self._callbacks[index]

    def serialize(self, obj):
        """Serialize an object for sending to the front-end.

        JS-proxy objects (anything with a ``_jsid``) are sent by
        reference; JSON-serializable values are sent by value; callables
        are registered and sent as a callback guid.  Values that are
        neither produce an entry with no 'value' key (best-effort).
        """
        if hasattr(obj, '_jsid'):
            return {'immutable': False, 'value': obj._jsid}
        else:
            obj_json = {'immutable': True}
            try:
                json.dumps(obj)  # probe only; the raw value is sent
                obj_json['value'] = obj
            except (TypeError, ValueError, OverflowError):
                # json.dumps failures only (unserializable / circular /
                # out-of-range); deliberately best-effort, but no longer a
                # bare except that would swallow KeyboardInterrupt etc.
                pass
            if callable(obj):
                guid = str(uuid.uuid4())
                callback_registry[guid] = obj
                obj_json['callback'] = guid
            return obj_json

    def deserialize(self, obj):
        """Deserialize an object from the front-end.

        Immutable payloads are returned as-is; mutable ones are wrapped
        in a (cached) JSObject proxy keyed by guid.
        """
        if obj['immutable']:
            return obj['value']
        else:
            guid = obj['value']
            if guid not in object_registry:
                object_registry[guid] = JSObject(self, guid)
            return object_registry[guid]

    # Message types
    def getattr(self, parent, child):
        """Request the value of ``parent.child`` from the front-end."""
        return self._send('getattr', parent=parent, child=child)

    def setattr(self, parent, child, value):
        """Set ``parent.child = value`` in the front-end."""
        return self._send('setattr',
                          parent=parent, child=child, value=value)

    def apply(self, parent, function, *pargs):
        """Invoke ``function`` on ``parent`` in the front-end."""
        return self._send('apply', parent=parent, function=function,
                          args=pargs)

    def _send(self, method, **parameters):
        """Sends a message to the front-end and returns a promise."""
        msg = {
            'index': self._calls,
            'method': method,
        }
        msg.update(parameters)
        promise = SimplePromise()
        self._callbacks[self._calls] = promise
        self._calls += 1
        self._comm.send(msg)
        return promise
class Widget(LoggingHasTraits):
    """Base class for synchronized-state widgets.

    Mirrors traits tagged ``sync=True`` to a front-end model over a
    Jupyter comm, and routes messages from the front-end back into
    trait updates and user-registered callbacks.
    """
    #-------------------------------------------------------------------------
    # Class attributes
    #-------------------------------------------------------------------------

    # Single global hook fired from _call_widget_constructed() each time a
    # widget instance is built; installed via on_widget_constructed().
    _widget_construction_callback = None

    # widgets is a dictionary of all active widget objects
    widgets = {}

    # widget_types is a registry of widgets by module, version, and name:
    widget_types = WidgetRegistry()

    @staticmethod
    def on_widget_constructed(callback):
        """Registers a callback to be called when a widget is constructed.

        The callback must have the following signature:
        callback(widget)"""
        Widget._widget_construction_callback = callback

    @staticmethod
    def _call_widget_constructed(widget):
        """Static method, called when a widget is constructed."""
        if Widget._widget_construction_callback is not None and callable(
                Widget._widget_construction_callback):
            Widget._widget_construction_callback(widget)

    @staticmethod
    def handle_comm_opened(comm, msg):
        """Static method, called when the front-end opens a comm.

        Instantiates the widget class named in the message's state and
        hands it the already-open comm.
        """
        data = msg['content']['data']
        state = data['state']

        # Find the widget class to instantiate in the registered widgets
        widget_class = Widget.widget_types.get(state['_model_module'],
                                               state['_model_module_version'],
                                               state['_model_name'],
                                               state['_view_module'],
                                               state['_view_module_version'],
                                               state['_view_name'])
        widget = widget_class(comm=comm)
        if 'buffer_paths' in data:
            # Re-attach binary buffers that were stripped out of the JSON state.
            _put_buffers(state, data['buffer_paths'], msg['buffers'])
        widget.set_state(state)

    @staticmethod
    def get_manager_state(drop_defaults=False):
        # Snapshot of every live widget's model identity and sync'd state,
        # in the versioned widget-manager state format.
        return dict(version_major=1, version_minor=0, state={
            k: {
                'model_name': Widget.widgets[k]._model_name,
                'model_module': Widget.widgets[k]._model_module,
                'model_module_version': Widget.widgets[k]._model_module_version,
                'state': Widget.widgets[k].get_state(drop_defaults=drop_defaults)
            } for k in Widget.widgets
        })

    def get_view_spec(self):
        # Versioned reference to this widget's front-end model.
        return dict(version_major=1, version_minor=0, model_id=self._model_id)

    #-------------------------------------------------------------------------
    # Traits
    #-------------------------------------------------------------------------
    _model_module = Unicode(
        None,
        help="A JavaScript module name in which to find _model_name.",
        read_only=True).tag(sync=True)
    _model_name = Unicode(
        'WidgetModel',
        help="Name of the model.",
        read_only=True).tag(sync=True)
    _model_module_version = Unicode(
        '*',
        help="A semver requirement for the model module version.",
        read_only=True).tag(sync=True)
    _view_module = Unicode(
        None, allow_none=True,
        help="A JavaScript module in which to find _view_name.").tag(sync=True)
    _view_name = Unicode(
        None, allow_none=True,
        help="Name of the view object.").tag(sync=True)
    _view_module_version = Unicode(
        '*',
        help="A semver requirement for the view module.").tag(sync=True)
    _view_count = Int(
        read_only=True,
        help=
        "EXPERIMENTAL: The number of views of the model displayed in the frontend. This attribute is experimental and may change or be removed in the future."
    ).tag(sync=True)
    comm = Instance('ipykernel.comm.Comm', allow_none=True)

    msg_throttle = Int(
        1,
        help=
        """Maximum number of msgs the front-end can send before receiving an idle msg from the back-end."""
    ).tag(sync=True)

    # keys holds the names of every trait tagged sync=True; it is the set of
    # properties mirrored to the front-end.
    keys = List()

    @default('keys')
    def _default_keys(self):
        return [name for name in self.traits(sync=True)]

    # State used by the sync machinery (see _lock_property / hold_sync /
    # _should_send_property below).
    _property_lock = Dict()
    _holding_sync = False
    _states_to_send = Set()
    _display_callbacks = Instance(CallbackDispatcher, ())
    _msg_callbacks = Instance(CallbackDispatcher, ())

    #-------------------------------------------------------------------------
    # (Con/de)structor
    #-------------------------------------------------------------------------
    def __init__(self, **kwargs):
        """Public constructor"""
        self._model_id = kwargs.pop('model_id', None)
        super(Widget, self).__init__(**kwargs)

        Widget._call_widget_constructed(self)
        # Immediately open the comm so the front-end model exists.
        self.open()

    def __del__(self):
        """Object disposal"""
        # Best-effort: relies on garbage collection actually running.
        self.close()

    #-------------------------------------------------------------------------
    # Properties
    #-------------------------------------------------------------------------

    def open(self):
        """Open a comm to the frontend if one isn't already open."""
        if self.comm is None:
            # Binary buffers cannot travel in the JSON state; they are split
            # out and sent alongside it with their paths.
            state, buffer_paths, buffers = _remove_buffers(self.get_state())

            args = dict(target_name='jupyter.widget',
                        data={
                            'state': state,
                            'buffer_paths': buffer_paths
                        },
                        buffers=buffers)
            if self._model_id is not None:
                args['comm_id'] = self._model_id

            self.comm = Comm(**args)

    @observe('comm')
    def _comm_changed(self, change):
        """Called when the comm is changed."""
        if change['new'] is None:
            return
        self._model_id = self.model_id

        self.comm.on_msg(self._handle_msg)
        # Register in the global table of live widgets.
        Widget.widgets[self.model_id] = self

    @property
    def model_id(self):
        """Gets the model id of this widget.

        If a Comm doesn't exist yet, a Comm will be created automagically."""
        return self.comm.comm_id

    #-------------------------------------------------------------------------
    # Methods
    #-------------------------------------------------------------------------

    def close(self):
        """Close method.

        Closes the underlying comm.
        When the comm is closed, all of the widget views are automatically
        removed from the front-end."""
        if self.comm is not None:
            Widget.widgets.pop(self.model_id, None)
            self.comm.close()
            self.comm = None
            # Instance attribute shadows the class method so a closed widget
            # no longer renders when displayed.
            self._ipython_display_ = None

    def send_state(self, key=None):
        """Sends the widget state, or a piece of it, to the front-end.

        Parameters
        ----------
        key : unicode, or iterable (optional)
            A single property's name or iterable of property names to sync with the front-end.
        """
        state = self.get_state(key=key)
        state, buffer_paths, buffers = _remove_buffers(state)
        msg = {
            'method': 'update',
            'state': state,
            'buffer_paths': buffer_paths
        }
        self._send(msg, buffers=buffers)

    def get_state(self, key=None, drop_defaults=False):
        """Gets the widget state, or a piece of it.

        Parameters
        ----------
        key : unicode or iterable (optional)
            A single property's name or iterable of property names to get.

        Returns
        -------
        state : dict of states
        metadata : dict
            metadata for each field: {key: metadata}
        """
        if key is None:
            keys = self.keys
        elif isinstance(key, string_types):
            keys = [key]
        # NOTE(review): collections.Iterable is deprecated and removed in
        # Python 3.10 — should be collections.abc.Iterable; confirm before
        # changing since this file targets Python 2 as well (see PY3 below).
        elif isinstance(key, collections.Iterable):
            keys = key
        else:
            raise ValueError(
                "key must be a string, an iterable of keys, or None")
        state = {}
        traits = self.traits()
        for k in keys:
            # Per-trait serializer hook; defaults to the identity function.
            to_json = self.trait_metadata(k, 'to_json', self._trait_to_json)
            value = to_json(getattr(self, k), self)
            # Python 2 only: wrap raw bytes in a memoryview so the message
            # layer treats them as a binary buffer.
            if not PY3 and isinstance(traits[k], Bytes) and isinstance(
                    value, bytes):
                value = memoryview(value)
            if not drop_defaults or not self._compare(value,
                                                      traits[k].default_value):
                state[k] = value
        return state

    def _is_numpy(self, x):
        # Duck-typed check so numpy is not imported unless actually needed.
        return x.__class__.__name__ == 'ndarray' and x.__class__.__module__ == 'numpy'

    def _compare(self, a, b):
        # ndarray == is elementwise, so use np.equal explicitly for arrays.
        if self._is_numpy(a) or self._is_numpy(b):
            import numpy as np
            return np.equal(a, b)
        else:
            return a == b

    def set_state(self, sync_data):
        """Called when a state is received from the front-end."""
        # The order of these context managers is important. Properties must
        # be locked when the hold_trait_notification context manager is
        # released and notifications are fired.
        with self._lock_property(**sync_data), self.hold_trait_notifications():
            for name in sync_data:
                if name in self.keys:
                    from_json = self.trait_metadata(name, 'from_json',
                                                    self._trait_from_json)
                    self.set_trait(name, from_json(sync_data[name], self))

    def send(self, content, buffers=None):
        """Sends a custom msg to the widget model in the front-end.

        Parameters
        ----------
        content : dict
            Content of the message to send.
        buffers : list of binary buffers
            Binary buffers to send with message
        """
        self._send({"method": "custom", "content": content}, buffers=buffers)

    def on_msg(self, callback, remove=False):
        """(Un)Register a custom msg receive callback.

        Parameters
        ----------
        callback: callable
            callback will be passed three arguments when a message arrives::

                callback(widget, content, buffers)

        remove: bool
            True if the callback should be unregistered."""
        self._msg_callbacks.register_callback(callback, remove=remove)

    def on_displayed(self, callback, remove=False):
        """(Un)Register a widget displayed callback.

        Parameters
        ----------
        callback: method handler
            Must have a signature of::

                callback(widget, **kwargs)

            kwargs from display are passed through without modification.
        remove: bool
            True if the callback should be unregistered."""
        self._display_callbacks.register_callback(callback, remove=remove)

    def add_traits(self, **traits):
        """Dynamically add trait attributes to the Widget."""
        super(Widget, self).add_traits(**traits)
        for name, trait in traits.items():
            if trait.get_metadata('sync'):
                # Newly added sync'd traits join the mirrored key set and are
                # pushed to the front-end immediately.
                self.keys.append(name)
                self.send_state(name)

    def notify_change(self, change):
        """Called when a property has changed."""
        # Send the state to the frontend before the user-registered callbacks
        # are called.
        name = change['name']
        if self.comm is not None and self.comm.kernel is not None:
            # Make sure this isn't information that the front-end just sent us.
            if name in self.keys and self._should_send_property(
                    name, change['new']):
                # Send new state to front-end
                self.send_state(key=name)
        super(Widget, self).notify_change(change)

    #-------------------------------------------------------------------------
    # Support methods
    #-------------------------------------------------------------------------
    @contextmanager
    def _lock_property(self, **properties):
        """Lock a property-value pair.

        The value should be the JSON state of the property.

        NOTE: This, in addition to the single lock for all state changes, is
        flawed. In the future we may want to look into buffering state changes
        back to the front-end."""
        self._property_lock = properties
        try:
            yield
        finally:
            self._property_lock = {}

    @contextmanager
    def hold_sync(self):
        """Hold syncing any state until the outermost context manager exits"""
        # Nested hold_sync blocks are no-ops; only the outermost one flushes.
        if self._holding_sync is True:
            yield
        else:
            try:
                self._holding_sync = True
                yield
            finally:
                self._holding_sync = False
                # Flush everything that changed while syncing was held.
                self.send_state(self._states_to_send)
                self._states_to_send.clear()

    def _should_send_property(self, key, value):
        """Check the property lock (property_lock)"""
        to_json = self.trait_metadata(key, 'to_json', self._trait_to_json)
        # Locked properties hold the JSON the front-end just sent; echoing an
        # identical value back would be a redundant round-trip.
        if (key in self._property_lock
                and to_json(value, self) == self._property_lock[key]):
            return False
        elif self._holding_sync:
            # Defer: it will be flushed when hold_sync exits.
            self._states_to_send.add(key)
            return False
        else:
            return True

    # Event handlers
    @_show_traceback
    def _handle_msg(self, msg):
        """Called when a msg is received from the front-end"""
        data = msg['content']['data']
        method = data['method']

        if method == 'update':
            if 'state' in data:
                state = data['state']
                if 'buffer_paths' in data:
                    _put_buffers(state, data['buffer_paths'], msg['buffers'])
                self.set_state(state)

        # Handle a state request.
        elif method == 'request_state':
            self.send_state()

        # Handle a custom msg from the front-end.
        elif method == 'custom':
            if 'content' in data:
                self._handle_custom_msg(data['content'], msg['buffers'])

        # Catch remainder.
        else:
            self.log.error(
                'Unknown front-end to back-end widget msg with method "%s"' %
                method)

    def _handle_custom_msg(self, content, buffers):
        """Called when a custom msg is received."""
        self._msg_callbacks(self, content, buffers)

    def _handle_displayed(self, **kwargs):
        """Called when a view has been displayed for this widget instance"""
        self._display_callbacks(self, **kwargs)

    # NOTE: these two take (value, widget) to match the to_json/from_json
    # trait-metadata hook signature used in get_state/set_state; `self` here
    # is the widget passed positionally, not a bound instance.
    @staticmethod
    def _trait_to_json(x, self):
        """Convert a trait value to json."""
        return x

    @staticmethod
    def _trait_from_json(x, self):
        """Convert json values to objects."""
        return x

    def _ipython_display_(self, **kwargs):
        """Called when `IPython.display.display` is called on the widget."""
        if self._view_name is not None:
            # TODO: delete this sending of a comm message when the display statement
            # below works. Then add a 'text/plain' mimetype to the dictionary below.
            self._send({"method": "display"})

            # The 'application/vnd.jupyter.widget-view+json' mimetype has not been registered yet.
            # See the registration process and naming convention at
            # http://tools.ietf.org/html/rfc6838
            # and the currently registered mimetypes at
            # http://www.iana.org/assignments/media-types/media-types.xhtml.
            # We don't have a 'text/plain' entry, so this display message will be
            # will be invisible in the current notebook.
            data = {
                'application/vnd.jupyter.widget-view+json': {
                    'model_id': self._model_id
                }
            }
            display(data, raw=True)

            self._handle_displayed(**kwargs)

    def _send(self, msg, buffers=None):
        """Sends a message to the model in the front-end."""
        # Only send when a comm is open and attached to a live kernel.
        if self.comm is not None and self.comm.kernel is not None:
            self.comm.send(data=msg, buffers=buffers)
class JobManager(object): """ The KBase Job Manager class. This handles all jobs and makes their status available. On status lookups, it feeds the results to the KBaseJobs channel that the front end listens to. """ __instance = None # keys = job_id, values = { refresh = T/F, job = Job object } _running_jobs = dict() # keys = job_id, values = state from either Job object or NJS (these are identical) _completed_job_states = dict() _lookup_timer = None _comm = None _log = kblogging.get_logger(__name__) # TODO: should this not be done globally? _running_lookup_loop = False def __new__(cls): if JobManager.__instance is None: JobManager.__instance = object.__new__(cls) return JobManager.__instance def initialize_jobs(self, start_lookup_thread=True): """ Initializes this JobManager. This is expected to be run by a running Narrative, and naturally linked to a workspace. So it does the following steps. 1. app_util.system_variable('workspace_id') 2. get list of jobs with that ws id from UJS (also gets tag, cell_id, run_id) 3. initialize the Job objects by running NJS.get_job_params (also gets app_id) 4. start the status lookup loop. 
""" the_time = int(round(time.time() * 1000)) self._send_comm_message('start', {'time': the_time}) ws_id = system_variable('workspace_id') try: nar_jobs = clients.get('user_and_job_state').list_jobs2({ 'authstrat': 'kbaseworkspace', 'authparams': [str(ws_id)] }) except Exception as e: kblogging.log_event(self._log, 'init_error', {'err': str(e)}) new_e = transform_job_exception(e) error = { 'error': 'Unable to get initial jobs list', 'message': getattr(new_e, 'message', 'Unknown reason'), 'code': getattr(new_e, 'code', -1), 'source': getattr(new_e, 'source', 'jobmanager'), 'name': getattr(new_e, 'name', type(e).__name__), 'service': 'user_and_job_state' } self._send_comm_message('job_init_err', error) raise new_e job_ids = [j[0] for j in nar_jobs] job_states = clients.get('job_service').check_jobs({ 'job_ids': job_ids, 'with_job_params': 1 }) job_param_info = job_states.get('job_params', {}) job_check_error = job_states.get('check_error', {}) error_jobs = dict() for info in nar_jobs: job_id = info[0] user_info = info[1] job_meta = info[10] try: if job_id in job_param_info: job_info = job_param_info[job_id] job = Job.from_state(job_id, job_info, user_info[0], app_id=job_info.get('app_id'), tag=job_meta.get('tag', 'release'), cell_id=job_meta.get('cell_id', None), run_id=job_meta.get('run_id', None), token_id=job_meta.get('token_id', None), meta=job_meta) # Note that when jobs for this narrative are initially loaded, # they are set to not be refreshed. Rather, if a client requests # updates via the start_job_update message, the refresh flag will # be set to True. 
self._running_jobs[job_id] = { 'refresh': 0, 'job': job } elif job_id in job_check_error: job_err_state = { 'job_state': 'error', 'error': { 'error': 'KBase execution engine returned an error while looking up this job.', 'message': job_check_error[job_id].get('message', 'No error message available'), 'name': 'Job Error', 'code': job_check_error[job_id].get('code', -999), 'exception': { 'error_message': 'Job lookup in execution engine failed', 'error_type': job_check_error[job_id].get('name', 'unknown'), 'error_stacktrace': job_check_error[job_id].get('error', '') } }, 'cell_id': job_meta.get('cell_id', None), 'run_id': job_meta.get('run_id', None), } error_jobs[job_id] = job_err_state except Exception as e: kblogging.log_event(self._log, 'init_error', {'err': str(e)}) new_e = transform_job_exception(e) error = { 'error': 'Unable to get job info on initial lookup', 'job_id': job_id, 'message': getattr(new_e, 'message', 'Unknown reason'), 'code': getattr(new_e, 'code', -1), 'source': getattr(new_e, 'source', 'jobmanager'), 'name': getattr(new_e, 'name', type(e).__name__), 'service': 'job_service' } self._send_comm_message('job_init_lookup_err', error) raise new_e # should crash and burn on any of these. if len(job_check_error): err_str = 'Unable to find info for some jobs on initial lookup' err_type = 'job_init_partial_err' if len(job_check_error) == len(nar_jobs): err_str = 'Unable to get info for any job on initial lookup' err_type = 'job_init_lookup_err' error = { 'error': err_str, 'job_errors': error_jobs, 'message': 'Job information was unavailable from the server', 'code': -2, 'source': 'jobmanager', 'name': 'jobmanager', 'service': 'job_service', } self._send_comm_message(err_type, error) if not self._running_lookup_loop and start_lookup_thread: # only keep one loop at a time in cause this gets called again! 
if self._lookup_timer is not None: self._lookup_timer.cancel() self._running_lookup_loop = True self._lookup_job_status_loop() else: self._lookup_all_job_status() def _create_jobs(self, job_ids): """ TODO: error handling Makes a bunch of Job objects from job_ids. Initially used to make Child jobs from some parent, but will eventually be adapted to all jobs on startup. Just slaps them all into _running_jobs """ job_states = clients.get('job_service').check_jobs({'job_ids': job_ids, 'with_job_params': 1}) for job_id in job_ids: ujs_info = clients.get('user_and_job_state').get_job_info2(job_id) if job_id in job_ids and job_id not in self._running_jobs: job_info = job_states.get('job_params', {}).get(job_id, {}) job_meta = ujs_info[10] job = Job.from_state(job_id, # the id job_info, # params, etc. ujs_info[2], # owner id app_id=job_info.get('app_id', job_info.get('method')), tag=job_meta.get('tag', 'release'), cell_id=job_meta.get('cell_id', None), run_id=job_meta.get('run_id', None), token_id=job_meta.get('token_id', None), meta=job_meta) # Note that when jobs for this narrative are initially loaded, # they are set to not be refreshed. Rather, if a client requests # updates via the start_job_update message, the refresh flag will # be set to True. self._running_jobs[job_id] = { 'refresh': 0, 'job': job } def list_jobs(self): """ List all job ids, their info, and status in a quick HTML format. """ try: status_set = list() for job_id in self._running_jobs: job = self._running_jobs[job_id]['job'] job_state = self._get_job_state(job_id) job_state['app_id'] = job.app_id job_state['owner'] = job.owner status_set.append(job_state) if not len(status_set): return "No running jobs!" 
status_set = sorted(status_set, key=lambda s: s['creation_time']) for i in range(len(status_set)): status_set[i]['creation_time'] = datetime.datetime.strftime(datetime.datetime.fromtimestamp(status_set[i]['creation_time']/1000), "%Y-%m-%d %H:%M:%S") exec_start = status_set[i].get('exec_start_time', None) if 'finish_time' in status_set[i]: finished = status_set[i].get('finish_time', None) if finished is not None and exec_start: delta = datetime.datetime.fromtimestamp(finished/1000.0) - datetime.datetime.fromtimestamp(exec_start/1000.0) delta = delta - datetime.timedelta(microseconds=delta.microseconds) status_set[i]['run_time'] = str(delta) status_set[i]['finish_time'] = datetime.datetime.strftime(datetime.datetime.fromtimestamp(status_set[i]['finish_time']/1000), "%Y-%m-%d %H:%M:%S") elif exec_start: delta = datetime.datetime.utcnow() - datetime.datetime.utcfromtimestamp(exec_start/1000.0) delta = delta - datetime.timedelta(microseconds=delta.microseconds) status_set[i]['run_time'] = str(delta) else: status_set[i]['run_time'] = 'Not started' tmpl = """ <table class="table table-bordered table-striped table-condensed"> <tr> <th>Id</th> <th>Name</th> <th>Submitted</th> <th>Submitted By</th> <th>Status</th> <th>Run Time</th> <th>Complete Time</th> </tr> {% for j in jobs %} <tr> <td>{{ j.job_id|e }}</td> <td>{{ j.app_id|e }}</td> <td>{{ j.creation_time|e }}</td> <td>{{ j.owner|e }}</td> <td>{{ j.job_state|e }}</td> <td>{{ j.run_time|e }}</td> <td>{% if j.finish_time %}{{ j.finish_time|e }}{% else %}Incomplete{% endif %}</td> </tr> {% endfor %} </table> """ return HTML(Template(tmpl).render(jobs=status_set)) except Exception as e: kblogging.log_event(self._log, "list_jobs.error", {'err': str(e)}) raise def get_jobs_list(self): """ A convenience method for fetching an unordered list of all running Jobs. 
""" return [j['job'] for j in self._running_jobs.values()] def _construct_job_status(self, job, state): """ Creates a Job status dictionary with structure: { owner: string (username), spec: app_spec (from NMS, via biokbase.narrative.jobs.specmanager) widget_info: (if not finished, None, else...) job.get_viewer_params result state: { job_state: string, error (if present): dict of error info, cell_id: string/None, run_id: string/None, awe_job_id: string/None, canceled: 0/1 creation_time: epoch second exec_start_time: epoch/none, finish_time: epoch/none, finished: 0/1, job_id: string, status: (from UJS) [ timestamp(last_update, string), stage (string), status (string), progress (string/None), est_complete (string/None), complete (0/1), error (0/1) ], ujs_url: string } } """ widget_info = None app_spec = {} if job is None: state = { 'job_state': 'error', 'error': { 'error': 'Job does not seem to exist, or it is otherwise unavailable.', 'message': 'Job does not exist', 'name': 'Job Error', 'code': -1, 'exception': { 'error_message': 'job not found in JobManager', 'error_type': 'ValueError', 'error_stacktrace': '' } }, 'cell_id': None, 'run_id': None, } return { 'state': state, 'app_spec': app_spec, 'widget_info': widget_info, 'owner': None } # try: # app_spec = job.app_spec() # except Exception as e: # kblogging.log_event(self._log, "lookup_job_status.error", {'err': str(e)}) if state is None: kblogging.log_event(self._log, "lookup_job_status.error", {'err': 'Unable to get job state for job {}'.format(job.job_id)}) state = { 'job_state': 'error', 'error': { 'error': 'Unable to find current job state. 
Please try again later, or contact KBase.', 'message': 'Unable to return job state', 'name': 'Job Error', 'code': -1, 'source': 'JobManager._construct_job_status', 'exception': { 'error_message': 'No state provided during lookup', 'error_type': 'null-state', 'error_stacktrace': '', } }, 'creation_time': 0, 'cell_id': job.cell_id, 'run_id': job.run_id, 'job_id': job.job_id } elif 'lookup_error' in state: kblogging.log_event(self._log, "lookup_job_status.error", { 'err': 'Problem while getting state for job {}'.format(job.job_id), 'info': str(state['lookup_error']) }) state = { 'job_state': 'error', 'error': { 'error': 'Unable to fetch current state. Please try again later, or contact KBase.', 'message': 'Error while looking up job state', 'name': 'Job Error', 'code': -1, 'source': 'JobManager._construct_job_status', 'exception': { 'error_message': 'Error while fetching job state', 'error_type': 'failed-lookup', }, 'error_response': state['lookup_error'], 'creation_time': 0, 'cell_id': job.cell_id, 'run_id': job.run_id, 'job_id': job.job_id } } if state.get('finished', 0) == 1: try: widget_info = job.get_viewer_params(state) except Exception as e: # Can't get viewer params new_e = transform_job_exception(e) kblogging.log_event(self._log, "lookup_job_status.error", {'err': str(e)}) state['job_state'] = 'error' state['error'] = { 'error': 'Unable to generate App output viewer!\nThe App appears to have completed successfully,\nbut we cannot construct its output viewer.\nPlease contact the developer of this App for assistance.', 'message': 'Unable to build output viewer parameters!', 'name': 'App Error', 'code': getattr(new_e, "code", -1), 'source': getattr(new_e, "source", "JobManager") } if 'canceling' in self._running_jobs[job.job_id]: state['job_state'] = 'canceling' state.update({ 'child_jobs': self._child_job_states( state.get('sub_jobs', []), job.meta.get('batch_app'), job.meta.get('batch_tag') ) }) if 'batch_size' in job.meta: state.update({'batch_size': 
job.meta['batch_size']}) return {'state': state, 'spec': app_spec, 'widget_info': widget_info, 'owner': job.owner, 'listener_count': self._running_jobs[job.job_id]['refresh']} def _child_job_states(self, sub_job_list, app_id, app_tag): """ Fetches state for all jobs in the list. These are expected to be child jobs, with no actual Job object associated. So if they're done, we need to do the output mapping out of band. But the check_jobs call with params will return the app id. So that helps. app_id = the id of the app that all the child jobs are running (format: module/method, like "MEGAHIT/run_megahit") app_tag = one of "release", "beta", "dev" (the above two aren't stored with the subjob metadata, and won't until we back some more on KBParallel - I want to lobby for pushing toward just starting everything up at once from here and letting HTCondor deal with allocation) sub_job_list = list of ids of jobs to look up """ if not sub_job_list: return [] sub_job_list = sorted(sub_job_list) job_info = clients.get('job_service').check_jobs({'job_ids': sub_job_list, 'with_job_params': 1}) child_job_states = list() for job_id in sub_job_list: params = job_info['job_params'][job_id] # if it's error, get the error. if job_id in job_info['check_error']: error = job_info['check_error'][job_id] error.update({'job_id': job_id}) child_job_states.append(error) continue # if it's done, get the output mapping. 
state = job_info['job_states'][job_id] if state.get('finished', 0) == 1: try: widget_info = Job.map_viewer_params( state, params['params'], app_id, app_tag ) except ValueError: widget_info = {} state.update({'widget_info': widget_info}) child_job_states.append(state) return child_job_states def _construct_job_status_set(self, job_ids): job_states = self._get_all_job_states(job_ids) status_set = dict() for job_id in job_ids: job = None if job_id in self._running_jobs: job = self._running_jobs[job_id]['job'] status_set[job_id] = self._construct_job_status(job, job_states.get(job_id, None)) return status_set def _verify_job_parentage(self, parent_job_id, child_job_id): """ Validate job relationships. 1. Make sure parent exists, and the child id is in its list of sub jobs. 2. If child doesn't exist, create it and add it to the list. If parent doesn't exist, or child isn't an actual child, raise an exception """ if parent_job_id not in self._running_jobs: raise ValueError('Parent job id {} not found, cannot validate child job {}.'.format(parent_job_id, child_job_id)) if child_job_id not in self._running_jobs: parent_job = self.get_job(parent_job_id) parent_state = parent_job.state() if child_job_id not in parent_state.get('sub_jobs', []): raise ValueError('Child job id {} is not a child of parent job {}'.format(child_job_id, parent_job_id)) else: self._create_jobs([child_job_id]) # injects its app id and version child_job = self.get_job(child_job_id) child_job.app_id = parent_job.meta.get('batch_app') child_job.tag = parent_job.meta.get('batch_tag', 'release') def _lookup_job_status(self, job_id, parent_job_id=None): """ Will raise a ValueError if job_id doesn't exist. Sends the status over the comm channel as the usual job_status message. """ # if parent_job is real, and job_id (the child) is not, just add it to the # list of running jobs and work as normal. 
if parent_job_id is not None: self._verify_job_parentage(parent_job_id, job_id) job = self._running_jobs.get(job_id, {}).get('job', None) state = self._get_job_state(job_id) status = self._construct_job_status(job, state) self._send_comm_message('job_status', status) def _lookup_job_info(self, job_id, parent_job_id=None): """ Will raise a ValueError if job_id doesn't exist. Sends the info over the comm channel as this packet: { app_id: module/name, app_name: random string, job_id: string, job_params: dictionary } """ # if parent_job is real, and job_id (the child) is not, just add it to the # list of running jobs and work as normal. if parent_job_id is not None: self._verify_job_parentage(parent_job_id, job_id) job = self.get_job(job_id) info = { 'app_id': job.app_id, 'app_name': job.app_spec()['info']['name'], 'job_id': job_id, 'job_params': job.inputs } self._send_comm_message('job_info', info) def _lookup_all_job_status(self, ignore_refresh_flag=False): """ Looks up status for all jobs. Once job info is acquired, it gets pushed to the front end over the 'KBaseJobs' channel. """ jobs_to_lookup = list() # grab the list of running job ids, so we don't run into update-while-iterating problems. for job_id in self._running_jobs.keys(): if self._running_jobs[job_id]['refresh'] > 0 or ignore_refresh_flag: jobs_to_lookup.append(job_id) if len(jobs_to_lookup) > 0: status_set = self._construct_job_status_set(jobs_to_lookup) self._send_comm_message('job_status_all', status_set) return len(jobs_to_lookup) def _start_job_status_loop(self): kblogging.log_event(self._log, 'starting job status loop', {}) if self._lookup_timer is None: self._lookup_job_status_loop() def _lookup_job_status_loop(self): """ Initialize a loop that will look up job info. This uses a Timer thread on a 10 second loop to update things. """ refreshing_jobs = self._lookup_all_job_status() # Automatically stop when there are no more jobs requesting a refresh. 
if refreshing_jobs == 0: self.cancel_job_lookup_loop() else: self._lookup_timer = threading.Timer(10, self._lookup_job_status_loop) self._lookup_timer.start() def cancel_job_lookup_loop(self): """ Cancels a running timer if one's still alive. """ if self._lookup_timer: self._lookup_timer.cancel() self._lookup_timer = None self._running_lookup_loop = False def register_new_job(self, job): """ Registers a new Job with the manager - should only be invoked when a new Job gets started. This stores the Job locally and pushes it over the comm channel to the Narrative where it gets serialized. Parameters: ----------- job : biokbase.narrative.jobs.job.Job object The new Job that was started. """ self._running_jobs[job.job_id] = {'job': job, 'refresh': 0} # push it forward! create a new_job message. self._lookup_job_status(job.job_id) self._send_comm_message('new_job', { 'job_id': job.job_id }) def get_job(self, job_id): """ Returns a Job with the given job_id. Raises a ValueError if not found. """ if job_id in self._running_jobs: return self._running_jobs[job_id]['job'] else: raise ValueError('No job present with id {}'.format(job_id)) def _handle_comm_message(self, msg): """ Handles comm messages that come in from the other end of the KBaseJobs channel. All messages (of any use) should have a 'request_type' property. Possible types: * all_status refresh all jobs that are flagged to be looked up. Will send a message back with all lookup status. * job_status refresh the single job given in the 'job_id' field. Sends a message back with that single job's status, or an error message. * stop_update_loop stop the running refresh loop, if there's one going (might be one more pass, depending on the thread state) * start_update_loop reinitialize the refresh loop. * stop_job_update flag the given job id (should be an accompanying 'job_id' field) that the front end knows it's in a terminal state and should no longer have its status looked up in the refresh cycle. 
* start_job_update remove the flag that gets set by stop_job_update (needs an accompanying 'job_id' field) * job_info from the given 'job_id' field, returns some basic info about the job, including the app id, version, app name, and key-value pairs for inputs and parameters (in the parameters id namespace specified by the app spec). """ if 'request_type' in msg['content']['data']: r_type = msg['content']['data']['request_type'] job_id = msg['content']['data'].get('job_id', None) parent_job_id = msg['content']['data'].get('parent_job_id', None) if job_id is not None and job_id not in self._running_jobs and not parent_job_id: # If it's not a real job, just silently ignore the request. # Unless it has a parent job id, then its a child job, so things get muddled. If there's 100+ child jobs, # then this might get tricky to look up all of them. Let it pass through and fail if it's not real. # # TODO: perhaps we should implement request/response here. All we really need is to thread a message # id through self._send_comm_message('job_does_not_exist', {'job_id': job_id, 'request_type': r_type}) return elif parent_job_id is not None: try: self._verify_job_parentage(parent_job_id, job_id) except ValueError as e: self._send_comm_message('job_does_not_exist', {'job_id': job_id, 'parent_job_id': parent_job_id, 'request_type': r_type}) if r_type == 'all_status': self._lookup_all_job_status(ignore_refresh_flag=True) elif r_type == 'job_status': if job_id is not None: self._lookup_job_status(job_id, parent_job_id=parent_job_id) elif r_type == 'job_info': if job_id is not None: self._lookup_job_info(job_id, parent_job_id=parent_job_id) elif r_type == 'stop_update_loop': self.cancel_job_lookup_loop() elif r_type == 'start_update_loop': self._start_job_status_loop() elif r_type == 'stop_job_update': if job_id is not None: if self._running_jobs[job_id]['refresh'] > 0: self._running_jobs[job_id]['refresh'] -= 1 elif r_type == 'start_job_update': if job_id is not None: 
self._running_jobs[job_id]['refresh'] += 1 self._start_job_status_loop() elif r_type == 'delete_job': if job_id is not None: try: self.delete_job(job_id, parent_job_id=parent_job_id) except Exception as e: self._send_comm_message('job_comm_error', {'message': str(e), 'request_type': r_type, 'job_id': job_id}) elif r_type == 'cancel_job': if job_id is not None: try: self.cancel_job(job_id, parent_job_id=parent_job_id) except Exception as e: self._send_comm_message('job_comm_error', {'message': str(e), 'request_type': r_type, 'job_id': job_id}) elif r_type == 'job_logs': if job_id is not None: first_line = msg['content']['data'].get('first_line', 0) num_lines = msg['content']['data'].get('num_lines', None) self._get_job_logs(job_id, parent_job_id=parent_job_id, first_line=first_line, num_lines=num_lines) else: raise ValueError('Need a job id to fetch jobs!') elif r_type == 'job_logs_latest': if job_id is not None: num_lines = msg['content']['data'].get('num_lines', None) try: self._get_latest_job_logs(job_id, parent_job_id=parent_job_id, num_lines=num_lines) except Exception as e: self._send_comm_message('job_comm_error', { 'job_id': job_id, 'message': str(e), 'request_type': r_type}) else: raise ValueError('Need a job id to fetch jobs!') else: self._send_comm_message('job_comm_error', {'message': 'Unknown message', 'request_type': r_type}) raise ValueError('Unknown KBaseJobs message "{}"'.format(r_type)) def _get_latest_job_logs(self, job_id, parent_job_id=None, num_lines=None): job = self.get_job(job_id) if job is None: raise ValueError('job "{}" not found while fetching logs!'.format(job_id)) (max_lines, logs) = job.log() first_line = 0 if num_lines is not None and max_lines > num_lines: first_line = max_lines - num_lines logs = logs[first_line:] self._send_comm_message('job_logs', { 'job_id': job_id, 'first': first_line, 'max_lines': max_lines, 'lines': logs, 'latest': True}) def _get_job_logs(self, job_id, parent_job_id=None, first_line=0, num_lines=None): # if 
parent_job is real, and job_id (the child) is not, just add it to the # list of running jobs and work as normal. job = self.get_job(job_id) if job is None: raise ValueError('job "{}" not found!'.format(job_id)) (max_lines, log_slice) = job.log(first_line=first_line, num_lines=num_lines) self._send_comm_message('job_logs', {'job_id': job_id, 'first': first_line, 'max_lines': max_lines, 'lines': log_slice, 'latest': False}) def delete_job(self, job_id, parent_job_id=None): """ If the job_id doesn't exist, raises a ValueError. Attempts to delete a job, and cancels it first. If the job cannot be canceled, raises an exception. If it can be canceled but not deleted, it gets canceled, then raises an exception. """ if job_id is None: raise ValueError('Job id required for deletion!') if not parent_job_id and job_id not in self._running_jobs: self._send_comm_message('job_does_not_exist', {'job_id': job_id, 'source': 'delete_job'}) return # raise ValueError('Attempting to cancel a Job that does not exist!') try: self.cancel_job(job_id, parent_job_id=parent_job_id) except Exception: raise try: clients.get('user_and_job_state').delete_job(job_id) except Exception: raise if job_id in self._running_jobs: del self._running_jobs[job_id] if job_id in self._completed_job_states: del self._completed_job_states[job_id] self._send_comm_message('job_deleted', {'job_id': job_id}) def cancel_job(self, job_id, parent_job_id=None): """ Cancels a running job, placing it in a canceled state. Does NOT delete the job. Raises an exception if the current user doesn't have permission to cancel the job. 
""" if job_id is None: raise ValueError('Job id required for cancellation!') if not parent_job_id and job_id not in self._running_jobs: self._send_comm_message('job_does_not_exist', {'job_id': job_id, 'source': 'cancel_job'}) return try: state = self._get_job_state(job_id, parent_job_id=parent_job_id) if state.get('canceled', 0) == 1 or state.get('finished', 0) == 1: # It's already finished, don't try to cancel it again. return except Exception as e: raise ValueError('Unable to get Job state') # Stop updating the job status while we try to cancel. # Also, set it to have a special state of 'canceling' while we're doing the cancel if not parent_job_id: is_refreshing = self._running_jobs[job_id].get('refresh', 0) self._running_jobs[job_id]['refresh'] = 0 self._running_jobs[job_id]['canceling'] = True try: clients.get('job_service').cancel_job({'job_id': job_id}) except Exception as e: new_e = transform_job_exception(e) error = { 'error': 'Unable to get cancel job', 'message': getattr(new_e, 'message', 'Unknown reason'), 'code': getattr(new_e, 'code', -1), 'source': getattr(new_e, 'source', 'jobmanager'), 'name': getattr(new_e, 'name', type(e).__name__), 'request_type': 'cancel_job', 'job_id': job_id } self._send_comm_message('job_comm_error', error) raise(e) finally: if not parent_job_id: self._running_jobs[job_id]['refresh'] = is_refreshing del self._running_jobs[job_id]['canceling'] # Rather than a separate message, how about triggering a job-status message: self._lookup_job_status(job_id, parent_job_id=parent_job_id) def _send_comm_message(self, msg_type, content): """ Sends a ipykernel.Comm message to the KBaseJobs channel with the given msg_type and content. These just get encoded into the message itself. 
""" msg = { 'msg_type': msg_type, 'content': content } if self._comm is None: self._comm = Comm(target_name='KBaseJobs', data={}) self._comm.on_msg(self._handle_comm_message) self._comm.send(msg) def _get_all_job_states(self, job_ids=None): """ Returns the state for all running jobs """ # 1. Get list of ids if job_ids is None: job_ids = self._running_jobs.keys() # 1.5 Go through job ids and remove ones that aren't found. job_ids = [j for j in job_ids if j in self._running_jobs] # 2. Foreach, check if in completed cache. If so, grab the status. If not, enqueue id # for batch lookup. job_states = dict() jobs_to_lookup = list() for job_id in job_ids: if job_id in self._completed_job_states: job_states[job_id] = dict(self._completed_job_states[job_id]) else: jobs_to_lookup.append(job_id) # 3. Lookup those jobs what need it. Cache 'em as we go, if finished. try: fetched_states = clients.get('job_service').check_jobs({'job_ids': jobs_to_lookup}) except Exception as e: kblogging.log_event(self._log, 'get_all_job_states_error', {'err': str(e)}) return {} error_states = fetched_states.get('check_errors', {}) fetched_states = fetched_states.get('job_states', {}) for job_id in jobs_to_lookup: if job_id in fetched_states: state = fetched_states[job_id] state['cell_id'] = self._running_jobs[job_id]['job'].cell_id state['run_id'] = self._running_jobs[job_id]['job'].run_id if state.get('finished', 0) == 1: self._completed_job_states[state['job_id']] = dict(state) job_states[state['job_id']] = state elif job_id in error_states: error = error_states[job_id] job_states[state['job_id']] = {'lookup_error': error} return job_states def _get_job_state(self, job_id, parent_job_id=None): if parent_job_id is not None: self._verify_job_parentage(parent_job_id, job_id) if job_id is None or job_id not in self._running_jobs: raise ValueError('job_id {} not found'.format(job_id)) if job_id in self._completed_job_states: return dict(self._completed_job_states[job_id]) state = 
self._running_jobs[job_id]['job'].state() if state.get('finished', 0) == 1: self._completed_job_states[job_id] = dict(state) return dict(state)
class CommSocket(object):
    """
    Manages the Comm connection between IPython and the browser (client).

    Comms are 2 way, with the CommSocket being able to publish a message
    via the send_json method, and handle a message with on_message. On the
    JS side figure.send_message and figure.ws.onmessage do the sending and
    receiving respectively.
    """

    def __init__(self, manager):
        # supports_binary stays None until the frontend tells us (see on_message)
        self.supports_binary = None
        self.manager = manager
        self.uuid = str(uuid())
        # Publish an output area with a unique ID. The javascript can then
        # hook into this area.
        display(HTML("<div id=%r></div>" % self.uuid))
        try:
            self.comm = Comm('matplotlib', data={'id': self.uuid})
        except AttributeError:
            # Comm raises AttributeError when there is no kernel to attach to
            raise RuntimeError('Unable to create an IPython notebook Comm '
                               'instance. Are you in the IPython notebook?')
        self.comm.on_msg(self.on_message)

        # Bind the manager into the close callback via a local, and track
        # externally-initiated closes separately from our own on_close().
        manager = self.manager
        self._ext_close = False

        def _on_close(close_message):
            self._ext_close = True
            manager.remove_comm(close_message['content']['comm_id'])
            manager.clearup_closed()

        self.comm.on_close(_on_close)

    def is_open(self):
        # Open means: not closed from the frontend AND the comm object itself
        # hasn't been closed. (Reads Comm's private _closed flag.)
        return not (self._ext_close or self.comm._closed)

    def on_close(self):
        # When the socket is closed, deregister the websocket with
        # the FigureManager.
        if self.is_open():
            try:
                self.comm.close()
            except KeyError:
                # apparently already cleaned it up?
                pass

    def send_json(self, content):
        # JSON payloads are wrapped in a {'data': ...} envelope for the JS side.
        self.comm.send({'data': json.dumps(content)})

    def send_binary(self, blob):
        # The comm is ascii, so we always send the image in base64
        # encoded data URL form.
        data = b64encode(blob)
        if six.PY3:
            data = data.decode('ascii')
        data_uri = "data:image/png;base64,{0}".format(data)
        self.comm.send({'data': data_uri})

    def on_message(self, message):
        # The 'supports_binary' message is relevant to the
        # websocket itself. The other messages get passed along
        # to matplotlib as-is.

        # Every message has a "type" and a "figure_id".
        message = json.loads(message['content']['data'])
        if message['type'] == 'closing':
            self.on_close()
            self.manager.clearup_closed()
        elif message['type'] == 'supports_binary':
            self.supports_binary = message['value']
        else:
            self.manager.handle_json(message)
class JobComm:
    """
    The main JobComm channel. This is the kernel-side of the connection, and routes
    requests for job information from various app cells (or the front end in general)
    to the right function.

    This has a handle on the JobManager, which does the work of fetching job information
    and statuses.

    The JobComm officially exposes the channel for other things to use. Anything that
    needs to send messages about Jobs to the front end should use JobComm.send_comm_message.

    It also maintains the lookup loop thread. This is a threading.Timer that, after
    some interval, will lookup the status of all running jobs. If there are no jobs to
    look up, this cancels itself.

    Allowed messages:
    * job_status - return the job state for a single job (requires a job_id)
    * job_status_all - return job state for all jobs in this Narrative.
    * job_info - return basic job info for a single job (requires a job_id)
    * start_job_update - tells the update loop to include a job when updating (requires a job_id)
    * stop_job_update - has the update loop not include a job when updating (requires a job_id)
    * cancel_job - cancels a running job, if it hasn't otherwise terminated (requires a job_id)
    * retry_job - retries a job (requires a job_id)
    * job_logs - sends job logs back over the comm channel (requires a job id)
    """

    # An instance of this class. It's meant to be a singleton, so this just gets created and
    # returned once.
    __instance = None

    # The kernel job comm channel that talks to the front end.
    _comm = None

    # The JobManager that actually manages things.
    _jm = None

    # Maps message type -> bound handler; built lazily in __init__.
    _msg_map = None
    _running_lookup_loop = False
    _lookup_timer = None
    _log = kblogging.get_logger(__name__)

    def __new__(cls):
        # Enforce the singleton: always hand back the one shared instance.
        if JobComm.__instance is None:
            JobComm.__instance = object.__new__(cls)
        return JobComm.__instance

    def __init__(self):
        # __init__ runs on every JobComm() call (singleton), so each attribute is
        # guarded to make re-initialization a no-op.
        if self._comm is None:
            self._comm = Comm(target_name="KBaseJobs", data={})
            self._comm.on_msg(self._handle_comm_message)
        if self._jm is None:
            self._jm = JobManager()
        if self._msg_map is None:
            self._msg_map = {
                MESSAGE_TYPE["CANCEL"]: self._cancel_jobs,
                MESSAGE_TYPE["CELL_JOB_STATUS"]: self._get_job_states_by_cell_id,
                MESSAGE_TYPE["INFO"]: self._get_job_info,
                MESSAGE_TYPE["LOGS"]: self._get_job_logs,
                MESSAGE_TYPE["RETRY"]: self._retry_jobs,
                MESSAGE_TYPE["START_UPDATE"]: self._modify_job_updates,
                MESSAGE_TYPE["STATUS"]: self._get_job_states,
                MESSAGE_TYPE["STATUS_ALL"]: self._get_all_job_states,
                MESSAGE_TYPE["STOP_UPDATE"]: self._modify_job_updates,
            }

    def _get_job_ids(self, req: JobRequest = None):
        """
        Extract the list of job IDs that a request refers to, either by
        expanding a batch ID or by taking the request's explicit job_id_list.

        NOTE(review): the ``= None`` default looks unusable — ``req.has_batch_id()``
        would raise AttributeError on None; presumably callers always pass a
        JobRequest. Confirm before relying on the default.
        """
        if req.has_batch_id():
            return self._jm.update_batch_job(req.batch_id)

        # job_id_list access may raise (e.g. if the request carried a different
        # input type); normalize that into a JobRequestException.
        try:
            return req.job_id_list
        except Exception as ex:
            raise JobRequestException(ONE_INPUT_TYPE_ONLY_ERR) from ex

    def start_job_status_loop(
        self,
        init_jobs: bool = False,
        cell_list: List[str] = None,
    ) -> None:
        """
        Starts the job status lookup loop. This runs every LOOKUP_TIMER_INTERVAL seconds.

        :param init_jobs: If init_jobs=True, this attempts to (re-)initialize
            the JobManager's list of known jobs from the workspace.
        :param cell_list: from FE, the list of extant cell IDs
        """
        self._running_lookup_loop = True
        if init_jobs:
            try:
                self._jm.initialize_jobs(cell_list)
            except Exception as e:
                error = {
                    "error": "Unable to get initial jobs list",
                    "message": getattr(e, "message", UNKNOWN_REASON),
                    "code": getattr(e, "code", -1),
                    "source": getattr(e, "source", "jobmanager"),
                    "name": getattr(e, "name", type(e).__name__),
                }
                self.send_comm_message(MESSAGE_TYPE["ERROR"], error)
                # if job init failed, set the lookup loop var back to False and return
                self._running_lookup_loop = False
                return
        # only spawn a new loop if no timer is already pending
        if self._lookup_timer is None:
            self._lookup_job_status_loop()

    def stop_job_status_loop(self, *args, **kwargs) -> None:
        """
        Stops the job status lookup loop if it's running. Otherwise, this
        effectively does nothing. (*args/**kwargs absorb comm-channel call
        signatures.)
        """
        if self._lookup_timer:
            self._lookup_timer.cancel()
            self._lookup_timer = None
        self._running_lookup_loop = False

    def _lookup_job_status_loop(self) -> None:
        """
        Run a loop that will look up job info. After running, this spawns a Timer
        thread on a loop to run itself again. LOOKUP_TIMER_INTERVAL sets the
        frequency at which the loop runs.
        """
        all_job_states = self._get_all_job_states()
        # stop when there's nothing left to watch, or someone asked us to stop
        if len(all_job_states) == 0 or not self._running_lookup_loop:
            self.stop_job_status_loop()
        else:
            self._lookup_timer = threading.Timer(
                LOOKUP_TIMER_INTERVAL, self._lookup_job_status_loop
            )
            self._lookup_timer.start()

    def _get_all_job_states(
        self, req: JobRequest = None, ignore_refresh_flag: bool = False
    ) -> dict:
        """
        Fetches status of all jobs in the current workspace and sends them to
        the front end. req can be None, as it's not used.
        """
        all_job_states = self._jm.get_all_job_states(
            ignore_refresh_flag=ignore_refresh_flag
        )
        self.send_comm_message(MESSAGE_TYPE["STATUS_ALL"], all_job_states)
        return all_job_states

    def _get_job_states_by_cell_id(self, req: JobRequest = None) -> dict:
        """
        Fetches status of all jobs associated with the given cell ID(s).

        :param req: a JobRequest with the cell_id_list of interest
        :returns: dict in the form
        {
            "jobs": {
                # dict with job IDs as keys and job states as values
                "job_one": { ... },
                "job_two": { ... },
            },
            "mapping": {
                # dict with cell IDs as keys and values being the set of job IDs associated
                # with that cell
                "cell_one": [ "job_one", "job_two", ... ],
                "cell_two": [ ... ],
            }
        }
        """
        cell_job_states = self._jm.get_job_states_by_cell_id(
            cell_id_list=req.cell_id_list
        )
        self.send_comm_message(MESSAGE_TYPE["CELL_JOB_STATUS"], cell_job_states)
        return cell_job_states

    def _get_job_info(self, req: JobRequest) -> dict:
        """
        Look up job info. This is just some high-level generic information about
        the running job, including the app id, name, and job parameters.

        :param req: a JobRequest with the job_id_list of interest
        :returns: a dict keyed with job IDs and with values of dicts with the
            following keys:
            - app_id - str - module/name,
            - app_name - str - name of the app as it shows up in the Narrative interface
            - batch_id - str - the batch parent ID (if appropriate)
            - job_id - str - just re-reporting the id string
            - job_params - dict - the params that were passed to that particular job
        """
        job_id_list = self._get_job_ids(req)
        job_info = self._jm.get_job_info(job_id_list)
        self.send_comm_message(MESSAGE_TYPE["INFO"], job_info)
        return job_info

    def __get_job_states(self, job_id_list) -> dict:
        """
        Look up job states.

        Returns a dictionary of job state information indexed by job ID.
        """
        output_states = self._jm.get_job_states(job_id_list)
        self.send_comm_message(MESSAGE_TYPE["STATUS"], output_states)
        return output_states

    def get_job_state(self, job_id: str) -> dict:
        """
        This differs from the _get_job_state (underscored version) in that
        it just takes a job_id string, not a JobRequest.
        """
        return self.__get_job_states([job_id])

    def _get_job_states(self, req: JobRequest) -> dict:
        # JobRequest-shaped wrapper around __get_job_states
        job_id_list = self._get_job_ids(req)
        return self.__get_job_states(job_id_list)

    def _modify_job_updates(self, req: JobRequest) -> dict:
        """
        Modifies how many things want to listen to a job update.
        If this is a request to start a job update, then this starts the update
        loop that returns update messages across the job channel.
        If this is a request to stop a job update, then this sends that request
        to the JobManager, which might have the side effect of shutting down the
        update loop if there's no longer anything requesting job status.

        If the given job_id in the request doesn't exist in the current
        Narrative, or is None, this raises a JobRequestException.
        """
        job_id_list = self._get_job_ids(req)
        update_type = req.request_type
        if update_type == MESSAGE_TYPE["START_UPDATE"]:
            update_refresh = True
        elif update_type == MESSAGE_TYPE["STOP_UPDATE"]:
            update_refresh = False
        else:
            # this should be impossible
            raise JobRequestException("Unknown request")
        self._jm.modify_job_refresh(job_id_list, update_refresh)
        if update_refresh:
            self.start_job_status_loop()
        output_states = self._jm.get_job_states(job_id_list)
        self.send_comm_message(MESSAGE_TYPE["STATUS"], output_states)
        return output_states

    def _cancel_jobs(self, req: JobRequest) -> dict:
        """
        This cancels a running job.
        If there are no valid jobs, this raises a JobRequestException.
        If there's an error while attempting to cancel, this raises a
        NarrativeError. In the end, after a successful cancel, this finishes up
        by fetching and returning the job state with the new status.
        """
        job_id_list = self._get_job_ids(req)
        cancel_results = self._jm.cancel_jobs(job_id_list)
        self.send_comm_message(MESSAGE_TYPE["STATUS"], cancel_results)
        return cancel_results

    def _retry_jobs(self, req: JobRequest) -> dict:
        """Retries the requested jobs and reports the results as a RETRY message."""
        job_id_list = self._get_job_ids(req)
        retry_results = self._jm.retry_jobs(job_id_list)
        self.send_comm_message(MESSAGE_TYPE["RETRY"], retry_results)
        return retry_results

    def _get_job_logs(self, req: JobRequest) -> dict:
        """
        This returns a set of job logs based on the info in the request.
        """
        job_id_list = self._get_job_ids(req)
        log_output = self._jm.get_job_logs_for_list(
            job_id_list,
            num_lines=req.rq_data.get("num_lines", None),
            first_line=req.rq_data.get("first_line", 0),
            latest=req.rq_data.get("latest", False),
        )
        self.send_comm_message(MESSAGE_TYPE["LOGS"], log_output)
        return log_output

    def _handle_comm_message(self, msg: dict) -> dict:
        """
        Handles comm messages that come in from the other end of the KBaseJobs
        channel. Messages get translated into one or more JobRequest objects,
        which are then passed to the right handler, based on the request.

        A handler dictionary is created on JobComm creation.

        Any unknown request is returned over the channel with message type
        'job_error', and a JobRequestException is raised.
        """
        # exc_to_msg converts raised exceptions into error messages on the channel
        with exc_to_msg(msg):
            request = JobRequest(msg)

            kblogging.log_event(
                self._log, "handle_comm_message", {"msg": request.request_type}
            )
            if request.request_type not in self._msg_map:
                raise JobRequestException(
                    f"Unknown KBaseJobs message '{request.request_type}'"
                )

            return self._msg_map[request.request_type](request)

    def send_comm_message(self, msg_type: str, content: dict) -> None:
        """
        Sends a ipykernel.Comm message to the KBaseJobs channel with the given
        msg_type and content. These just get encoded into the message itself.
        """
        msg = {"msg_type": msg_type, "content": content}
        self._comm.send(msg)

    def send_error_message(
        self, req: Union[JobRequest, dict, str], content: dict = None
    ) -> None:
        """
        Sends a comm message over the KBaseJobs channel as an error. This will
        have msg_type set to ERROR ('job_error'), and include the original
        request in the message content as "source".

        req can be the original request message or its JobRequest form.
        Since the latter is made from the former, they have the same
        information. It can also be a string or None if this context manager is
        invoked outside of a JC request

        This sends a packet that looks like:
        {
            request: the original JobRequest data object, function params, or function name
            source: the function request that spawned the error
            other fields about the error, dependent on the content.
        }
        """
        error_content = {}
        if isinstance(req, JobRequest):
            error_content["request"] = req.rq_data
            error_content["source"] = req.request_type
        elif isinstance(req, dict):
            data = req.get("content", {}).get("data", {})
            error_content["request"] = data
            error_content["source"] = data.get("request_type")
        elif isinstance(req, str) or req is None:
            error_content["request"] = req
            error_content["source"] = req

        if content is not None:
            error_content.update(content)

        self.send_comm_message(MESSAGE_TYPE["ERROR"], error_content)
class Widget(LoggingConfigurable):
    """
    Kernel-side model of an interactive widget.

    Maintains a Comm to the front-end and keeps traits flagged with
    ``sync=True`` synchronized with the browser-side Backbone model.
    """

    #-------------------------------------------------------------------------
    # Class attributes
    #-------------------------------------------------------------------------
    # Single global hook invoked whenever any widget is constructed.
    _widget_construction_callback = None
    # Registry of live widgets, keyed by model (comm) id.
    widgets = {}
    widget_types = {}

    @staticmethod
    def on_widget_constructed(callback):
        """Registers a callback to be called when a widget is constructed.

        The callback must have the following signature:
        callback(widget)"""
        Widget._widget_construction_callback = callback

    @staticmethod
    def _call_widget_constructed(widget):
        """Static method, called when a widget is constructed."""
        if Widget._widget_construction_callback is not None and callable(
                Widget._widget_construction_callback):
            Widget._widget_construction_callback(widget)

    @staticmethod
    def handle_comm_opened(comm, msg):
        """Static method, called when a widget is constructed."""
        # The front-end names the widget class to instantiate; import it and
        # attach the already-open comm.
        widget_class = import_item(str(msg['content']['data']['widget_class']))
        widget = widget_class(comm=comm)

    #-------------------------------------------------------------------------
    # Traits
    #-------------------------------------------------------------------------
    _model_module = Unicode(None, allow_none=True, help="""A requirejs module name
        in which to find _model_name. If empty, look in the global registry.""")
    _model_name = Unicode('WidgetModel', help="""Name of the backbone model
        registered in the front-end to create and sync this widget with.""")
    _view_module = Unicode(help="""A requirejs module in which to find _view_name.
        If empty, look in the global registry.""", sync=True)
    _view_name = Unicode(None, allow_none=True, help="""Default view registered in the front-end
        to use to represent the widget.""", sync=True)
    comm = Instance('ipykernel.comm.Comm', allow_none=True)
    msg_throttle = Int(3, sync=True, help="""Maximum number of msgs the
        front-end can send before receiving an idle msg from the back-end.""")
    version = Int(0, sync=True, help="""Widget's version""")
    keys = List()

    def _keys_default(self):
        # Default set of synced trait names (everything marked sync=True).
        return [name for name in self.traits(sync=True)]

    # State last received from the front-end; used to avoid echoing it back.
    _property_lock = Dict()
    # Nesting counter for hold_sync(); >0 means buffer state sends.
    _send_state_lock = Int(0)
    _states_to_send = Set()
    _display_callbacks = Instance(CallbackDispatcher, ())
    _msg_callbacks = Instance(CallbackDispatcher, ())

    #-------------------------------------------------------------------------
    # (Con/de)structor
    #-------------------------------------------------------------------------
    def __init__(self, **kwargs):
        """Public constructor"""
        self._model_id = kwargs.pop('model_id', None)
        super(Widget, self).__init__(**kwargs)
        Widget._call_widget_constructed(self)
        self.open()

    def __del__(self):
        """Object disposal"""
        # NOTE(review): closing a comm from __del__ can run during interpreter
        # shutdown when globals are partially torn down — confirm this is safe
        # in the supported kernel versions.
        self.close()

    #-------------------------------------------------------------------------
    # Properties
    #-------------------------------------------------------------------------
    def open(self):
        """Open a comm to the frontend if one isn't already open."""
        if self.comm is None:
            args = dict(target_name='ipython.widget',
                        data={'model_name': self._model_name,
                              'model_module': self._model_module})
            if self._model_id is not None:
                args['comm_id'] = self._model_id
            self.comm = Comm(**args)

    def _comm_changed(self, name, new):
        """Called when the comm is changed."""
        if new is None:
            return
        self._model_id = self.model_id
        self.comm.on_msg(self._handle_msg)
        Widget.widgets[self.model_id] = self

        # first update
        self.send_state()

    @property
    def model_id(self):
        """Gets the model id of this widget.

        If a Comm doesn't exist yet, a Comm will be created automagically."""
        return self.comm.comm_id

    #-------------------------------------------------------------------------
    # Methods
    #-------------------------------------------------------------------------
    def close(self):
        """Close method.

        Closes the underlying comm.
        When the comm is closed, all of the widget views are automatically
        removed from the front-end."""
        if self.comm is not None:
            Widget.widgets.pop(self.model_id, None)
            self.comm.close()
            self.comm = None

    def send_state(self, key=None):
        """Sends the widget state, or a piece of it, to the front-end.

        Parameters
        ----------
        key : unicode, or iterable (optional)
            A single property's name or iterable of property names to sync with the front-end.
        """
        state, buffer_keys, buffers = self.get_state(key=key)
        msg = {"method": "update", "state": state}
        if buffer_keys:
            msg['buffers'] = buffer_keys
        self._send(msg, buffers=buffers)

    def get_state(self, key=None):
        """Gets the widget state, or a piece of it.

        Parameters
        ----------
        key : unicode or iterable (optional)
            A single property's name or iterable of property names to get.

        Returns
        -------
        state : dict of states
        buffer_keys : list of strings
            the values that are stored in buffers
        buffers : list of binary memoryviews
            values to transmit in binary
        metadata : dict
            metadata for each field: {key: metadata}
        """
        if key is None:
            keys = self.keys
        elif isinstance(key, string_types):
            keys = [key]
        # NOTE(review): collections.Iterable was removed in Python 3.10 —
        # collections.abc.Iterable is the modern spelling; confirm the
        # supported Python range before changing.
        elif isinstance(key, collections.Iterable):
            keys = key
        else:
            raise ValueError("key must be a string, an iterable of keys, or None")
        state = {}
        buffers = []
        buffer_keys = []
        for k in keys:
            # per-trait serializer, falling back to the identity _trait_to_json
            f = self.trait_metadata(k, 'to_json', self._trait_to_json)
            value = getattr(self, k)
            serialized = f(value)
            # memoryviews travel as binary buffers, everything else as JSON state
            if isinstance(serialized, memoryview):
                buffers.append(serialized)
                buffer_keys.append(k)
            else:
                state[k] = serialized
        return state, buffer_keys, buffers

    def set_state(self, sync_data):
        """Called when a state is received from the front-end."""
        # The order of these context managers is important. Properties must
        # be locked when the hold_trait_notification context manager is
        # released and notifications are fired.
        with self._lock_property(**sync_data), self.hold_trait_notifications():
            for name in sync_data:
                if name in self.keys:
                    from_json = self.trait_metadata(name, 'from_json',
                                                    self._trait_from_json)
                    setattr(self, name, from_json(sync_data[name]))

    def send(self, content, buffers=None):
        """Sends a custom msg to the widget model in the front-end.

        Parameters
        ----------
        content : dict
            Content of the message to send.
        buffers : list of binary buffers
            Binary buffers to send with message
        """
        self._send({"method": "custom", "content": content}, buffers=buffers)

    def on_msg(self, callback, remove=False):
        """(Un)Register a custom msg receive callback.

        Parameters
        ----------
        callback: callable
            callback will be passed three arguments when a message arrives::

                callback(widget, content, buffers)

        remove: bool
            True if the callback should be unregistered."""
        self._msg_callbacks.register_callback(callback, remove=remove)

    def on_displayed(self, callback, remove=False):
        """(Un)Register a widget displayed callback.

        Parameters
        ----------
        callback: method handler
            Must have a signature of::

                callback(widget, **kwargs)

            kwargs from display are passed through without modification.
        remove: bool
            True if the callback should be unregistered."""
        self._display_callbacks.register_callback(callback, remove=remove)

    def add_trait(self, traitname, trait):
        """Dynamically add a trait attribute to the Widget."""
        super(Widget, self).add_trait(traitname, trait)
        # newly added sync traits join the synced key set and push immediately
        if trait.get_metadata('sync'):
            self.keys.append(traitname)
            self.send_state(traitname)

    #-------------------------------------------------------------------------
    # Support methods
    #-------------------------------------------------------------------------
    @contextmanager
    def _lock_property(self, **properties):
        """Lock a property-value pair.

        The value should be the JSON state of the property.

        NOTE: This, in addition to the single lock for all state changes, is
        flawed.  In the future we may want to look into buffering state changes
        back to the front-end."""
        self._property_lock = properties
        try:
            yield
        finally:
            self._property_lock = {}

    @contextmanager
    def hold_sync(self):
        """Hold syncing any state until the context manager is released"""
        # We increment a value so that this can be nested.  Syncing will happen when
        # all levels have been released.
        self._send_state_lock += 1
        try:
            yield
        finally:
            self._send_state_lock -= 1
            if self._send_state_lock == 0:
                self.send_state(self._states_to_send)
                self._states_to_send.clear()

    def _should_send_property(self, key, value):
        """Check the property lock (property_lock)"""
        to_json = self.trait_metadata(key, 'to_json', self._trait_to_json)
        # Suppress the echo of a value the front-end just sent us.
        if (key in self._property_lock
                and to_json(value) == self._property_lock[key]):
            return False
        elif self._send_state_lock > 0:
            # buffered by hold_sync(); queue it for the batched send
            self._states_to_send.add(key)
            return False
        else:
            return True

    # Event handlers
    @_show_traceback
    def _handle_msg(self, msg):
        """Called when a msg is received from the front-end"""
        data = msg['content']['data']
        method = data['method']

        # Handle backbone sync methods CREATE, PATCH, and UPDATE all in one.
        if method == 'backbone':
            if 'sync_data' in data:
                # get binary buffers too
                sync_data = data['sync_data']
                for i, k in enumerate(data.get('buffer_keys', [])):
                    sync_data[k] = msg['buffers'][i]
                self.set_state(sync_data)  # handles all methods

        # Handle a state request.
        elif method == 'request_state':
            self.send_state()

        # Handle a custom msg from the front-end.
        elif method == 'custom':
            if 'content' in data:
                self._handle_custom_msg(data['content'], msg['buffers'])

        # Catch remainder.
        else:
            self.log.error(
                'Unknown front-end to back-end widget msg with method "%s"' % method)

    def _handle_custom_msg(self, content, buffers):
        """Called when a custom msg is received."""
        self._msg_callbacks(self, content, buffers)

    def _notify_trait(self, name, old_value, new_value):
        """Called when a property has been changed."""
        # Trigger default traitlet callback machinery.  This allows any user
        # registered validation to be processed prior to allowing the widget
        # machinery to handle the state.
        LoggingConfigurable._notify_trait(self, name, old_value, new_value)

        # Send the state after the user registered callbacks for trait changes
        # have all fired (allows for user to validate values).
        if self.comm is not None and name in self.keys:
            # Make sure this isn't information that the front-end just sent us.
            if self._should_send_property(name, new_value):
                # Send new state to front-end
                self.send_state(key=name)

    def _handle_displayed(self, **kwargs):
        """Called when a view has been displayed for this widget instance"""
        self._display_callbacks(self, **kwargs)

    def _trait_to_json(self, x):
        """Convert a trait value to json."""
        return x

    def _trait_from_json(self, x):
        """Convert json values to objects."""
        return x

    def _ipython_display_(self, **kwargs):
        """Called when `IPython.display.display` is called on the widget."""
        # Show view.
        if self._view_name is not None:
            self._send({"method": "display"})
        self._handle_displayed(**kwargs)

    def _send(self, msg, buffers=None):
        """Sends a message to the model in the front-end."""
        self.comm.send(data=msg, buffers=buffers)
class JobComm:
    """
    The main JobComm channel. This is the kernel-side of the connection, and
    routes requests for job information from various app cells (or the front
    end in general) to the right function.

    This has a handle on the JobManager, which does the work of fetching job
    information and statuses.

    The JobComm officially exposes the channel for other things to use.
    Anything that needs to send messages about Jobs to the front end should
    use JobComm.send_comm_message.

    It also maintains the lookup loop thread. This is a threading.Timer that,
    after some interval, will lookup the status of all running jobs. If there
    are no jobs to look up, this cancels itself.

    Allowed messages:
    * all_status - return job state for all jobs in this Narrative.
    * job_status - return the job state for a single job (requires a job_id)
    * job_info - return basic job info for a single job (requires a job_id)
    * start_update_loop - starts a looping thread that runs returns all job
        info for running jobs
    * stop_update_loop - stops the automatic update loop
    * start_job_update - tells the update loop to include a job when updating
        (requires a job_id)
    * stop_job_update - has the update loop not include a job when updating
        (requires a job_id)
    * cancel_job - cancels a running job, if it hasn't otherwise terminated
        (requires a job_id)
    * job_logs - sends job logs back over the comm channel
        (requires a job id and first line)
    * job_logs_latest - sends the most recent job logs over the comm channel
        (requires a job_id)
    """
    # An instance of this class. It's meant to be a singleton, so this just
    # gets created and returned once.
    __instance = None

    # The kernel job comm channel that talks to the front end.
    _comm = None

    # The JobManager that actually manages things.
    _jm = None

    # Maps request_type strings to bound handler methods; built once in __init__.
    _msg_map = None
    # True while the 10-second status lookup loop should keep rescheduling.
    _running_lookup_loop = False
    # The currently-armed threading.Timer for the loop, or None when stopped.
    _lookup_timer = None
    _log = kblogging.get_logger(__name__)

    def __new__(cls):
        # Classic singleton: reuse the one instance across all constructions.
        if JobComm.__instance is None:
            JobComm.__instance = object.__new__(cls)
        return JobComm.__instance

    def __init__(self):
        # Because __init__ runs on every JobComm() call (even though __new__
        # returns the singleton), each shared resource is guarded so it is
        # only created once.
        if self._comm is None:
            self._comm = Comm(target_name="KBaseJobs", data={})
            self._comm.on_msg(self._handle_comm_message)
        if self._jm is None:
            self._jm = jobmanager.JobManager()
        if self._msg_map is None:
            self._msg_map = {
                "all_status": self._lookup_all_job_states,
                "job_status": self._lookup_job_state,
                "job_info": self._lookup_job_info,
                "start_update_loop": self.start_job_status_loop,
                "stop_update_loop": self.stop_job_status_loop,
                "start_job_update": self._modify_job_update,
                "stop_job_update": self._modify_job_update,
                "cancel_job": self._cancel_job,
                "job_logs": self._get_job_logs,
                "job_logs_latest": self._get_job_logs
            }

    def _verify_job_id(self, req: JobRequest) -> None:
        """Raise ValueError (and notify the front end) if req has no job_id."""
        if req.job_id is None:
            self.send_error_message("job_does_not_exist", req)
            raise ValueError(
                f"Job id required to process {req.request} request")

    def start_job_status_loop(self, *args, **kwargs) -> None:
        """
        Starts the job status lookup loop. This runs every 10 seconds.

        This has the bare *args and **kwargs to handle the case where this
        comes in as a job channel request (gets a JobRequest arg), or has the
        "init_jobs" kwarg.

        If init_jobs=True, this attempts to reinitialize the JobManager's
        list of known jobs from the workspace.
        """
        self._running_lookup_loop = True
        if kwargs.get("init_jobs", False):
            try:
                self._jm.initialize_jobs()
            except Exception as e:
                # Report the failure to the front end, but still start the
                # loop below — best-effort initialization.
                error = {
                    "error": "Unable to get initial jobs list",
                    "message": getattr(e, "message", "Unknown reason"),
                    "code": getattr(e, "code", -1),
                    "source": getattr(e, "source", "jobmanager"),
                    "name": getattr(e, "name", type(e).__name__),
                    "service": "execution_engine2"
                }
                self.send_comm_message("job_init_err", error)
        if self._lookup_timer is None:
            # Only kick off a new loop if one isn't already scheduled.
            self._lookup_job_status_loop()

    def stop_job_status_loop(self, *args, **kwargs) -> None:
        """
        Stops the job status lookup loop if it's running.
        Otherwise, this effectively does nothing.
        """
        if self._lookup_timer:
            self._lookup_timer.cancel()
            self._lookup_timer = None
        self._running_lookup_loop = False

    def _lookup_job_status_loop(self) -> None:
        """
        Run a loop that will look up job info. After running, this spawns a
        Timer thread on a 10 second loop to run itself again.

        Self-cancels (via stop_job_status_loop) when there are no job
        statuses left to report or the loop has been asked to stop.
        """
        job_statuses = self._lookup_all_job_states(None)
        if len(job_statuses) == 0 or not self._running_lookup_loop:
            self.stop_job_status_loop()
        else:
            self._lookup_timer = threading.Timer(10, self._lookup_job_status_loop)
            self._lookup_timer.start()

    def _lookup_all_job_states(self, req: JobRequest) -> dict:
        """
        Fetches status of all jobs in the current workspace and sends them to
        the front end. req can be None, as it's not used.
        """
        job_statuses = self._jm.lookup_all_job_states(ignore_refresh_flag=True)
        self.send_comm_message("job_status_all", job_statuses)
        return job_statuses

    def _lookup_job_info(self, req: JobRequest) -> dict:
        """
        Looks up job info. This is just some high-level generic information
        about the running job, including the app id, name, and job parameters.

        :param req: a JobRequest with the job_id of interest
        :returns: a dict with the following keys:
            - app_id - str - module/name,
            - app_name - str - name of the app as it shows up in the
              Narrative interface
            - job_id - str - just re-reporting the id string
            - job_params - dict - the params that were passed to that
              particular job
        :raises ValueError: if the job_id is missing or unknown (the error is
            also reported over the comm channel before re-raising)
        """
        self._verify_job_id(req)
        try:
            job_info = self._jm.lookup_job_info(req.job_id)
            self.send_comm_message("job_info", job_info)
            return job_info
        except ValueError as e:
            self.send_error_message("job_does_not_exist", req)
            raise

    def lookup_job_state(self, job_id: str) -> dict:
        """
        This differs from the _lookup_job_state (underscored version) in that
        it just takes a job_id string, not a JobRequest. It, however,
        functions the same, by creating a JobRequest and forwarding it to the
        request version.

        Therefore, it sends the job message to the browser over the right
        channel, and also returns the job state (or raises a ValueError if
        not found).
        """
        # Wrap the bare id in the same message envelope a front-end request
        # would use, so the underscored handler can treat both paths alike.
        req = JobRequest({
            "content": {
                "data": {
                    "request_type": "job_status",
                    "job_id": job_id
                }
            }
        })
        return self._lookup_job_state(req)

    def _lookup_job_state(self, req: JobRequest) -> dict:
        """
        Look up job state, send it over the comm channel, and return it.

        :raises ValueError: if the job_id is missing or unknown (the error is
            also reported over the comm channel before re-raising)
        """
        self._verify_job_id(req)
        try:
            job_state = self._jm.get_job_state(req.job_id)
            self.send_comm_message("job_status", job_state)
            return job_state
        except ValueError as e:
            # kblogging.log_event(self._log, "lookup_job_state_error", {"err": str(e)})
            self.send_error_message("job_does_not_exist", req)
            raise

    def _modify_job_update(self, req: JobRequest) -> None:
        """
        Modifies how many things want to listen to a job update.
        If this is a request to start a job update, then this starts the
        update loop that returns update messages across the job channel.
        If this is a request to stop a job update, then this sends that
        request to the JobManager, which might have the side effect of
        shutting down the update loop if there's no longer anything
        requesting job status.

        If the given job_id in the request doesn't exist in the current
        Narrative, or is None, this raises a ValueError.
        """
        self._verify_job_id(req)
        # Both "start_job_update" and "stop_job_update" route here; the
        # request type decides the direction of the refresh-count change.
        update_adjust = 1 if req.request == "start_job_update" else -1
        self._jm.modify_job_refresh(req.job_id, update_adjust)
        if update_adjust == 1:
            self.start_job_status_loop()

    def _cancel_job(self, req: JobRequest) -> None:
        """
        This cancels a running job.
        If the job has already been canceled, then nothing is done.
        If the job doesn't exist (or the job id in the request is None),
        this raises a ValueError.
        If there's an error while attempting to cancel, this raises a
        NarrativeError.
        In the end, after a successful cancel, this finishes up by fetching
        and returning the job state with the new status.
        """
        self._verify_job_id(req)
        try:
            self._jm.cancel_job(req.job_id)
        except ValueError as e:
            self.send_error_message("job_does_not_exist", req)
            raise
        except NarrativeException as e:
            self.send_error_message(
                "job_comm_error",
                req,
                {
                    "error": "Unable to cancel job",
                    "message": getattr(e, "message", "Unknown reason"),
                    "code": getattr(e, "code", -1),
                    "name": getattr(e, "name", type(e).__name__)
                })
            raise
        # Push the post-cancel state so the front end sees the new status.
        self._lookup_job_state(req)

    def _get_job_logs(self, req: JobRequest) -> None:
        """
        This returns a set of job logs based on the info in the request.

        Reads "first_line" (default 0) and "num_lines" (default None) from
        the request data; a "job_logs_latest" request returns only the most
        recent lines.

        :raises ValueError: if the job_id is missing or unknown
        :raises NarrativeException: if log retrieval fails (also reported
            over the comm channel as "job_comm_error")
        """
        self._verify_job_id(req)
        first_line = req.rq_data.get("first_line", 0)
        num_lines = req.rq_data.get("num_lines", None)
        latest_only = req.request == "job_logs_latest"
        try:
            (first_line, max_lines, logs) = self._jm.get_job_logs(
                req.job_id,
                num_lines=num_lines,
                first_line=first_line,
                latest_only=latest_only)
            self.send_comm_message(
                "job_logs", {
                    "job_id": req.job_id,
                    "first": first_line,
                    "max_lines": max_lines,
                    "lines": logs,
                    "latest": latest_only
                })
        except ValueError as e:
            self.send_error_message("job_does_not_exist", req)
            raise
        except NarrativeException as e:
            self.send_error_message(
                "job_comm_error",
                req,
                {
                    "error": "Unable to retrieve job logs",
                    "message": getattr(e, "message", "Unknown reason"),
                    "code": getattr(e, "code", -1),
                    "name": getattr(e, "name", type(e).__name__)
                })
            raise

    def _handle_comm_message(self, msg: dict) -> None:
        """
        Handles comm messages that come in from the other end of the
        KBaseJobs channel. Messages get translated into a JobRequest object,
        which is then passed to the right handler, based on the request.

        A handler dictionary is created on JobComm creation.

        Any unknown request is returned over the channel as a
        job_comm_error, and a ValueError is raised.
        """
        request = JobRequest(msg)
        kblogging.log_event(self._log, "handle_comm_message", {"msg": request.request})
        if request.request in self._msg_map:
            self._msg_map[request.request](request)
        else:
            self.send_comm_message("job_comm_error", {
                "message": "Unknown message",
                "request_type": request.request
            })
            raise ValueError(f"Unknown KBaseJobs message '{request.request}'")

    def send_comm_message(self, msg_type: str, content: dict) -> None:
        """
        Sends a ipykernel.Comm message to the KBaseJobs channel with the
        given msg_type and content. These just get encoded into the message
        itself.
        """
        msg = {"msg_type": msg_type, "content": content}
        self._comm.send(msg)

    def send_error_message(self, err_type: str, req: JobRequest,
                           content: dict = None) -> None:
        """
        Sends a comm message over the KBaseJobs channel as an error. This
        will have msg_type as whatever the error type is, and include the
        original request in the message content as "source".

        This sends a packet that looks like:
        {
            job_id: (string, if relevant),
            source: the original message that spawned the error,
            other fields about the error, dependent on the content.
        }
        """
        error_content = {"job_id": req.job_id, "source": req.request}
        if content is not None:
            error_content.update(content)
        self.send_comm_message(err_type, error_content)
class Visualization(object):
    """A handle on a single server-side visualization.

    Wraps the REST endpoints under
    ``<host>/sessions/<session_id>/visualizations/<id>`` for updating and
    appending data/images, and (when running under IPython) opens a
    'lightning' comm so server events can be routed to user callbacks
    registered via ``on()``.
    """

    def __init__(self, session=None, json=None, auth=None):
        # NOTE(review): the `json` parameter (a response dict) shadows the
        # stdlib json module, but only dict access is used inside __init__,
        # and the name is part of the public keyword interface (callers pass
        # json=r.json()), so it cannot be renamed safely.
        self.session = session
        self.id = json.get('id')
        self.auth = auth

        if self.session.lgn.ipython_enabled:
            from ipykernel.comm import Comm
            self.comm = Comm('lightning', {'id': self.id})
            self.comm_handlers = {}
            self.comm.on_msg(self._handle_comm_message)

    def _format_url(self, url):
        """Normalize a URL with a trailing slash and append the session host
        as a ?host= query parameter."""
        if not url.endswith('/'):
            url += '/'
        # Python 3 / Python 2 compatible import of quote.
        try:
            from urllib.parse import quote
        except ImportError:
            from urllib import quote
        return url + '?host=' + quote(self.session.host)

    def _update_image(self, image):
        """Replace (PUT) the visualization's image data on the server."""
        url = self.session.host + '/sessions/' + str(self.session.id) + '/visualizations/' + str(self.id) + '/data/images'
        url = self._format_url(url)
        files = {'file': image}
        return requests.put(url, files=files, data={'type': 'image'}, auth=self.auth)

    def _append_image(self, image):
        """Append (POST) an additional image to the visualization."""
        url = self.session.host + '/sessions/' + str(self.session.id) + '/visualizations/' + str(self.id) + '/data/images'
        url = self._format_url(url)
        files = {'file': image}
        return requests.post(url, files=files, data={'type': 'image'}, auth=self.auth)

    def _append_data(self, data=None, field=None):
        """Append (POST) JSON data, optionally to a specific data field."""
        payload = {'data': data}
        headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
        url = self.session.host + '/sessions/' + str(self.session.id) + '/visualizations/' + str(self.id) + '/data/'
        if field:
            url += field
        url = self._format_url(url)
        return requests.post(url, data=json.dumps(payload), headers=headers, auth=self.auth)

    def _update_data(self, data=None, field=None):
        """Replace (PUT) JSON data, optionally for a specific data field."""
        payload = {'data': data}
        headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
        url = self.session.host + '/sessions/' + str(self.session.id) + '/visualizations/' + str(self.id) + '/data/'
        if field:
            url += field
        url = self._format_url(url)
        return requests.put(url, data=json.dumps(payload), headers=headers, auth=self.auth)

    def get_permalink(self):
        """Return the canonical URL of this visualization on the server."""
        return self.session.host + '/visualizations/' + str(self.id)

    def get_public_link(self):
        """Return the publicly viewable URL for this visualization."""
        return self.get_permalink() + '/public/'

    def get_embed_link(self):
        """Return the embeddable (iframe) URL, including the ?host= param."""
        return self._format_url(self.get_permalink() + '/embed')

    def get_html(self):
        """Fetch and return the embed page's HTML as text."""
        r = requests.get(self.get_embed_link(), auth=self.auth)
        return r.text

    def open(self):
        """Open the public link in the local web browser."""
        webbrowser.open(self.get_public_link())

    def delete(self):
        """Delete this visualization on the server.

        NOTE(review): unlike the other requests in this class, no auth is
        passed here — confirm whether DELETE is meant to be unauthenticated.
        """
        url = self.get_permalink()
        return requests.delete(url)

    def on(self, event_name, handler):
        """Register `handler` for comm events of type `event_name`
        (IPython-only)."""
        if self.session.lgn.ipython_enabled:
            self.comm_handlers[event_name] = handler
        else:
            raise Exception('The current implementation of this method is only compatible with IPython.')

    def _handle_comm_message(self, message):
        # Parsing logic taken from similar code in matplotlib
        message = json.loads(message['content']['data'])

        # Dispatch to the user-registered handler for this event type, if any.
        if message['type'] in self.comm_handlers:
            self.comm_handlers[message['type']](message['data'])

    @classmethod
    def _create(cls, session=None, data=None, images=None, type=None, options=None, description=None):
        """Create a visualization on the server and return a Visualization.

        With no `images`, posts `data` as JSON; otherwise uploads the first
        image with the create request and appends the rest one at a time.
        NOTE(review): the `type` parameter shadows the builtin, but it is
        part of the public keyword interface and cannot be renamed safely.
        """
        if options is None:
            options = {}

        url = session.host + '/sessions/' + str(session.id) + '/visualizations'

        if not images:
            payload = {'data': data, 'type': type, 'options': options}
            if description:
                payload['description'] = description
            headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
            r = requests.post(url, data=json.dumps(payload), headers=headers, auth=session.auth)
            if r.status_code == 404:
                raise Exception(r.text)
            elif not r.status_code == requests.codes.ok:
                raise Exception('Problem uploading data')
            viz = cls(session=session, json=r.json(), auth=session.auth)
        else:
            # First image rides along with the create; the rest are appended.
            first_image, remaining_images = images[0], images[1:]
            files = {'file': first_image}
            payload = {'type': type, 'options': json.dumps(options)}
            if description:
                payload['description'] = description
            r = requests.post(url, files=files, data=payload, auth=session.auth)
            if r.status_code == 404:
                raise Exception(r.text)
            elif not r.status_code == requests.codes.ok:
                raise Exception('Problem uploading images')
            viz = cls(session=session, json=r.json(), auth=session.auth)
            for image in remaining_images:
                viz._append_image(image)

        return viz
class Widget(LoggingConfigurable):
    """Base class for kernel-side widget models.

    Opens an 'ipython.widget' comm to the front-end and keeps `sync=True`
    traits synchronized in both directions over that comm.
    """
    #-------------------------------------------------------------------------
    # Class attributes
    #-------------------------------------------------------------------------
    # Single optional hook fired whenever any widget is constructed.
    _widget_construction_callback = None

    # Registry of all live widgets, keyed by comm/model id.
    widgets = {}
    widget_types = {}

    @staticmethod
    def on_widget_constructed(callback):
        """Registers a callback to be called when a widget is constructed.

        The callback must have the following signature:
        callback(widget)"""
        Widget._widget_construction_callback = callback

    @staticmethod
    def _call_widget_constructed(widget):
        """Static method, called when a widget is constructed."""
        if Widget._widget_construction_callback is not None and callable(Widget._widget_construction_callback):
            Widget._widget_construction_callback(widget)

    @staticmethod
    def handle_comm_opened(comm, msg):
        """Static method, called when a comm is opened from the front-end.

        Instantiates the widget class named in the message, bound to the
        newly opened comm. (The original docstring here was a copy-paste of
        _call_widget_constructed's.)
        """
        widget_class = import_item(str(msg['content']['data']['widget_class']))
        widget = widget_class(comm=comm)

    #-------------------------------------------------------------------------
    # Traits
    #-------------------------------------------------------------------------
    _model_module = Unicode(None, allow_none=True, help="""A requirejs module name in which to find _model_name. If empty, look in the global registry.""", sync=True)
    _model_name = Unicode('WidgetModel', help="""Name of the backbone model registered in the front-end to create and sync this widget with.""", sync=True)
    _view_module = Unicode(help="""A requirejs module in which to find _view_name. If empty, look in the global registry.""", sync=True)
    _view_name = Unicode(None, allow_none=True, help="""Default view registered in the front-end to use to represent the widget.""", sync=True)
    comm = Instance('ipykernel.comm.Comm', allow_none=True)

    msg_throttle = Int(3, sync=True, help="""Maximum number of msgs the front-end can send before receiving an idle msg from the back-end.""")

    version = Int(0, sync=True, help="""Widget's version""")
    keys = List()

    def _keys_default(self):
        # Default set of synced keys: every trait declared with sync=True.
        return [name for name in self.traits(sync=True)]

    # Values the front-end just sent us; used to suppress echo (see
    # _should_send_property).
    _property_lock = Dict()
    _holding_sync = False
    _states_to_send = Set()
    _display_callbacks = Instance(CallbackDispatcher, ())
    _msg_callbacks = Instance(CallbackDispatcher, ())

    #-------------------------------------------------------------------------
    # (Con/de)structor
    #-------------------------------------------------------------------------
    def __init__(self, **kwargs):
        """Public constructor"""
        self._model_id = kwargs.pop('model_id', None)
        super(Widget, self).__init__(**kwargs)

        Widget._call_widget_constructed(self)
        self.open()

    def __del__(self):
        """Object disposal"""
        self.close()

    #-------------------------------------------------------------------------
    # Properties
    #-------------------------------------------------------------------------
    def open(self):
        """Open a comm to the frontend if one isn't already open."""
        if self.comm is None:
            args = dict(target_name='ipython.widget', data=self.get_state())
            if self._model_id is not None:
                args['comm_id'] = self._model_id
            self.comm = Comm(**args)

    def _comm_changed(self, name, new):
        """Called (by traitlets naming convention) when the comm trait is set."""
        if new is None:
            return
        self._model_id = self.model_id

        self.comm.on_msg(self._handle_msg)
        Widget.widgets[self.model_id] = self

    @property
    def model_id(self):
        """Gets the model id of this widget.

        If a Comm doesn't exist yet, a Comm will be created automagically."""
        return self.comm.comm_id

    #-------------------------------------------------------------------------
    # Methods
    #-------------------------------------------------------------------------
    def close(self):
        """Close method.

        Closes the underlying comm.
        When the comm is closed, all of the widget views are automatically
        removed from the front-end."""
        if self.comm is not None:
            Widget.widgets.pop(self.model_id, None)
            self.comm.close()
            self.comm = None

    def send_state(self, key=None):
        """Sends the widget state, or a piece of it, to the front-end.

        Parameters
        ----------
        key : unicode, or iterable (optional)
            A single property's name or iterable of property names to sync with the front-end.
        """
        state = self.get_state(key=key)
        buffer_keys, buffers = [], []
        # FIX: iterate over a snapshot of the items. Popping from `state`
        # while iterating state.items() raises "RuntimeError: dictionary
        # changed size during iteration" on Python 3 whenever a memoryview
        # value is present (it was only safe on Python 2, where items()
        # returned a list).
        for k, v in list(state.items()):
            if isinstance(v, memoryview):
                # Binary values travel as comm buffers, not JSON state.
                state.pop(k)
                buffers.append(v)
                buffer_keys.append(k)
        msg = {'method': 'update', 'state': state, 'buffers': buffer_keys}
        self._send(msg, buffers=buffers)

    def get_state(self, key=None):
        """Gets the widget state, or a piece of it.

        Parameters
        ----------
        key : unicode or iterable (optional)
            A single property's name or iterable of property names to get.

        Returns
        -------
        state : dict of states
        metadata : dict
            metadata for each field: {key: metadata}
        """
        if key is None:
            keys = self.keys
        elif isinstance(key, string_types):
            keys = [key]
        # NOTE(review): collections.Iterable was removed in Python 3.10
        # (use collections.abc.Iterable); kept as-is for the py2/py3 era
        # this file targets.
        elif isinstance(key, collections.Iterable):
            keys = key
        else:
            raise ValueError("key must be a string, an iterable of keys, or None")
        state = {}
        for k in keys:
            to_json = self.trait_metadata(k, 'to_json', self._trait_to_json)
            state[k] = to_json(getattr(self, k), self)
        return state

    def set_state(self, sync_data):
        """Called when a state is received from the front-end."""
        # The order of these context managers is important. Properties must
        # be locked when the hold_trait_notification context manager is
        # released and notifications are fired.
        with self._lock_property(**sync_data), self.hold_trait_notifications():
            for name in sync_data:
                if name in self.keys:
                    from_json = self.trait_metadata(name, 'from_json', self._trait_from_json)
                    # traitlets < 4.1 don't support read-only attributes
                    if hasattr(self, 'set_trait'):
                        self.set_trait(name, from_json(sync_data[name], self))
                    else:
                        setattr(self, name, from_json(sync_data[name], self))

    def send(self, content, buffers=None):
        """Sends a custom msg to the widget model in the front-end.

        Parameters
        ----------
        content : dict
            Content of the message to send.
        buffers : list of binary buffers
            Binary buffers to send with message
        """
        self._send({"method": "custom", "content": content}, buffers=buffers)

    def on_msg(self, callback, remove=False):
        """(Un)Register a custom msg receive callback.

        Parameters
        ----------
        callback: callable
            callback will be passed three arguments when a message arrives::

                callback(widget, content, buffers)

        remove: bool
            True if the callback should be unregistered."""
        self._msg_callbacks.register_callback(callback, remove=remove)

    def on_displayed(self, callback, remove=False):
        """(Un)Register a widget displayed callback.

        Parameters
        ----------
        callback: method handler
            Must have a signature of::

                callback(widget, **kwargs)

            kwargs from display are passed through without modification.
        remove: bool
            True if the callback should be unregistered."""
        self._display_callbacks.register_callback(callback, remove=remove)

    def add_traits(self, **traits):
        """Dynamically add trait attributes to the Widget."""
        super(Widget, self).add_traits(**traits)
        for name, trait in traits.items():
            if trait.get_metadata('sync'):
                self.keys.append(name)
                self.send_state(name)

    #-------------------------------------------------------------------------
    # Support methods
    #-------------------------------------------------------------------------
    @contextmanager
    def _lock_property(self, **properties):
        """Lock a property-value pair.

        The value should be the JSON state of the property.

        NOTE: This, in addition to the single lock for all state changes, is
        flawed. In the future we may want to look into buffering state changes
        back to the front-end."""
        self._property_lock = properties
        try:
            yield
        finally:
            self._property_lock = {}

    @contextmanager
    def hold_sync(self):
        """Hold syncing any state until the outermost context manager exits"""
        if self._holding_sync is True:
            # Already holding: nest transparently without re-flushing.
            yield
        else:
            try:
                self._holding_sync = True
                yield
            finally:
                self._holding_sync = False
                self.send_state(self._states_to_send)
                self._states_to_send.clear()

    def _should_send_property(self, key, value):
        """Check the property lock (property_lock)"""
        to_json = self.trait_metadata(key, 'to_json', self._trait_to_json)
        if (key in self._property_lock
                and to_json(value, self) == self._property_lock[key]):
            # Round-trip echo of front-end originated data: do not resend.
            return False
        elif self._holding_sync:
            # Defer; flushed when the hold_sync context exits.
            self._states_to_send.add(key)
            return False
        else:
            return True

    # Event handlers
    @_show_traceback
    def _handle_msg(self, msg):
        """Called when a msg is received from the front-end"""
        data = msg['content']['data']
        method = data['method']

        # Handle backbone sync methods CREATE, PATCH, and UPDATE all in one.
        if method == 'backbone':
            if 'sync_data' in data:
                # get binary buffers too
                sync_data = data['sync_data']
                for i, k in enumerate(data.get('buffer_keys', [])):
                    sync_data[k] = msg['buffers'][i]
                self.set_state(sync_data)  # handles all methods

        # Handle a state request.
        elif method == 'request_state':
            self.send_state()

        # Handle a custom msg from the front-end.
        elif method == 'custom':
            if 'content' in data:
                self._handle_custom_msg(data['content'], msg['buffers'])

        # Catch remainder.
        else:
            self.log.error('Unknown front-end to back-end widget msg with method "%s"' % method)

    def _handle_custom_msg(self, content, buffers):
        """Called when a custom msg is received."""
        self._msg_callbacks(self, content, buffers)

    def _notify_trait(self, name, old_value, new_value):
        """Called when a property has been changed."""
        # Trigger default traitlet callback machinery. This allows any user
        # registered validation to be processed prior to allowing the widget
        # machinery to handle the state.
        LoggingConfigurable._notify_trait(self, name, old_value, new_value)

        # Send the state after the user registered callbacks for trait changes
        # have all fired (allows for user to validate values).
        if self.comm is not None and name in self.keys:
            # Make sure this isn't information that the front-end just sent us.
            if self._should_send_property(name, new_value):
                # Send new state to front-end
                self.send_state(key=name)

    def _handle_displayed(self, **kwargs):
        """Called when a view has been displayed for this widget instance"""
        self._display_callbacks(self, **kwargs)

    @staticmethod
    def _trait_to_json(x, self):
        """Convert a trait value to json.

        Default serializer: identity. Traits may override via 'to_json'
        metadata."""
        return x

    @staticmethod
    def _trait_from_json(x, self):
        """Convert json values to objects.

        Default deserializer: identity. Traits may override via 'from_json'
        metadata."""
        return x

    def _ipython_display_(self, **kwargs):
        """Called when `IPython.display.display` is called on the widget."""
        # Show view.
        if self._view_name is not None:
            self._send({"method": "display"})
        self._handle_displayed(**kwargs)

    def _send(self, msg, buffers=None):
        """Sends a message to the model in the front-end."""
        self.comm.send(data=msg, buffers=buffers)
class CommSocket:
    """
    Manages the Comm connection between IPython and the browser (client).

    Comms are 2 way, with the CommSocket being able to publish a message
    via the send_json method, and handle a message with on_message. On the
    JS side figure.send_message and figure.ws.onmessage do the sending and
    receiving respectively.
    """

    def __init__(self, manager):
        self.supports_binary = None
        self.manager = manager
        self.uuid = str(uuid.uuid4())
        # Publish an output area with a unique ID. The javascript can then
        # hook into this area.
        display(HTML("<div id=%r></div>" % self.uuid))
        try:
            self.comm = Comm('matplotlib', data={'id': self.uuid})
        except AttributeError as err:
            raise RuntimeError('Unable to create an IPython notebook Comm '
                               'instance. Are you in the IPython '
                               'notebook?') from err
        self.comm.on_msg(self.on_message)

        mgr = self.manager
        self._ext_close = False

        def _handle_remote_close(close_message):
            # The front-end closed the comm: remember it, then detach the
            # figure manager.
            self._ext_close = True
            mgr.remove_comm(close_message['content']['comm_id'])
            mgr.clearup_closed()

        self.comm.on_close(_handle_remote_close)

    def is_open(self):
        """Whether the comm is still usable from both ends."""
        return not self._ext_close and not self.comm._closed

    def on_close(self):
        """Deregister the websocket stand-in with the FigureManager."""
        if not self.is_open():
            return
        try:
            self.comm.close()
        except KeyError:
            # apparently already cleaned it up?
            pass

    def send_json(self, content):
        """Publish `content` to the front-end as a JSON-encoded payload."""
        encoded = json.dumps(content)
        self.comm.send({'data': encoded})

    def send_binary(self, blob):
        """Ship a PNG blob: as a raw buffer if supported, else base64."""
        if self.supports_binary:
            self.comm.send({'blob': 'image/png'}, buffers=[blob])
            return
        # The comm is ASCII, so we send the image in base64 encoded data
        # URL form.
        encoded = b64encode(blob).decode('ascii')
        self.comm.send({'data': "data:image/png;base64," + encoded})

    def on_message(self, message):
        # The 'supports_binary' message is relevant to the
        # websocket itself. The other messages get passed along
        # to matplotlib as-is.

        # Every message has a "type" and a "figure_id".
        payload = json.loads(message['content']['data'])
        kind = payload['type']
        if kind == 'closing':
            self.on_close()
            self.manager.clearup_closed()
        elif kind == 'supports_binary':
            self.supports_binary = payload['value']
        else:
            self.manager.handle_json(payload)
class JobManager(object):
    """
    The KBase Job Manager class. This handles all jobs and makes their status
    available. On status lookups, it feeds the results to the KBaseJobs
    channel that the front end listens to.
    """
    __instance = None

    # keys = job_id, values = { refresh = T/F, job = Job object }
    _running_jobs = dict()

    _lookup_timer = None
    _comm = None
    _log = kblogging.get_logger(__name__)
    # TODO: should this not be done globally?
    _running_lookup_loop = False

    def __new__(cls):
        # Singleton: every caller shares one JobManager instance.
        if JobManager.__instance is None:
            JobManager.__instance = object.__new__(cls)
        return JobManager.__instance

    def initialize_jobs(self):
        """
        Initializes this JobManager.
        This is expected to be run by a running Narrative, and naturally linked to a workspace.
        So it does the following steps.
        1. app_util.system_variable('workspace_id')
        2. get list of jobs with that ws id from UJS (also gets tag, cell_id, run_id)
        3. initialize the Job objects by running NJS.get_job_params on each of those (also gets app_id)
        4. start the status lookup loop.
        """
        ws_id = system_variable('workspace_id')
        try:
            nar_jobs = clients.get('user_and_job_state').list_jobs2({
                'authstrat': 'kbaseworkspace',
                'authparams': [str(ws_id)]
            })
        except Exception as e:
            kblogging.log_event(self._log, 'init_error', {'err': str(e)})
            new_e = transform_job_exception(e)
            error = {
                'error': 'Unable to get initial jobs list',
                'message': getattr(new_e, 'message', 'Unknown reason'),
                'code': getattr(new_e, 'code', -1),
                'source': getattr(new_e, 'source', 'jobmanager'),
                'name': getattr(new_e, 'name', type(e).__name__),
                'service': 'user_and_job_state'
            }
            self._send_comm_message('job_init_err', error)
            raise new_e

        for info in nar_jobs:
            # UJS job tuple layout: [0] = job_id, [1] = user info,
            # [10] = job metadata dict (tag / cell_id / run_id).
            job_id = info[0]
            user_info = info[1]
            job_meta = info[10]
            try:
                job_info = clients.get('job_service').get_job_params(job_id)[0]
                self._running_jobs[job_id] = {
                    'refresh': True,
                    'job': Job.from_state(job_id,
                                          job_info,
                                          user_info[0],
                                          app_id=job_info.get('app_id'),
                                          tag=job_meta.get('tag', 'release'),
                                          cell_id=job_meta.get('cell_id', None),
                                          run_id=job_meta.get('run_id', None))
                }
            except Exception as e:
                kblogging.log_event(self._log, 'init_error', {'err': str(e)})
                new_e = transform_job_exception(e)
                error = {
                    'error': 'Unable to get job info on initial lookup',
                    'job_id': job_id,
                    'message': getattr(new_e, 'message', 'Unknown reason'),
                    'code': getattr(new_e, 'code', -1),
                    'source': getattr(new_e, 'source', 'jobmanager'),
                    'name': getattr(new_e, 'name', type(e).__name__),
                    'service': 'job_service'
                }
                self._send_comm_message('job_init_lookup_err', error)
                # should crash and burn on any of these.
                raise new_e

        if not self._running_lookup_loop:
            # only keep one loop at a time in case this gets called again!
            if self._lookup_timer is not None:
                self._lookup_timer.cancel()
            self._running_lookup_loop = True
            self._lookup_job_status_loop()
        else:
            self._lookup_all_job_status()

    def list_jobs(self):
        """
        List all job ids, their info, and status in a quick HTML format.
        """
        try:
            status_set = list()
            for job_id in self._running_jobs:
                job = self._running_jobs[job_id]['job']
                job_state = job.state()
                job_params = job.parameters()
                job_state['app_id'] = job_params[0].get('app_id', 'Unknown App')
                job_state['owner'] = job.owner
                status_set.append(job_state)
            if not len(status_set):
                return "No running jobs!"
            status_set = sorted(status_set, key=lambda s: s['creation_time'])
            for i in range(len(status_set)):
                # Timestamps from the service are in milliseconds.
                status_set[i]['creation_time'] = datetime.datetime.strftime(
                    datetime.datetime.fromtimestamp(status_set[i]['creation_time'] / 1000),
                    "%Y-%m-%d %H:%M:%S")
                exec_start = status_set[i].get('exec_start_time', None)
                if 'finish_time' in status_set[i]:
                    finished = status_set[i].get('finish_time', None)
                    if finished is not None and exec_start:
                        delta = (datetime.datetime.fromtimestamp(finished / 1000.0) -
                                 datetime.datetime.fromtimestamp(exec_start / 1000.0))
                        # Drop sub-second precision for display.
                        delta = delta - datetime.timedelta(microseconds=delta.microseconds)
                        status_set[i]['run_time'] = str(delta)
                        status_set[i]['finish_time'] = datetime.datetime.strftime(
                            datetime.datetime.fromtimestamp(status_set[i]['finish_time'] / 1000),
                            "%Y-%m-%d %H:%M:%S")
                elif exec_start:
                    # Still running: report elapsed wall time so far.
                    delta = (datetime.datetime.utcnow() -
                             datetime.datetime.utcfromtimestamp(exec_start / 1000.0))
                    delta = delta - datetime.timedelta(microseconds=delta.microseconds)
                    status_set[i]['run_time'] = str(delta)
                else:
                    status_set[i]['run_time'] = 'Not started'

            tmpl = """
            <table class="table table-bordered table-striped table-condensed">
                <tr>
                    <th>Id</th>
                    <th>Name</th>
                    <th>Submitted</th>
                    <th>Submitted By</th>
                    <th>Status</th>
                    <th>Run Time</th>
                    <th>Complete Time</th>
                </tr>
                {% for j in jobs %}
                <tr>
                    <td>{{ j.job_id|e }}</td>
                    <td>{{ j.app_id|e }}</td>
                    <td>{{ j.creation_time|e }}</td>
                    <td>{{ j.owner|e }}</td>
                    <td>{{ j.job_state|e }}</td>
                    <td>{{ j.run_time|e }}</td>
                    <td>{% if j.finish_time %}{{ j.finish_time|e }}{% else %}Incomplete{% endif %}</td>
                </tr>
                {% endfor %}
            </table>
            """
            return HTML(Template(tmpl).render(jobs=status_set))
        except Exception as e:
            kblogging.log_event(self._log, "list_jobs.error", {'err': str(e)})
            raise

    def get_jobs_list(self):
        """
        A convenience method for fetching an unordered list of all running Jobs.
        """
        return [j['job'] for j in self._running_jobs.values()]

    def _construct_job_status(self, job_id):
        """
        Always creates a Job Status.
        It'll embed error messages into the status if there are problems.
        """
        state = {}
        widget_info = None
        app_spec = {}

        job = self.get_job(job_id)
        if job is None:
            state = {
                'job_state': 'error',
                'error': {
                    'error': 'Job does not seem to exist, or it is otherwise unavailable.',
                    'message': 'Job does not exist',
                    'name': 'Job Error',
                    'code': -1,
                    'exception': {
                        'error_message': 'job not found in JobManager',
                        'error_type': 'ValueError',
                        'error_stacktrace': ''
                    }
                },
                'cell_id': None,
                'run_id': None
            }
            # NOTE(review): this branch returns key 'app_spec', while the
            # normal return below uses 'spec' — preserved as-is since the
            # frontend may depend on either; verify before unifying.
            return {
                'state': state,
                'app_spec': app_spec,
                'widget_info': widget_info,
                'owner': None
            }

        try:
            app_spec = job.app_spec()
        except Exception as e:
            kblogging.log_event(self._log, "lookup_job_status.error", {'err': str(e)})

        try:
            state = job.state()
        except Exception as e:
            kblogging.log_event(self._log, "lookup_job_status.error", {'err': str(e)})
            new_e = transform_job_exception(e)
            e_type = type(e).__name__
            # HTML-escape angle brackets so tracebacks render safely in the
            # frontend (the original had no-op replaces here).
            e_message = str(new_e).replace('<', '&lt;').replace('>', '&gt;')
            e_trace = traceback.format_exc().replace('<', '&lt;').replace('>', '&gt;')
            e_code = getattr(new_e, "code", -2)
            e_source = getattr(new_e, "source", "JobManager")
            state = {
                'job_state': 'error',
                'error': {
                    'error': 'Unable to find current job state. Please try again later, or contact KBase.',
                    'message': 'Unable to return job state',
                    'name': 'Job Error',
                    'code': e_code,
                    'source': e_source,
                    'exception': {
                        'error_message': e_message,
                        'error_type': e_type,
                        'error_stacktrace': e_trace,
                    }
                },
                'creation_time': 0,
                'cell_id': job.cell_id,
                'run_id': job.run_id,
                'job_id': job_id
            }

        if state.get('finished', 0) == 1:
            try:
                widget_info = job.get_viewer_params(state)
            except Exception as e:
                # Can't get viewer params
                new_e = transform_job_exception(e)
                kblogging.log_event(self._log, "lookup_job_status.error", {'err': str(e)})
                state['job_state'] = 'error'
                state['error'] = {
                    'error': 'Unable to generate App output viewer!\nThe App appears to have completed successfully,\nbut we cannot construct its output viewer.\nPlease contact the developer of this App for assistance.',
                    'message': 'Unable to build output viewer parameters!',
                    'name': 'App Error',
                    'code': getattr(new_e, "code", -1),
                    'source': getattr(new_e, "source", "JobManager")
                }

        if 'canceling' in self._running_jobs[job_id]:
            # A cancel is in flight (see cancel_job); report it as such.
            state['job_state'] = 'canceling'

        return {'state': state,
                'spec': app_spec,
                'widget_info': widget_info,
                'owner': job.owner}

    def _lookup_job_status(self, job_id):
        """
        Will raise a ValueError if job_id doesn't exist.
        Sends the status over the comm channel as the usual job_status message.
        """
        status = self._construct_job_status(job_id)
        self._send_comm_message('job_status', status)

    def _lookup_all_job_status(self, ignore_refresh_flag=False):
        """
        Looks up status for all jobs.
        Once job info is acquired, it gets pushed to the front end over the
        'KBaseJobs' channel.
        """
        status_set = dict()
        # Snapshot the running job ids with list() — dict.keys() is a live
        # view in Python 3, so iterating it directly would break if the
        # job table is mutated during the status lookups.
        for job_id in list(self._running_jobs.keys()):
            if self._running_jobs[job_id]['refresh'] or ignore_refresh_flag:
                status_set[job_id] = self._construct_job_status(job_id)
        self._send_comm_message('job_status_all', status_set)

    def _lookup_job_status_loop(self):
        """
        Initialize a loop that will look up job info. This uses a Timer thread on a 10
        second loop to update things.
        """
        self._lookup_all_job_status()
        self._lookup_timer = threading.Timer(10, self._lookup_job_status_loop)
        self._lookup_timer.start()

    def cancel_job_lookup_loop(self):
        """
        Cancels a running timer if one's still alive.
        """
        if self._lookup_timer:
            self._lookup_timer.cancel()
            self._lookup_timer = None
        self._running_lookup_loop = False

    def register_new_job(self, job):
        """
        Registers a new Job with the manager - should only be invoked when a new Job gets
        started. This stores the Job locally and pushes it over the comm channel to the
        Narrative where it gets serialized.

        Parameters:
        -----------
        job : biokbase.narrative.jobs.job.Job object
            The new Job that was started.
        """
        self._running_jobs[job.job_id] = {'job': job, 'refresh': True}
        # push it forward! create a new_job message.
        self._lookup_job_status(job.job_id)
        self._send_comm_message('new_job', {})

    def get_job(self, job_id):
        """
        Returns a Job with the given job_id.
        Raises a ValueError if not found.
        """
        if job_id in self._running_jobs:
            return self._running_jobs[job_id]['job']
        else:
            raise ValueError('No job present with id {}'.format(job_id))

    def _handle_comm_message(self, msg):
        """
        Handles comm messages that come in from the other end of the KBaseJobs channel.
        All messages (of any use) should have a 'request_type' property.
        Possible types:
        * all_status
            refresh all jobs that are flagged to be looked up. Will send a
            message back with all lookup status.
        * job_status
            refresh the single job given in the 'job_id' field. Sends a message
            back with that single job's status, or an error message.
        * stop_update_loop
            stop the running refresh loop, if there's one going (might be
            one more pass, depending on the thread state)
        * start_update_loop
            reinitialize the refresh loop.
        * stop_job_update
            flag the given job id (should be an accompanying 'job_id' field) that the
            front end knows it's in a terminal state and should no longer have its status
            looked up in the refresh cycle.
        * start_job_update
            remove the flag that gets set by stop_job_update (needs an accompanying
            'job_id' field)
        """
        if 'request_type' in msg['content']['data']:
            r_type = msg['content']['data']['request_type']
            job_id = msg['content']['data'].get('job_id', None)
            if job_id is not None and job_id not in self._running_jobs:
                # If it's not a real job, don't silently ignore the request —
                # tell the frontend the job doesn't exist.
                # TODO: perhaps we should implement request/response here. All we really need is to thread a message
                # id through
                self._send_comm_message('job_does_not_exist', {'job_id': job_id, 'request_type': r_type})
                return

            if r_type == 'all_status':
                self._lookup_all_job_status(ignore_refresh_flag=True)

            elif r_type == 'job_status':
                if job_id is not None:
                    self._lookup_job_status(job_id)

            elif r_type == 'stop_update_loop':
                if self._lookup_timer is not None:
                    self._lookup_timer.cancel()

            elif r_type == 'start_update_loop':
                self._lookup_job_status_loop()

            elif r_type == 'stop_job_update':
                if job_id is not None:
                    self._running_jobs[job_id]['refresh'] = False

            elif r_type == 'start_job_update':
                if job_id is not None:
                    self._running_jobs[job_id]['refresh'] = True

            elif r_type == 'delete_job':
                if job_id is not None:
                    try:
                        self.delete_job(job_id)
                    except Exception as e:
                        self._send_comm_message('job_comm_error', {'message': str(e), 'request_type': r_type, 'job_id': job_id})

            elif r_type == 'cancel_job':
                if job_id is not None:
                    try:
                        self.cancel_job(job_id)
                    except Exception as e:
                        self._send_comm_message('job_comm_error', {'message': str(e), 'request_type': r_type, 'job_id': job_id})

            elif r_type == 'job_logs':
                if job_id is not None:
                    first_line = msg['content']['data'].get('first_line', 0)
                    num_lines = msg['content']['data'].get('num_lines', None)
                    self._get_job_logs(job_id, first_line=first_line, num_lines=num_lines)
                else:
                    raise ValueError('Need a job id to fetch jobs!')

            elif r_type == 'job_logs_latest':
                if job_id is not None:
                    num_lines = msg['content']['data'].get('num_lines', None)
                    self._get_latest_job_logs(job_id, num_lines=num_lines)

            else:
                self._send_comm_message('job_comm_error', {'message': 'Unknown message', 'request_type': r_type})
                raise ValueError('Unknown KBaseJobs message "{}"'.format(r_type))

    def _get_latest_job_logs(self, job_id, num_lines=None):
        """
        Fetches and sends over the comm channel the tail (last num_lines) of a
        job's logs. Raises ValueError if the job isn't found.
        """
        job = self.get_job(job_id)
        if job is None:
            raise ValueError('job "{}" not found while fetching logs!'.format(job_id))

        (max_lines, logs) = job.log()
        first_line = 0
        if num_lines is not None and max_lines > num_lines:
            first_line = max_lines - num_lines
            logs = logs[first_line:]
        self._send_comm_message('job_logs', {'job_id': job_id,
                                             'first': first_line,
                                             'max_lines': max_lines,
                                             'lines': logs,
                                             'latest': True})

    def _get_job_logs(self, job_id, first_line=0, num_lines=None):
        """
        Fetches and sends over the comm channel a slice of a job's logs,
        starting at first_line. Raises ValueError if the job isn't found.
        """
        job = self.get_job(job_id)
        if job is None:
            raise ValueError('job "{}" not found!'.format(job_id))

        (max_lines, log_slice) = job.log(first_line=first_line, num_lines=num_lines)
        self._send_comm_message('job_logs', {'job_id': job_id,
                                             'first': first_line,
                                             'max_lines': max_lines,
                                             'lines': log_slice,
                                             'latest': False})

    def delete_job(self, job_id):
        """
        If the job_id doesn't exist, raises a ValueError.
        Attempts to delete a job, and cancels it first. If the job cannot be canceled,
        raises an exception. If it can be canceled but not deleted, it gets canceled, then raises
        an exception.
        """
        if job_id is None:
            raise ValueError('Job id required for deletion!')
        if job_id not in self._running_jobs:
            self._send_comm_message('job_does_not_exist', {'job_id': job_id, 'source': 'delete_job'})
            return

        # Cancel first; let any cancel/delete failure propagate to the caller.
        self.cancel_job(job_id)
        clients.get('user_and_job_state').delete_job(job_id)
        del self._running_jobs[job_id]
        self._send_comm_message('job_deleted', {'job_id': job_id})

    def cancel_job(self, job_id):
        """
        Cancels a running job, placing it in a canceled state.
        Does NOT delete the job.
        Raises an exception if the current user doesn't have permission to cancel the job.
        """
        if job_id is None:
            raise ValueError('Job id required for cancellation!')
        if job_id not in self._running_jobs:
            self._send_comm_message('job_does_not_exist', {'job_id': job_id, 'source': 'cancel_job'})
            return

        try:
            job = self.get_job(job_id)
            state = job.state()
            if state.get('canceled', 0) == 1 or state.get('finished', 0) == 1:
                # It's already finished, don't try to cancel it again.
                return
        except Exception as e:
            raise ValueError('Unable to get Job state') from e

        # Stop updating the job status while we try to cancel.
        # Also, set it to have a special state of 'canceling' while we're doing the cancel.
        is_refreshing = self._running_jobs[job_id].get('refresh', False)
        self._running_jobs[job_id]['refresh'] = False
        self._running_jobs[job_id]['canceling'] = True
        try:
            clients.get('job_service').cancel_job({'job_id': job_id})
        except Exception as e:
            new_e = transform_job_exception(e)
            error = {
                'error': 'Unable to get cancel job',
                'message': getattr(new_e, 'message', 'Unknown reason'),
                'code': getattr(new_e, 'code', -1),
                'source': getattr(new_e, 'source', 'jobmanager'),
                'name': getattr(new_e, 'name', type(e).__name__),
                'request_type': 'cancel_job',
                'job_id': job_id
            }
            self._send_comm_message('job_comm_error', error)
            # Re-raise bare to preserve the original traceback
            # (the original `raise(e)` discarded it).
            raise
        finally:
            self._running_jobs[job_id]['refresh'] = is_refreshing
            del self._running_jobs[job_id]['canceling']

        # Rather than a separate 'job_canceled' message, trigger a
        # job-status message so the frontend sees the new state.
        self._lookup_job_status(job_id)

    def _send_comm_message(self, msg_type, content):
        """
        Sends a ipykernel.Comm message to the KBaseJobs channel with the given msg_type
        and content. These just get encoded into the message itself.
        """
        msg = {
            'msg_type': msg_type,
            'content': content
        }
        if self._comm is None:
            # Lazily open the comm on first send and start listening for
            # frontend requests on it.
            self._comm = Comm(target_name='KBaseJobs', data={})
            self._comm.on_msg(self._handle_comm_message)
        self._comm.send(msg)