def start_kernel(self, **kwargs):
    """Launch a new kernel and register it under a fresh UUID.

    All keyword arguments are forwarded verbatim to ``launch_kernel``.
    Returns the new kernel's id (a unicode string) after storing its
    contact information in ``self._kernels``.
    """
    new_id = unicode(uuid.uuid4())
    process, shell_port, iopub_port, stdin_port, hb_port = launch_kernel(**kwargs)
    # Record how to contact the kernel. This assumes the kernel is
    # running on localhost.
    self._kernels[new_id] = {
        "process": process,
        "stdin_port": stdin_port,
        "iopub_port": iopub_port,
        "shell_port": shell_port,
        "hb_port": hb_port,
        "ip": "127.0.0.1",
    }
    return new_id
def run_ip_device():
    """Device main loop: poll the database for unevaluated cells, execute
    each one on a freshly launched IPython kernel, and write the (possibly
    incremental) output back to the database.

    Never returns; intended to run as a dedicated device process.
    """
    import zmq
    from ip_receiver import IPReceiver
    from IPython.zmq.ipkernel import launch_kernel

    device_id = random.randrange(sys.maxint)
    # launch_kernel() returns (process, shell_port, iopub_port, stdin_port,
    # hb_port): subscribe to IOPub for output, send requests on the shell port.
    kernel = launch_kernel()
    db.set_ipython_ports(kernel)
    sub = IPReceiver(zmq.SUB, kernel[2])
    context = zmq.Context()
    xreq = context.socket(zmq.XREQ)
    xreq.connect("tcp://localhost:%i" % (kernel[1],))
    while True:
        for X in db.get_unevaluated_cells(device_id):
            header = {"msg_id": str(X["_id"])}
            xreq.send_json({"header": header,
                            "msg_type": "execute_request",
                            "content": {"code": X['input'],
                                        "silent": False,
                                        "user_variables": [],
                                        "user_expressions": {}}})
            out = ""
            err = ""
            while True:
                changed = False
                done = False
                for msg in sub.getMessages(header):
                    if msg["msg_type"] == "stream":
                        out += msg["content"]["data"]
                        changed = True
                    elif msg["msg_type"] == "pyout":
                        out += msg["content"]["data"]["text/plain"] + "\n"
                        # Fix: mark the output as changed so pyout results
                        # are flushed to the database immediately instead of
                        # waiting for the next stream/idle message.
                        changed = True
                    elif msg["msg_type"] == "status" and \
                            msg["content"]["execution_state"] == "idle":
                        done = True
                    elif msg["msg_type"] == "pyerr":
                        err += new_stream("error", printout=False, **msg["content"])
                        # Fix: same flush problem as pyout above.
                        changed = True
                if changed or done:
                    db.set_output(X["_id"], make_output_json(out + err, done))
                if done:
                    break
        # Polling interval between database checks.
        # NOTE(review): placement reconstructed from flattened source — confirm.
        time.sleep(0.1)
def run_ip_device(): import uuid import zmq from ip_receiver import IPReceiver from IPython.zmq.ipkernel import launch_kernel device_id=random.randrange(sys.maxint) kernel=launch_kernel() db.set_ipython_ports(kernel) sub=IPReceiver(zmq.SUB, kernel[2]) context=zmq.Context() xreq=context.socket(zmq.XREQ) xreq.connect("tcp://localhost:%i"%(kernel[1],)) while True: for X in db.get_unevaluated_cells(device_id): header={"msg_id":str(X["_id"])} xreq.send_json({"header":header, "msg_type":"execute_request", "content": { \ "code":X['input'], "silent":False, "user_variables":[], "user_expressions":{}}}) out="" err="" while True: print "here" changed=False done=False for msg in sub.getMessages(header): if msg["msg_type"]=="stream": out+=msg["content"]["data"] changed=True elif msg["msg_type"]=="pyout": out+=msg["content"]["data"]["text/plain"]+"\n" elif msg["msg_type"]=="status" and msg["content"]["execution_state"]=="idle": done=True elif msg["msg_type"]=="pyerr": err+=new_stream("error", printout=False, **msg["content"]) if changed or done: db.set_output(X["_id"], make_output_json(out+err, done)) if done: break time.sleep(0.1)
def run_ip_worker(request_msg):
    """
    Execute one block of input code and then exit

    INPUT:
        request_msg---a json message with the input code, in ipython messaging format
    """
    #TODO: db and fs are inherited from the parent process; is that thread safe?
    import uuid
    import zmq
    import os
    import shutil
    import tempfile
    from ip_receiver import IPReceiver
    from IPython.zmq.ipkernel import launch_kernel
    msg_id=str(request_msg['_id'])
    log(db, msg_id, message='Starting run_ip_worker')
    #TODO: launch the kernel by forking an already-running clean process
    kernel=launch_kernel()
    # Work in a throwaway directory so any files the user code creates can be
    # collected and uploaded afterwards.
    tempDir=tempfile.mkdtemp()
    log(db, msg_id, message="Temporary directory: %s"%tempDir)
    db.set_ipython_ports(kernel)
    # kernel is a tuple; presumably (process, shell_port, iopub_port,
    # stdin_port, hb_port) — kernel[2] is the IOPub port we subscribe to and
    # kernel[1] the shell port we send requests on. TODO confirm against
    # launch_kernel's return value.
    sub=IPReceiver(zmq.SUB, kernel[2])
    context=zmq.Context()
    xreq=context.socket(zmq.XREQ)
    xreq.connect("tcp://localhost:%i"%(kernel[1],))
    log(db, msg_id, 'Finished setting up IPython kernel')
    # Monotonic counter stamped onto every stored message so clients can
    # reorder them.
    sequence=0
    # Prepend a chdir so the user code runs inside the temporary directory.
    inputCode="import os\nos.chdir(%r)\n"%(tempDir,)+request_msg["input"]
    header={"msg_id": msg_id}
    xreq.send_json({"header": header, "msg_type": "execute_request",
                    "content": {"code":inputCode, "silent":False,
                                "user_variables":[], "user_expressions":{}} })
    log(db, msg_id, "Sent request, starting loop for output")
    # Drain IOPub messages into the database until the kernel reports that
    # the computation has finished.
    while True:
        done=False
        new_messages=[]
        for msg in sub.getMessages(header):
            # Only persist the message types clients know how to render.
            if msg["msg_type"] in ("stream", "display_data", "pyout", "extension","execute_reply","status","pyerr"):
                msg['sequence']=sequence
                sequence+=1
                new_messages.append(msg)
            # Either an execute_reply or a status/idle message marks the end
            # of the computation.
            if msg["msg_type"]=="execute_reply" or \
               (msg["msg_type"]=="status" and msg["content"]["execution_state"]=="idle"):
                done=True
        if len(new_messages)>0:
            db.add_messages(request_msg["_id"],new_messages)
        if done:
            break
    # Upload every file the computation left in the temporary directory to
    # the file store, then advertise the list to clients.
    file_list=[]
    for filename in os.listdir(tempDir):
        file_list.append(filename)
        fs_file=fs.new_file(request_msg["_id"], filename)
        with open(tempDir+"/"+filename) as f:
            fs_file.write(f.read())
        fs_file.close()
    if len(file_list)>0:
        file_list.sort()
        #TODO: this message comes *after* the end of computation message from ipython
        # either we need to slip it in before, or we need to have our own custom end-of-computation message.
        db.add_messages(request_msg["_id"],[{'parent_header':header, 'sequence':sequence, 'msg_type':'extension', 'content':{'msg_type':'files', 'files':file_list}}])
    shutil.rmtree(tempDir)
    # NOTE(review): sequence is bumped unconditionally here, so when no files
    # were produced the comp_end message skips a sequence number — confirm
    # clients tolerate gaps.
    sequence+=1
    db.add_messages(request_msg["_id"],[{'parent_header':header, 'sequence':sequence, 'msg_type':'extension', 'content':{'msg_type':'comp_end'}}])
    #TODO: make polling interval a variable
    # NOTE(review): a trailing sleep in a one-shot worker looks like a
    # leftover polling stub; indentation was reconstructed from flattened
    # source — confirm intended placement.
    time.sleep(0.1)