def connect(self, south_peer=None, west_peer=None):
    """connect to peers.  Each peer is a 3-tuple of the form:
    (location, north_addr, east_addr)
    as produced by
    """
    if south_peer is not None:
        location, url, _ = south_peer
        self.south.connect(disambiguate_url(url, location))
    if west_peer is not None:
        location, _, url = west_peer
        self.west.connect(disambiguate_url(url, location))

def connect(self, peers):
    """connect to peers.  `peers` will be a dict of 4-tuples, keyed by name.
    {peer : (ident, addr, pub_addr, location)}
    where peer is the name, ident is the XREP identity, addr, pub_addr are the
    """
    for peer, (ident, url, pub_url, location) in peers.items():
        self.peers[peer] = ident
        if ident != self.identity:
            self.sub.connect(disambiguate_url(pub_url, location))
        if ident > self.identity:
            # prevent duplicate xrep, by only connecting
            # engines to engines with higher IDENTITY
            # a doubly-connected pair will crash
            self.socket.connect(disambiguate_url(url, location))

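# A minimal sketch of the peer records the two connect() variants above
# expect, following their docstrings. All concrete values here are
# hypothetical, purely to illustrate the tuple layouts:
#
#   south_peer = ('10.0.0.5', 'tcp://0.0.0.0:5555', 'tcp://0.0.0.0:5556')
#   # (location, north_addr, east_addr)
#
#   peers = {
#       'engine.1': (b'engine.1', 'tcp://0.0.0.0:5557',
#                    'tcp://0.0.0.0:5558', '10.0.0.6'),
#   }  # {peer : (ident, addr, pub_addr, location)}
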
def main(connection_file):
    """watch iopub channel, and print messages"""
    ctx = zmq.Context.instance()
    with open(connection_file) as f:
        cfg = json.loads(f.read())
    location = cfg["location"]
    reg_url = cfg["url"]
    session = Session(key=str_to_bytes(cfg["exec_key"]))

    query = ctx.socket(zmq.DEALER)
    query.connect(disambiguate_url(cfg["url"], location))
    session.send(query, "connection_request")
    idents, msg = session.recv(query, mode=0)
    c = msg["content"]
    iopub_url = disambiguate_url(c["iopub"], location)
    sub = ctx.socket(zmq.SUB)
    # This will subscribe to all messages:
    sub.setsockopt(zmq.SUBSCRIBE, b"")
    # replace b'' with b'engine.1.stdout' to subscribe only to engine 1's stdout
    # 0MQ subscriptions are simple 'foo*' matches, so 'engine.1.' subscribes
    # to everything from engine 1, but there is no way to subscribe to
    # just stdout from everyone.
    # multiple calls to subscribe will add subscriptions, e.g. to subscribe to
    # engine 1's stderr and engine 2's stdout:
    # sub.setsockopt(zmq.SUBSCRIBE, b'engine.1.stderr')
    # sub.setsockopt(zmq.SUBSCRIBE, b'engine.2.stdout')
    sub.connect(iopub_url)
    while True:
        try:
            idents, msg = session.recv(sub, mode=0)
        except KeyboardInterrupt:
            return
        # ident always length 1 here
        topic = idents[0]
        if msg["msg_type"] == "stream":
            # stdout/stderr
            # stream names are in msg['content']['name'], if you want to handle
            # them differently
            print "%s: %s" % (topic, msg["content"]["data"])
        elif msg["msg_type"] == "pyerr":
            # Python traceback
            c = msg["content"]
            print topic + ":"
            for line in c["traceback"]:
                # indent lines
                print " " + line

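# A hedged sketch of how main() above might be driven; the argument handling
# and the default connection-file name are assumptions, not from the original:
#
#   if __name__ == '__main__':
#       import sys
#       # pass the path to ipcontroller-client.json (default name is an assumption)
#       main(sys.argv[1] if len(sys.argv) > 1 else 'ipcontroller-client.json')
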
def load_connector_file(self):
    """load config from a JSON connector file,
    at a *lower* priority than command-line/config files.
    """
    self.log.info("Loading url_file %r" % self.url_file)
    config = self.config

    with open(self.url_file) as f:
        d = json.loads(f.read())

    if 'exec_key' in d:
        config.Session.key = asbytes(d['exec_key'])

    try:
        config.EngineFactory.location
    except AttributeError:
        config.EngineFactory.location = d['location']

    d['url'] = disambiguate_url(d['url'], config.EngineFactory.location)
    try:
        config.EngineFactory.url
    except AttributeError:
        config.EngineFactory.url = d['url']

    try:
        config.EngineFactory.sshserver
    except AttributeError:
        config.EngineFactory.sshserver = d['ssh']

def construct(self):
    # This is the working dir by now.
    sys.path.insert(0, '')
    config = self.master_config
    # if os.path.exists(config.Global.key_file) and config.Global.secure:
    #     config.SessionFactory.exec_key = config.Global.key_file
    if os.path.exists(config.Global.url_file):
        with open(config.Global.url_file) as f:
            d = json.loads(f.read())
            for k, v in d.iteritems():
                if isinstance(v, unicode):
                    d[k] = v.encode()
        if d['exec_key']:
            config.SessionFactory.exec_key = d['exec_key']
        d['url'] = disambiguate_url(d['url'], d['location'])
        config.RegistrationFactory.url = d['url']
        config.EngineFactory.location = d['location']

    config.Kernel.exec_lines = config.Global.exec_lines

    self.start_mpi()

    # Create the underlying shell class and EngineService
    # shell_class = import_item(self.master_config.Global.shell_class)
    try:
        self.engine = EngineFactory(config=config, logname=self.log.name)
    except:
        self.log.error("Couldn't start the Engine", exc_info=True)
        self.exit(1)

    self.start_logging()

def maybe_tunnel(url):
    """like connect, but don't complete the connection (for use by heartbeat)"""
    url = disambiguate_url(url, self.location)
    if self.using_ssh:
        self.log.debug("Tunneling connection to %s via %s" % (url, self.sshserver))
        url, tunnelobj = tunnel.open_tunnel(url, self.sshserver,
                                            keyfile=self.sshkey, paramiko=self.paramiko,
                                            password=password)
    return url

def connect(s, url):
    url = disambiguate_url(url, self.location)
    if self.using_ssh:
        self.log.debug("Tunneling connection to %s via %s" % (url, self.sshserver))
        return tunnel.tunnel_connection(s, url, self.sshserver,
                                        keyfile=self.sshkey, paramiko=self.paramiko,
                                        password=password)
    else:
        return s.connect(url)

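# Usage sketch for connect()/maybe_tunnel() above (the socket and urls are
# illustrative, not from the original): sockets connect directly unless
# self.using_ssh is set, in which case traffic goes through an SSH tunnel.
#
#   s = ctx.socket(zmq.DEALER)
#   connect(s, 'tcp://10.0.0.5:5555')             # tunnels if using_ssh
#   hb_url = maybe_tunnel('tcp://10.0.0.5:5556')  # heartbeat connects itself later
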
def init_engine(self):
    # This is the working dir by now.
    sys.path.insert(0, '')
    config = self.config
    # print config
    self.find_url_file()

    # if os.path.exists(config.Global.key_file) and config.Global.secure:
    #     config.SessionFactory.exec_key = config.Global.key_file
    if os.path.exists(self.url_file):
        with open(self.url_file) as f:
            d = json.loads(f.read())
            for k, v in d.items():
                if isinstance(v, str):
                    d[k] = v.encode()
        if d['exec_key']:
            config.Session.key = d['exec_key']
        d['url'] = disambiguate_url(d['url'], d['location'])
        config.EngineFactory.url = d['url']
        config.EngineFactory.location = d['location']

    try:
        exec_lines = config.Kernel.exec_lines
    except AttributeError:
        config.Kernel.exec_lines = []
        exec_lines = config.Kernel.exec_lines

    if self.startup_script:
        enc = sys.getfilesystemencoding() or 'utf8'
        cmd = "execfile(%r)" % self.startup_script.encode(enc)
        exec_lines.append(cmd)
    if self.startup_command:
        exec_lines.append(self.startup_command)

    # Create the underlying shell class and Engine
    # shell_class = import_item(self.master_config.Global.shell_class)
    # print self.config
    try:
        self.engine = EngineFactory(config=config, log=self.log)
    except:
        self.log.error("Couldn't start the Engine", exc_info=True)
        self.exit(1)

def complete_registration(self, msg):
    # print msg
    self._abort_dc.stop()
    ctx = self.context
    loop = self.loop
    identity = self.bident
    idents, msg = self.session.feed_identities(msg)
    msg = Message(self.session.unpack_message(msg))

    if msg.content.status == 'ok':
        self.id = int(msg.content.id)

        # create Shell Streams (MUX, Task, etc.):
        queue_addr = msg.content.mux
        shell_addrs = [str(queue_addr)]
        task_addr = msg.content.task
        if task_addr:
            shell_addrs.append(str(task_addr))

        # Uncomment this to go back to two-socket model
        # shell_streams = []
        # for addr in shell_addrs:
        #     stream = zmqstream.ZMQStream(ctx.socket(zmq.XREP), loop)
        #     stream.setsockopt(zmq.IDENTITY, identity)
        #     stream.connect(disambiguate_url(addr, self.location))
        #     shell_streams.append(stream)

        # Now use only one shell stream for mux and tasks
        stream = zmqstream.ZMQStream(ctx.socket(zmq.XREP), loop)
        stream.setsockopt(zmq.IDENTITY, identity)
        shell_streams = [stream]
        for addr in shell_addrs:
            stream.connect(disambiguate_url(addr, self.location))
        # end single stream-socket

        # control stream:
        control_addr = str(msg.content.control)
        control_stream = zmqstream.ZMQStream(ctx.socket(zmq.XREP), loop)
        control_stream.setsockopt(zmq.IDENTITY, identity)
        control_stream.connect(disambiguate_url(control_addr, self.location))

        # create iopub stream:
        iopub_addr = msg.content.iopub
        iopub_stream = zmqstream.ZMQStream(ctx.socket(zmq.PUB), loop)
        iopub_stream.setsockopt(zmq.IDENTITY, identity)
        iopub_stream.connect(disambiguate_url(iopub_addr, self.location))

        # launch heartbeat
        hb_addrs = msg.content.heartbeat
        # print (hb_addrs)

        # # Redirect input streams and set a display hook.
        if self.out_stream_factory:
            sys.stdout = self.out_stream_factory(self.session, iopub_stream, 'stdout')
            sys.stdout.topic = 'engine.%i.stdout' % self.id
            sys.stderr = self.out_stream_factory(self.session, iopub_stream, 'stderr')
            sys.stderr.topic = 'engine.%i.stderr' % self.id
        if self.display_hook_factory:
            sys.displayhook = self.display_hook_factory(self.session, iopub_stream)
            sys.displayhook.topic = 'engine.%i.pyout' % self.id

        self.kernel = Kernel(config=self.config, int_id=self.id, ident=self.ident,
                             session=self.session, control_stream=control_stream,
                             shell_streams=shell_streams, iopub_stream=iopub_stream,
                             loop=loop, user_ns=self.user_ns, log=self.log)
        self.kernel.start()
        hb_addrs = [disambiguate_url(addr, self.location) for addr in hb_addrs]
        heart = Heart(*list(map(str, hb_addrs)), heart_id=identity)
        heart.start()
    else:
        self.log.fatal("Registration Failed: %s" % msg)
        raise Exception("Registration Failed: %s" % msg)

    self.log.info("Completed registration with id %i" % self.id)

def init_engine(self):
    # This is the working dir by now.
    sys.path.insert(0, '')
    config = self.config
    # print config
    self.find_url_file()

    # was the url manually specified?
    keys = set(self.config.EngineFactory.keys())
    keys = keys.union(set(self.config.RegistrationFactory.keys()))

    if keys.intersection(set(['ip', 'url', 'port'])):
        # Connection info was specified, don't wait for the file
        url_specified = True
        self.wait_for_url_file = 0
    else:
        url_specified = False

    if self.wait_for_url_file and not os.path.exists(self.url_file):
        self.log.warn("url_file %r not found" % self.url_file)
        self.log.warn("Waiting up to %.1f seconds for it to arrive." % self.wait_for_url_file)
        tic = time.time()
        while not os.path.exists(self.url_file) and (time.time() - tic < self.wait_for_url_file):
            # wait for url_file to exist, for up to `wait_for_url_file` seconds
            time.sleep(0.1)

    if os.path.exists(self.url_file):
        self.log.info("Loading url_file %r" % self.url_file)
        with open(self.url_file) as f:
            d = json.loads(f.read())
        if d['exec_key']:
            config.Session.key = asbytes(d['exec_key'])
        d['url'] = disambiguate_url(d['url'], d['location'])
        config.EngineFactory.url = d['url']
        config.EngineFactory.location = d['location']
    elif not url_specified:
        self.log.critical("Fatal: url file never arrived: %s" % self.url_file)
        self.exit(1)

    try:
        exec_lines = config.Kernel.exec_lines
    except AttributeError:
        config.Kernel.exec_lines = []
        exec_lines = config.Kernel.exec_lines

    if self.startup_script:
        enc = sys.getfilesystemencoding() or 'utf8'
        cmd = "execfile(%r)" % self.startup_script.encode(enc)
        exec_lines.append(cmd)
    if self.startup_command:
        exec_lines.append(self.startup_command)

    # Create the underlying shell class and Engine
    # shell_class = import_item(self.master_config.Global.shell_class)
    # print self.config
    try:
        self.engine = EngineFactory(config=config, log=self.log)
    except:
        self.log.error("Couldn't start the Engine", exc_info=True)
        self.exit(1)

def init_schedulers(self):
    children = self.children
    mq = import_item(str(self.mq_class))

    hub = self.factory
    # disambiguate url, in case of *
    monitor_url = disambiguate_url(hub.monitor_url)
    # maybe_inproc = 'inproc://monitor' if self.use_threads else monitor_url
    # IOPub relay (in a Process)
    q = mq(zmq.PUB, zmq.SUB, zmq.PUB, b'N/A', b'iopub')
    q.bind_in(hub.client_info['iopub'])
    q.bind_out(hub.engine_info['iopub'])
    q.setsockopt_out(zmq.SUBSCRIBE, b'')
    q.connect_mon(monitor_url)
    q.daemon = True
    children.append(q)

    # Multiplexer Queue (in a Process)
    q = mq(zmq.ROUTER, zmq.ROUTER, zmq.PUB, b'in', b'out')
    q.bind_in(hub.client_info['mux'])
    q.setsockopt_in(zmq.IDENTITY, b'mux')
    q.bind_out(hub.engine_info['mux'])
    q.connect_mon(monitor_url)
    q.daemon = True
    children.append(q)

    # Control Queue (in a Process)
    q = mq(zmq.ROUTER, zmq.ROUTER, zmq.PUB, b'incontrol', b'outcontrol')
    q.bind_in(hub.client_info['control'])
    q.setsockopt_in(zmq.IDENTITY, b'control')
    q.bind_out(hub.engine_info['control'])
    q.connect_mon(monitor_url)
    q.daemon = True
    children.append(q)

    try:
        scheme = self.config.TaskScheduler.scheme_name
    except AttributeError:
        scheme = TaskScheduler.scheme_name.get_default_value()
    # Task Queue (in a Process)
    if scheme == 'pure':
        self.log.warn("task::using pure XREQ Task scheduler")
        q = mq(zmq.ROUTER, zmq.DEALER, zmq.PUB, b'intask', b'outtask')
        # q.setsockopt_out(zmq.HWM, hub.hwm)
        q.bind_in(hub.client_info['task'][1])
        q.setsockopt_in(zmq.IDENTITY, b'task')
        q.bind_out(hub.engine_info['task'])
        q.connect_mon(monitor_url)
        q.daemon = True
        children.append(q)
    elif scheme == 'none':
        self.log.warn("task::using no Task scheduler")
    else:
        self.log.info("task::using Python %s Task scheduler" % scheme)
        sargs = (hub.client_info['task'][1], hub.engine_info['task'],
                 monitor_url, disambiguate_url(hub.client_info['notification']))
        kwargs = dict(logname='scheduler', loglevel=self.log_level,
                      log_url=self.log_url, config=dict(self.config))
        if 'Process' in self.mq_class:
            # run the Python scheduler in a Process
            q = Process(target=launch_scheduler, args=sargs, kwargs=kwargs)
            q.daemon = True
            children.append(q)
        else:
            # single-threaded Controller
            kwargs['in_thread'] = True
            launch_scheduler(*sargs, **kwargs)

def init_schedulers(self):
    children = self.children
    mq = import_item(str(self.mq_class))

    f = self.factory
    ident = f.session.bsession
    # disambiguate url, in case of *
    monitor_url = disambiguate_url(f.monitor_url)
    # maybe_inproc = 'inproc://monitor' if self.use_threads else monitor_url
    # IOPub relay (in a Process)
    q = mq(zmq.PUB, zmq.SUB, zmq.PUB, b'N/A', b'iopub')
    q.bind_in(f.client_url('iopub'))
    q.setsockopt_in(zmq.IDENTITY, ident + b"_iopub")
    q.bind_out(f.engine_url('iopub'))
    q.setsockopt_out(zmq.SUBSCRIBE, b'')
    q.connect_mon(monitor_url)
    q.daemon = True
    children.append(q)

    # Multiplexer Queue (in a Process)
    q = mq(zmq.ROUTER, zmq.ROUTER, zmq.PUB, b'in', b'out')
    q.bind_in(f.client_url('mux'))
    q.setsockopt_in(zmq.IDENTITY, b'mux_in')
    q.bind_out(f.engine_url('mux'))
    q.setsockopt_out(zmq.IDENTITY, b'mux_out')
    q.connect_mon(monitor_url)
    q.daemon = True
    children.append(q)

    # Control Queue (in a Process)
    q = mq(zmq.ROUTER, zmq.ROUTER, zmq.PUB, b'incontrol', b'outcontrol')
    q.bind_in(f.client_url('control'))
    q.setsockopt_in(zmq.IDENTITY, b'control_in')
    q.bind_out(f.engine_url('control'))
    q.setsockopt_out(zmq.IDENTITY, b'control_out')
    q.connect_mon(monitor_url)
    q.daemon = True
    children.append(q)

    try:
        scheme = self.config.TaskScheduler.scheme_name
    except AttributeError:
        scheme = TaskScheduler.scheme_name.get_default_value()
    # Task Queue (in a Process)
    if scheme == 'pure':
        self.log.warn("task::using pure DEALER Task scheduler")
        q = mq(zmq.ROUTER, zmq.DEALER, zmq.PUB, b'intask', b'outtask')
        # q.setsockopt_out(zmq.HWM, hub.hwm)
        q.bind_in(f.client_url('task'))
        q.setsockopt_in(zmq.IDENTITY, b'task_in')
        q.bind_out(f.engine_url('task'))
        q.setsockopt_out(zmq.IDENTITY, b'task_out')
        q.connect_mon(monitor_url)
        q.daemon = True
        children.append(q)
    elif scheme == 'none':
        self.log.warn("task::using no Task scheduler")
    else:
        self.log.info("task::using Python %s Task scheduler" % scheme)
        sargs = (f.client_url('task'), f.engine_url('task'),
                 monitor_url,
                 disambiguate_url(f.client_url('notification')),
                 disambiguate_url(f.client_url('registration')),
                 )
        kwargs = dict(logname='scheduler', loglevel=self.log_level,
                      log_url=self.log_url, config=dict(self.config))
        if 'Process' in self.mq_class:
            # run the Python scheduler in a Process
            q = Process(target=launch_scheduler, args=sargs, kwargs=kwargs)
            q.daemon = True
            children.append(q)
        else:
            # single-threaded Controller
            kwargs['in_thread'] = True
            launch_scheduler(*sargs, **kwargs)

    # set unlimited HWM for all relay devices
    if hasattr(zmq, 'SNDHWM'):
        q = children[0]
        q.setsockopt_in(zmq.RCVHWM, 0)
        q.setsockopt_out(zmq.SNDHWM, 0)

        for q in children[1:]:
            if not hasattr(q, 'setsockopt_in'):
                continue
            q.setsockopt_in(zmq.SNDHWM, 0)
            q.setsockopt_in(zmq.RCVHWM, 0)
            q.setsockopt_out(zmq.SNDHWM, 0)
            q.setsockopt_out(zmq.RCVHWM, 0)
            q.setsockopt_mon(zmq.SNDHWM, 0)

def __init__(self, url_or_file=None, profile='default', cluster_dir=None,
             ipython_dir=None, context=None, username=None, debug=False,
             exec_key=None, sshserver=None, sshkey=None, password=None,
             paramiko=None, timeout=10):
    super(Client, self).__init__(debug=debug, profile=profile)
    if context is None:
        context = zmq.Context.instance()
    self._context = context

    self._setup_cluster_dir(profile, cluster_dir, ipython_dir)
    if self._cd is not None:
        if url_or_file is None:
            url_or_file = pjoin(self._cd.security_dir, 'ipcontroller-client.json')
    assert url_or_file is not None, \
        "I can't find enough information to connect to a hub!" \
        " Please specify at least one of url_or_file or profile."

    try:
        util.validate_url(url_or_file)
    except AssertionError:
        if not os.path.exists(url_or_file):
            if self._cd:
                url_or_file = os.path.join(self._cd.security_dir, url_or_file)
            assert os.path.exists(url_or_file), \
                "Not a valid connection file or url: %r" % url_or_file
        with open(url_or_file) as f:
            cfg = json.loads(f.read())
    else:
        cfg = {'url': url_or_file}

    # sync defaults from args, json:
    if sshserver:
        cfg['ssh'] = sshserver
    if exec_key:
        cfg['exec_key'] = exec_key
    exec_key = cfg['exec_key']
    sshserver = cfg['ssh']
    url = cfg['url']
    location = cfg.setdefault('location', None)
    cfg['url'] = util.disambiguate_url(cfg['url'], location)
    url = cfg['url']

    self._config = cfg

    self._ssh = bool(sshserver or sshkey or password)
    if self._ssh and sshserver is None:
        # default to ssh via localhost
        sshserver = url.split('://')[1].split(':')[0]
    if self._ssh and password is None:
        if tunnel.try_passwordless_ssh(sshserver, sshkey, paramiko):
            password = False
        else:
            password = getpass("SSH Password for %s: " % sshserver)
    ssh_kwargs = dict(keyfile=sshkey, password=password, paramiko=paramiko)
    if exec_key is not None and os.path.isfile(exec_key):
        arg = 'keyfile'
    else:
        arg = 'key'
    key_arg = {arg: exec_key}
    if username is None:
        self.session = ss.StreamSession(**key_arg)
    else:
        self.session = ss.StreamSession(username, **key_arg)
    self._query_socket = self._context.socket(zmq.XREQ)
    self._query_socket.setsockopt(zmq.IDENTITY, self.session.session)
    if self._ssh:
        tunnel.tunnel_connection(self._query_socket, url, sshserver, **ssh_kwargs)
    else:
        self._query_socket.connect(url)

    self.session.debug = self.debug

    self._notification_handlers = {'registration_notification': self._register_engine,
                                   'unregistration_notification': self._unregister_engine,
                                   'shutdown_notification': lambda msg: self.close(),
                                   }
    self._queue_handlers = {'execute_reply': self._handle_execute_reply,
                            'apply_reply': self._handle_apply_reply}
    self._connect(sshserver, ssh_kwargs, timeout)

def connect_socket(s, url):
    url = util.disambiguate_url(url, self._config['location'])
    if self._ssh:
        return tunnel.tunnel_connection(s, url, sshserver, **ssh_kwargs)
    else:
        return s.connect(url)

def init_hub(self):
    """construct"""
    client_iface = "%s://%s:" % (self.client_transport, self.client_ip) + "%i"
    engine_iface = "%s://%s:" % (self.engine_transport, self.engine_ip) + "%i"

    ctx = self.context
    loop = self.loop

    # Registrar socket
    q = ZMQStream(ctx.socket(zmq.ROUTER), loop)
    q.bind(client_iface % self.regport)
    self.log.info("Hub listening on %s for registration.", client_iface % self.regport)
    if self.client_ip != self.engine_ip:
        q.bind(engine_iface % self.regport)
        self.log.info("Hub listening on %s for registration.", engine_iface % self.regport)

    ### Engine connections ###

    # heartbeat
    hpub = ctx.socket(zmq.PUB)
    hpub.bind(engine_iface % self.hb[0])
    hrep = ctx.socket(zmq.ROUTER)
    hrep.bind(engine_iface % self.hb[1])
    self.heartmonitor = HeartMonitor(loop=loop, config=self.config, log=self.log,
                                     pingstream=ZMQStream(hpub, loop),
                                     pongstream=ZMQStream(hrep, loop)
                                     )

    ### Client connections ###
    # Notifier socket
    n = ZMQStream(ctx.socket(zmq.PUB), loop)
    n.bind(client_iface % self.notifier_port)

    ### build and launch the queues ###

    # monitor socket
    sub = ctx.socket(zmq.SUB)
    sub.setsockopt(zmq.SUBSCRIBE, b"")
    sub.bind(self.monitor_url)
    sub.bind('inproc://monitor')
    sub = ZMQStream(sub, loop)

    # connect the db
    self.log.info('Hub using DB backend: %r' % (self.db_class.split()[-1]))
    # cdir = self.config.Global.cluster_dir
    self.db = import_item(str(self.db_class))(session=self.session.session,
                                              config=self.config, log=self.log)
    time.sleep(.25)
    try:
        scheme = self.config.TaskScheduler.scheme_name
    except AttributeError:
        from .scheduler import TaskScheduler
        scheme = TaskScheduler.scheme_name.get_default_value()
    # build connection dicts
    self.engine_info = {
        'control': engine_iface % self.control[1],
        'mux': engine_iface % self.mux[1],
        'heartbeat': (engine_iface % self.hb[0], engine_iface % self.hb[1]),
        'task': engine_iface % self.task[1],
        'iopub': engine_iface % self.iopub[1],
        # 'monitor': engine_iface % self.mon_port,
    }

    self.client_info = {
        'control': client_iface % self.control[0],
        'mux': client_iface % self.mux[0],
        'task': (scheme, client_iface % self.task[0]),
        'iopub': client_iface % self.iopub[0],
        'notification': client_iface % self.notifier_port,
    }
    self.log.debug("Hub engine addrs: %s", self.engine_info)
    self.log.debug("Hub client addrs: %s", self.client_info)

    # resubmit stream
    r = ZMQStream(ctx.socket(zmq.DEALER), loop)
    url = util.disambiguate_url(self.client_info['task'][-1])
    r.setsockopt(zmq.IDENTITY, self.session.bsession)
    r.connect(url)

    self.hub = Hub(loop=loop, session=self.session, monitor=sub,
                   heartmonitor=self.heartmonitor, query=q, notifier=n,
                   resubmit=r, db=self.db,
                   engine_info=self.engine_info, client_info=self.client_info,
                   log=self.log)

def disambiguate_dns_url(url, location):
    """accept either IP address or dns name, and return IP"""
    if not ip_pat.match(location):
        location = socket.gethostbyname(location)
    return disambiguate_url(url, location)
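
# For reference, a minimal sketch of the behavior every snippet above relies
# on from disambiguate_url: rewrite wildcard interfaces ('*' / '0.0.0.0') in
# a tcp URL into a connectable address, preferring the supplied location.
# This is an illustrative approximation, not the library's implementation.
import re

def _disambiguate_url_sketch(url, location=None):
    m = re.match(r'^(tcp)://(.+):(\d+)$', url)
    if m is None:
        # not a tcp url (e.g. ipc://); nothing to disambiguate
        return url
    proto, ip, port = m.groups()
    if ip in ('*', '0.0.0.0'):
        # bind-to-all addresses aren't connectable; substitute a real one
        ip = location or '127.0.0.1'
    return '%s://%s:%s' % (proto, ip, port)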